Version 82

Hydrus 2013-08-28 16:31:52 -05:00
parent b0d3fd0831
commit b2affb8fdc
17 changed files with 1666 additions and 934 deletions

View File

@ -8,6 +8,51 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 82</h3></li>
<ul>
<li>a bug where slow search results would sometimes appear after search predicates were removed has been fixed</li>
<li>a lot of autocomplete gui- and db-blocking reorganisation</li>
<li>searches are now entirely asynchronous to the gui thread</li>
<li>searches are split into two granular phases, and are cancellable during processing</li>
<li>simplified system predicate storage</li>
<li>consolidated all system predicate filtering to initial db search</li>
<li>huge improvements to how the thumbnail canvas is sized and extended and drawn to</li>
<li>numerous fixes and improvements to how thumbnails are drawn to screen</li>
<li>reworked how collect and sort work</li>
<li>the raw cpu time behind sorting is much faster</li>
<li>clarified my listening media class</li>
<li>added another optimisation to canvas resizing</li>
<li>big improvement to canvas redrawing and refreshing</li>
<li>important fix to how the client figures out what to draw when clicking in whitespace</li>
<li>thumbnail fade is much smoother</li>
<li>thumbnails should now generally draw a little smoother</li>
<li>selectall and selectnone's thumbnail fade is less cpu intensive</li>
<li>cleared up a number of collect related selection and index bugs</li>
<li>fixed a media identifier issue</li>
<li>all import-related appending and drawing is much faster</li>
<li>flicker bug on small appends fixed</li>
<li>fixed a page up/down drawing bug on the main thumbnail canvas view</li>
<li>fixed an unknown-timestamp thumbnail right click issue</li>
<li>made my internal media data storage system a lot simpler</li>
<li>new sortedlist class to make some media stuff easier to manage</li>
<li>reworked how new media is added to a page of thumbs (usually import-append, but the new system supports collect- and sort-sympathetic insertion):</li>
<li>-calculating combined tags on the left can be up to ten times faster</li>
<li>-integrating new files into the sort is much quicker</li>
<li>-integrating new files into the collect is much quicker</li>
<li>-improved how new thumbnails are decided to be drawn</li>
<li>when trying to archive or inbox more than one file, you'll now get a yes/no dialog to confirm</li>
<li>started db testing framework</li>
<li>db tracebacks improved</li>
<li>slimmed down content_update data-side processing</li>
<li>a couple small code fixes</li>
<li>archiving or inboxing a file no longer removes it from the search when the opposite system predicate is set</li>
<li>system:rating fixed</li>
<li>fixed a bug where you could middle-click-download files that had no known source</li>
<li>couple of bugs with system:ratio fixed</li>
<li>system:ratio's string representation is now 16:9, rather than 1.777779</li>
<li>gui thread is more intelligent in telling non-gui threads that it is busy</li>
</ul>
<li><h3>version 81</h3></li>
<ul>
<li>mp4 added</li>

View File

@ -670,7 +670,7 @@ def ShowExceptionClient( e ):
etype = type( e )
value = u( e )
value = HC.u( e )
trace_list = traceback.format_stack()
@ -1378,10 +1378,8 @@ class DataCache():
class FileQueryResult():
def __init__( self, file_service_identifier, predicates, media_results ):
def __init__( self, media_results ):
self._file_service_identifier = file_service_identifier
self._predicates = predicates
self._hashes_to_media_results = { media_result.GetHash() : media_result for media_result in media_results }
self._hashes_ordered = [ media_result.GetHash() for media_result in media_results ]
self._hashes = set( self._hashes_ordered )
@ -1413,17 +1411,20 @@ class FileQueryResult():
self._hashes.difference_update( hashes )
def AddMediaResult( self, media_result ):
def AddMediaResults( self, media_results ):
hash = media_result.GetHash()
if hash in self._hashes: return # this is actually important, as sometimes we don't want the media result overwritten
self._hashes_to_media_results[ hash ] = media_result
self._hashes_ordered.append( hash )
self._hashes.add( hash )
for media_result in media_results:
hash = media_result.GetHash()
if hash in self._hashes: continue # this is actually important, as sometimes we don't want the media result overwritten
self._hashes_to_media_results[ hash ] = media_result
self._hashes_ordered.append( hash )
self._hashes.add( hash )
def GetHashes( self ): return self._hashes
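# a quick sketch of the dedupe behaviour above (MediaResultStub is hypothetical;
# only GetHash() is assumed of a media result):
class MediaResultStub():
    def __init__( self, hash ): self._hash = hash
    def GetHash( self ): return self._hash

fqr = FileQueryResult( [ MediaResultStub( 'a' ), MediaResultStub( 'b' ) ] )
fqr.AddMediaResults( ( MediaResultStub( 'b' ), MediaResultStub( 'c' ) ) ) # 'b' is skipped, not overwritten
# fqr.GetHashes() now returns { 'a', 'b', 'c' }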
@ -1442,20 +1443,6 @@ class FileQueryResult():
if len( hashes ) > 0:
( data_type, action, row ) = content_update.ToTuple()
service_type = service_identifier.GetType()
if action == HC.CONTENT_UPDATE_ARCHIVE:
if 'system:inbox' in self._predicates: self._Remove( hashes )
elif action == HC.CONTENT_UPDATE_INBOX:
if 'system:archive' in self._predicates: self._Remove( hashes )
elif action == HC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier: self._Remove( hashes )
for hash in self._hashes.intersection( hashes ):
media_result = self._hashes_to_media_results[ hash ]
@ -1543,43 +1530,16 @@ class FileSearchContext():
class FileSystemPredicates():
INBOX = 0
LOCAL = 1
HASH = 2
TIMESTAMP = 3
DURATION = 4
SIZE = 5
NUM_TAGS = 6
WIDTH = 7
HEIGHT = 8
RATIO = 9
REPOSITORIES = 10
MIME = 11
def __init__( self, system_predicates ):
self._predicates = {}
self._predicates[ self.INBOX ] = []
self._predicates[ self.LOCAL ] = [] # not using this!
self._predicates[ self.HASH ] = []
self._predicates[ self.MIME ] = []
self._predicates[ self.TIMESTAMP ] = []
self._predicates[ self.DURATION ] = []
self._predicates[ self.SIZE ] = []
self._predicates[ self.NUM_TAGS ] = []
self._predicates[ self.WIDTH ] = []
self._predicates[ self.HEIGHT ] = []
self._predicates[ self.RATIO ] = []
self._predicates[ self.REPOSITORIES ] = []
self._inbox = False
self._archive = False
self._local = False
self._not_local = False
self._num_tags_zero = False
self._num_tags_nonzero = False
self._min_num_tags = None
self._num_tags = None
self._max_num_tags = None
self._hash = None
self._min_size = None
@ -1594,6 +1554,9 @@ class FileSystemPredicates():
self._min_height = None
self._height = None
self._max_height = None
self._min_ratio = None
self._ratio = None
self._max_ratio = None
self._min_num_words = None
self._num_words = None
self._max_num_words = None
@ -1601,6 +1564,7 @@ class FileSystemPredicates():
self._duration = None
self._max_duration = None
self._limit = None
self._similar_to = None
@ -1683,10 +1647,14 @@ class FileSystemPredicates():
if system_predicate_type == HC.SYSTEM_PREDICATE_TYPE_RATIO:
( operator, ratio ) = info
( operator, ratio_width, ratio_height ) = info
if operator == '=': self._predicates[ self.RATIO ].append( ( equals, ratio ) )
elif operator == u'\u2248': self._predicates[ self.RATIO ].append( ( about_equals, ratio ) )
if operator == '=': self._ratio = ( ratio_width, ratio_height )
elif operator == u'\u2248':
self._min_ratio = ( ratio_width * 0.85, ratio_height )
self._max_ratio = ( ratio_width * 1.15, ratio_height )
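# worked example: 'system:ratio ~= 16:9' widens the target by 15% either side,
# giving min_ratio = ( 13.6, 9 ) (~1.51) and max_ratio = ( 18.4, 9 ) (~2.04)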
if system_predicate_type == HC.SYSTEM_PREDICATE_TYPE_SIZE:
@ -1709,19 +1677,9 @@ class FileSystemPredicates():
( operator, num_tags ) = info
if operator == '<': self._predicates[ self.NUM_TAGS ].append( ( lessthan, num_tags ) )
elif operator == '>':
self._predicates[ self.NUM_TAGS ].append( ( greaterthan, num_tags ) )
if num_tags == 0: self._num_tags_nonzero = True
elif operator == '=':
self._predicates[ self.NUM_TAGS ].append( ( equals, num_tags ) )
if num_tags == 0: self._num_tags_zero = True
if operator == '<': self._max_num_tags = num_tags
elif operator == '>': self._min_num_tags = num_tags
elif operator == '=': self._num_tags = num_tags
if system_predicate_type == HC.SYSTEM_PREDICATE_TYPE_WIDTH:
@ -1798,31 +1756,13 @@ class FileSystemPredicates():
def CanPreFirstRoundLimit( self ):
if self._limit is None: return False
if len( self._predicates[ self.RATIO ] ) > 0: return False
return self.CanPreSecondRoundLimit()
def CanPreSecondRoundLimit( self ):
if self._limit is None: return False
if len( self._predicates[ self.NUM_TAGS ] ) > 0: return False
return True
def GetFileServiceInfo( self ): return ( self._file_services_to_include_current, self._file_services_to_include_pending, self._file_services_to_exclude_current, self._file_services_to_exclude_pending )
def GetInfo( self ): return ( self._hash, self._min_size, self._size, self._max_size, self._mimes, self._min_timestamp, self._max_timestamp, self._min_width, self._width, self._max_width, self._min_height, self._height, self._max_height, self._min_num_words, self._num_words, self._max_num_words, self._min_duration, self._duration, self._max_duration )
def GetInfo( self ): return ( self._hash, self._min_size, self._size, self._max_size, self._mimes, self._min_timestamp, self._max_timestamp, self._min_width, self._width, self._max_width, self._min_height, self._height, self._max_height, self._min_ratio, self._ratio, self._max_ratio, self._min_num_words, self._num_words, self._max_num_words, self._min_duration, self._duration, self._max_duration )
def GetLimit( self ): return self._limit
def GetNumTagsInfo( self ): return ( self._num_tags_zero, self._num_tags_nonzero )
def GetNumTagsInfo( self ): return ( self._min_num_tags, self._num_tags, self._max_num_tags )
def GetRatingsPredicates( self ): return self._ratings_predicates
@ -1838,22 +1778,6 @@ class FileSystemPredicates():
def MustNotBeLocal( self ): return self._not_local
def OkFirstRound( self, width, height ):
if len( self._predicates[ self.RATIO ] ) > 0 and ( width is None or height is None or width == 0 or height == 0): return False
if False in ( function( float( width ) / float( height ), arg ) for ( function, arg ) in self._predicates[ self.RATIO ] ): return False
return True
def OkSecondRound( self, num_tags ):
if False in ( function( num_tags, arg ) for ( function, arg ) in self._predicates[ self.NUM_TAGS ] ): return False
return True
class GlobalBMPs():
@staticmethod

View File

@ -9,6 +9,8 @@ import ClientDB
import ClientGUI
import ClientGUIDialogs
import os
import random
import shutil
import sqlite3
import sys
import threading
@ -104,10 +106,16 @@ class Controller( wx.App ):
( callable, args, kwargs ) = pubsubs_queue.get()
HC.busy_doing_pubsub = True
try: callable( *args, **kwargs )
except wx._core.PyDeadObjectError: pass
except TypeError: pass
finally: pubsubs_queue.task_done()
finally:
pubsubs_queue.task_done()
HC.busy_doing_pubsub = False
def GetFullscreenImageCache( self ): return self._fullscreen_image_cache
@ -151,6 +159,25 @@ class Controller( wx.App ):
try:
try:
def make_temp_files_deletable( function_called, path, traceback_gumpf ):
os.chmod( path, stat.S_IWRITE )
function_called( path ) # try again
if os.path.exists( HC.TEMP_DIR ): shutil.rmtree( HC.TEMP_DIR, onerror = make_temp_files_deletable )
except: pass
try:
if not os.path.exists( HC.TEMP_DIR ): os.mkdir( HC.TEMP_DIR )
except: pass
self._splash = ClientGUI.FrameSplash()
self.SetSplashText( 'log' )
@ -185,6 +212,8 @@ class Controller( wx.App ):
threading.Thread( target = self._db.MainLoop, name = 'Database Main Loop' ).start()
self._session_manager = HydrusSessions.HydrusSessionManagerClient()
self._web_session_manager = CC.WebSessionManagerClient()
self._tag_parents_manager = HydrusTags.TagParentsManager()
@ -222,7 +251,8 @@ class Controller( wx.App ):
if HC.is_first_start: self._gui.DoFirstStart()
if HC.is_db_updated: wx.CallAfter( HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'The client has updated to version ' + HC.u( HC.SOFTWARE_VERSION ) + '!' ) )
self._db._InitPostGUI()
self._db.StartServer()
self._db.StartDaemons()
self._last_idle_time = 0.0
@ -285,6 +315,60 @@ class Controller( wx.App ):
self.Yield() # this processes the event queue immediately, so the paint event can occur
def StartFileQuery( self, query_key, search_context ):
threading.Thread( target = self.THREADDoFileQuery, name = 'file query', args = ( query_key, search_context ) ).start()
def THREADDoFileQuery( self, query_key, search_context ):
try:
query_hash_ids = HC.app.Read( 'file_query_ids', search_context )
query_hash_ids = list( query_hash_ids )
random.shuffle( query_hash_ids )
limit = search_context.GetSystemPredicates().GetLimit()
if limit is not None: query_hash_ids = query_hash_ids[ : limit ]
file_service_identifier = search_context.GetFileServiceIdentifier()
include_current_tags = search_context.IncludeCurrentTags()
media_results = []
include_pending_tags = search_context.IncludePendingTags()
i = 0
base = 256
while i < len( query_hash_ids ):
if query_key.IsCancelled(): return
if i == 0: ( last_i, i ) = ( 0, base )
else: ( last_i, i ) = ( i, i + base )
sub_query_hash_ids = query_hash_ids[ last_i : i ]
more_media_results = HC.app.Read( 'media_results_from_ids', file_service_identifier, sub_query_hash_ids )
media_results.extend( more_media_results )
HC.pubsub.pub( 'set_num_query_results', len( media_results ), len( query_hash_ids ) )
HC.app.WaitUntilGoodTimeToUseGUIThread()
HC.pubsub.pub( 'file_query_done', query_key, media_results )
except Exception as e: HC.ShowException( e )
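# the query_key above is the cancellation token for the whole search; a minimal
# sketch of the interface this thread relies on (QueryKeySketch is hypothetical,
# the real class lives elsewhere in the codebase):
class QueryKeySketch():
    def __init__( self ):
        self._lock = threading.Lock()
        self._cancelled = False
    def Cancel( self ):
        with self._lock: self._cancelled = True
    def IsCancelled( self ):
        with self._lock: return self._cancelled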
def WaitUntilGoodTimeToUseGUIThread( self ):
pubsubs_queue = HC.pubsub.GetQueue()
@ -292,7 +376,7 @@ class Controller( wx.App ):
while True:
if HC.shutdown: raise Exception( 'Client shutting down!' )
elif pubsubs_queue.qsize() == 0: return
elif pubsubs_queue.qsize() == 0 and not HC.busy_doing_pubsub: return
else: time.sleep( 0.0001 )
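# the new busy_doing_pubsub check matters because a callback can still be
# executing after the queue has emptied; without it, worker threads could dump
# work onto a gui thread that only looks idle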

View File

@ -1103,9 +1103,7 @@ class RatingDB():
( hash_id, ) = result
search_context = CC.FileSearchContext()
( media_result, ) = self._GetMediaResults( c, search_context, set( ( hash_id, ) ) )
( media_result, ) = self._GetMediaResults( c, HC.COMBINED_FILE_SERVICE_IDENTIFIER, { hash_id } )
return media_result
@ -1585,182 +1583,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self.pub_service_updates( { service_identifier : [ HC.ServiceUpdate( HC.SERVICE_UPDATE_DELETE_PENDING ) ] } )
def _DoFileQuery( self, c, query_key, search_context ):
time.sleep( 0.0001 )
# setting up
system_predicates = search_context.GetSystemPredicates()
file_service_identifier = search_context.GetFileServiceIdentifier()
tag_service_identifier = search_context.GetTagServiceIdentifier()
file_service_id = self._GetServiceId( c, file_service_identifier )
tag_service_id = self._GetServiceId( c, tag_service_identifier )
file_service_type = file_service_identifier.GetType()
tag_service_type = tag_service_identifier.GetType()
tags_to_include = search_context.GetTagsToInclude()
tags_to_exclude = search_context.GetTagsToExclude()
namespaces_to_include = search_context.GetNamespacesToInclude()
namespaces_to_exclude = search_context.GetNamespacesToExclude()
include_current_tags = search_context.IncludeCurrentTags()
include_pending_tags = search_context.IncludePendingTags()
sql_predicates = [ 'service_id = ' + HC.u( file_service_id ) ]
( hash, min_size, size, max_size, mimes, min_timestamp, max_timestamp, min_width, width, max_width, min_height, height, max_height, min_num_words, num_words, max_num_words, min_duration, duration, max_duration ) = system_predicates.GetInfo()
if min_size is not None: sql_predicates.append( 'size > ' + HC.u( min_size ) )
if size is not None: sql_predicates.append( 'size = ' + HC.u( size ) )
if max_size is not None: sql_predicates.append( 'size < ' + HC.u( max_size ) )
if mimes is not None:
if len( mimes ) == 1:
( mime, ) = mimes
sql_predicates.append( 'mime = ' + HC.u( mime ) )
else: sql_predicates.append( 'mime IN ' + HC.SplayListForDB( mimes ) )
if min_timestamp is not None: sql_predicates.append( 'timestamp >= ' + HC.u( min_timestamp ) )
if max_timestamp is not None: sql_predicates.append( 'timestamp <= ' + HC.u( max_timestamp ) )
if min_width is not None: sql_predicates.append( 'width > ' + HC.u( min_width ) )
if width is not None: sql_predicates.append( 'width = ' + HC.u( width ) )
if max_width is not None: sql_predicates.append( 'width < ' + HC.u( max_width ) )
if min_height is not None: sql_predicates.append( 'height > ' + HC.u( min_height ) )
if height is not None: sql_predicates.append( 'height = ' + HC.u( height ) )
if max_height is not None: sql_predicates.append( 'height < ' + HC.u( max_height ) )
if min_num_words is not None: sql_predicates.append( 'num_words > ' + HC.u( min_num_words ) )
if num_words is not None: sql_predicates.append( 'num_words = ' + HC.u( num_words ) )
if max_num_words is not None: sql_predicates.append( 'num_words < ' + HC.u( max_num_words ) )
if min_duration is not None: sql_predicates.append( 'duration > ' + HC.u( min_duration ) )
if duration is not None:
if duration == 0: sql_predicates.append( '( duration IS NULL OR duration = 0 )' )
else: sql_predicates.append( 'duration = ' + HC.u( duration ) )
if max_duration is not None: sql_predicates.append( 'duration < ' + HC.u( max_duration ) )
if len( tags_to_include ) > 0 or len( namespaces_to_include ) > 0:
query_hash_ids = None
if len( tags_to_include ) > 0: query_hash_ids = HC.IntelligentMassIntersect( ( self._GetHashIdsFromTag( c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ) for tag in tags_to_include ) )
if len( namespaces_to_include ) > 0:
namespace_query_hash_ids = HC.IntelligentMassIntersect( ( self._GetHashIdsFromNamespace( c, file_service_identifier, tag_service_identifier, namespace, include_current_tags, include_pending_tags ) for namespace in namespaces_to_include ) )
if query_hash_ids is None: query_hash_ids = namespace_query_hash_ids
else: query_hash_ids.intersection_update( namespace_query_hash_ids )
if len( sql_predicates ) > 1: query_hash_ids.intersection_update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) ] )
else:
if file_service_identifier != HC.COMBINED_FILE_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) }
elif tag_service_identifier != HC.COMBINED_TAG_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND status IN ( ?, ? );', ( tag_service_id, HC.CURRENT, HC.PENDING ) ) }
else: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings UNION SELECT hash_id FROM files_info;' ) }
( num_tags_zero, num_tags_nonzero ) = system_predicates.GetNumTagsInfo()
if num_tags_zero or num_tags_nonzero:
statuses = []
if include_current_tags: statuses.append( HC.CURRENT )
if include_pending_tags: statuses.append( HC.PENDING )
nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND status IN ' + HC.SplayListForDB( statuses ) + ';', ( tag_service_id, ) ) }
if num_tags_zero: query_hash_ids.difference_update( nonzero_tag_hash_ids )
elif num_tags_nonzero: query_hash_ids.intersection_update( nonzero_tag_hash_ids )
if hash is not None:
hash_id = self._GetHashId( c, hash )
query_hash_ids.intersection_update( { hash_id } )
exclude_query_hash_ids = set()
for tag in tags_to_exclude: exclude_query_hash_ids.update( self._GetHashIdsFromTag( c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ) )
for namespace in namespaces_to_exclude: exclude_query_hash_ids.update( self._GetHashIdsFromNamespace( c, file_service_identifier, tag_service_identifier, namespace, include_current_tags, include_pending_tags ) )
if file_service_type == HC.FILE_REPOSITORY and HC.options[ 'exclude_deleted_files' ]: exclude_query_hash_ids.update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_files WHERE service_id = ?;', ( self._local_file_service_id, ) ) ] )
query_hash_ids.difference_update( exclude_query_hash_ids )
( file_services_to_include_current, file_services_to_include_pending, file_services_to_exclude_current, file_services_to_exclude_pending ) = system_predicates.GetFileServiceInfo()
for service_identifier in file_services_to_include_current:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] )
for service_identifier in file_services_to_include_pending:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_transfers WHERE service_id = ?;', ( service_id, ) ) ] )
for service_identifier in file_services_to_exclude_current:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] )
for service_identifier in file_services_to_exclude_pending:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_transfers WHERE service_id = ?;', ( service_id, ) ) ] )
for ( service_identifier, operator, value ) in system_predicates.GetRatingsPredicates():
service_id = self._GetServiceId( c, service_identifier )
if value == 'rated': query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ?;', ( service_id, ) ) ] )
elif value == 'not rated': query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ?;', ( service_id, ) ) ] )
elif value == 'uncertain': query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM ratings_filter WHERE service_id = ?;', ( service_id, ) ) ] )
else:
if operator == u'\u2248': predicate = HC.u( value * 0.95 ) + ' < rating AND rating < ' + HC.u( value * 1.05 )
else: predicate = 'rating ' + operator + ' ' + HC.u( value )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ? AND ' + predicate + ';', ( service_id, ) ) ] )
media_results = self._GetMediaResults( c, search_context, query_hash_ids )
self.pub( 'file_query_done', query_key, media_results )
return media_results
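# this monolithic query is replaced by a two-phase flow: _GetFileQueryIds
# (below) answers the 'file_query_ids' read with a set of hash_ids, and the
# controller's THREADDoFileQuery then pulls media results in cancellable
# 256-id batches via the 'media_results_from_ids' read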
def _FattenAutocompleteCache( self, c ):
tag_service_identifiers = self._GetServiceIdentifiers( c, ( HC.TAG_REPOSITORY, HC.LOCAL_TAG, HC.COMBINED_TAG ) )
@ -1928,6 +1750,278 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
def _GetFavouriteCustomFilterActions( self, c ): return dict( c.execute( 'SELECT name, actions FROM favourite_custom_filter_actions;' ).fetchall() )
def _GetFileQueryIds( self, c, search_context ):
system_predicates = search_context.GetSystemPredicates()
file_service_identifier = search_context.GetFileServiceIdentifier()
tag_service_identifier = search_context.GetTagServiceIdentifier()
file_service_id = self._GetServiceId( c, file_service_identifier )
tag_service_id = self._GetServiceId( c, tag_service_identifier )
file_service_type = file_service_identifier.GetType()
tag_service_type = tag_service_identifier.GetType()
tags_to_include = search_context.GetTagsToInclude()
tags_to_exclude = search_context.GetTagsToExclude()
namespaces_to_include = search_context.GetNamespacesToInclude()
namespaces_to_exclude = search_context.GetNamespacesToExclude()
include_current_tags = search_context.IncludeCurrentTags()
include_pending_tags = search_context.IncludePendingTags()
#
sql_predicates = [ 'service_id = ' + HC.u( file_service_id ) ]
( hash, min_size, size, max_size, mimes, min_timestamp, max_timestamp, min_width, width, max_width, min_height, height, max_height, min_ratio, ratio, max_ratio, min_num_words, num_words, max_num_words, min_duration, duration, max_duration ) = system_predicates.GetInfo()
if min_size is not None: sql_predicates.append( 'size > ' + HC.u( min_size ) )
if size is not None: sql_predicates.append( 'size = ' + HC.u( size ) )
if max_size is not None: sql_predicates.append( 'size < ' + HC.u( max_size ) )
if mimes is not None:
if len( mimes ) == 1:
( mime, ) = mimes
sql_predicates.append( 'mime = ' + HC.u( mime ) )
else: sql_predicates.append( 'mime IN ' + HC.SplayListForDB( mimes ) )
if min_timestamp is not None: sql_predicates.append( 'timestamp >= ' + HC.u( min_timestamp ) )
if max_timestamp is not None: sql_predicates.append( 'timestamp <= ' + HC.u( max_timestamp ) )
if min_width is not None: sql_predicates.append( 'width > ' + HC.u( min_width ) )
if width is not None: sql_predicates.append( 'width = ' + HC.u( width ) )
if max_width is not None: sql_predicates.append( 'width < ' + HC.u( max_width ) )
if min_height is not None: sql_predicates.append( 'height > ' + HC.u( min_height ) )
if height is not None: sql_predicates.append( 'height = ' + HC.u( height ) )
if max_height is not None: sql_predicates.append( 'height < ' + HC.u( max_height ) )
if min_ratio is not None:
( ratio_width, ratio_height ) = min_ratio
sql_predicates.append( '( width * 1.0 ) / height > ' + HC.u( float( ratio_width ) ) + ' / ' + HC.u( ratio_height ) )
if ratio is not None:
( ratio_width, ratio_height ) = ratio
sql_predicates.append( '( width * 1.0 ) / height = ' + HC.u( float( ratio_width ) ) + ' / ' + HC.u( ratio_height ) )
if max_ratio is not None:
( ratio_width, ratio_height ) = max_ratio
sql_predicates.append( '( width * 1.0 ) / height < ' + HC.u( float( ratio_width ) ) + ' / ' + HC.u( ratio_height ) )
if min_num_words is not None: sql_predicates.append( 'num_words > ' + HC.u( min_num_words ) )
if num_words is not None: sql_predicates.append( 'num_words = ' + HC.u( num_words ) )
if max_num_words is not None: sql_predicates.append( 'num_words < ' + HC.u( max_num_words ) )
if min_duration is not None: sql_predicates.append( 'duration > ' + HC.u( min_duration ) )
if duration is not None:
if duration == 0: sql_predicates.append( '( duration IS NULL OR duration = 0 )' )
else: sql_predicates.append( 'duration = ' + HC.u( duration ) )
if max_duration is not None: sql_predicates.append( 'duration < ' + HC.u( max_duration ) )
if len( tags_to_include ) > 0 or len( namespaces_to_include ) > 0:
query_hash_ids = None
if len( tags_to_include ) > 0: query_hash_ids = HC.IntelligentMassIntersect( ( self._GetHashIdsFromTag( c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ) for tag in tags_to_include ) )
if len( namespaces_to_include ) > 0:
namespace_query_hash_ids = HC.IntelligentMassIntersect( ( self._GetHashIdsFromNamespace( c, file_service_identifier, tag_service_identifier, namespace, include_current_tags, include_pending_tags ) for namespace in namespaces_to_include ) )
if query_hash_ids is None: query_hash_ids = namespace_query_hash_ids
else: query_hash_ids.intersection_update( namespace_query_hash_ids )
if len( sql_predicates ) > 1: query_hash_ids.intersection_update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) ] )
else:
if file_service_identifier != HC.COMBINED_FILE_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) }
elif tag_service_identifier != HC.COMBINED_TAG_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND status IN ( ?, ? );', ( tag_service_id, HC.CURRENT, HC.PENDING ) ) }
else: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings UNION SELECT hash_id FROM files_info;' ) }
#
( min_num_tags, num_tags, max_num_tags ) = system_predicates.GetNumTagsInfo()
num_tags_zero = False
num_tags_nonzero = False
tag_predicates = []
if min_num_tags is not None:
if min_num_tags == 0: num_tags_nonzero = True # 'num_tags > 0' just means 'has tags'
else: tag_predicates.append( lambda n: n > min_num_tags )
if num_tags is not None:
if num_tags == 0: num_tags_zero = True
else: tag_predicates.append( lambda n: n == num_tags ) # param renamed from num_tags, which shadowed the value it compared against and made the test always true
if max_num_tags is not None:
if max_num_tags == 1: num_tags_zero = True # 'num_tags < 1' just means 'has no tags'
else: tag_predicates.append( lambda n: n < max_num_tags )
statuses = []
if include_current_tags: statuses.append( HC.CURRENT )
if include_pending_tags: statuses.append( HC.PENDING )
if num_tags_zero or num_tags_nonzero:
query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( query_hash_ids ) + ' AND status IN ' + HC.SplayListForDB( statuses ) + ';', ( tag_service_id, ) ) }
if len( tag_predicates ) > 0:
query_hash_ids = { id for ( id, count ) in c.execute( 'SELECT hash_id, COUNT( * ) as num_tags FROM mappings WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( query_hash_ids ) + ' AND status IN ' + HC.SplayListForDB( statuses ) + ' GROUP BY hash_id;', ( tag_service_id, ) ) if False not in ( pred( count ) for pred in tag_predicates ) }
#
if hash is not None:
hash_id = self._GetHashId( c, hash )
query_hash_ids.intersection_update( { hash_id } )
#
exclude_query_hash_ids = set()
for tag in tags_to_exclude: exclude_query_hash_ids.update( self._GetHashIdsFromTag( c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ) )
for namespace in namespaces_to_exclude: exclude_query_hash_ids.update( self._GetHashIdsFromNamespace( c, file_service_identifier, tag_service_identifier, namespace, include_current_tags, include_pending_tags ) )
if file_service_type == HC.FILE_REPOSITORY and HC.options[ 'exclude_deleted_files' ]: exclude_query_hash_ids.update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_files WHERE service_id = ?;', ( self._local_file_service_id, ) ) ] )
query_hash_ids.difference_update( exclude_query_hash_ids )
#
( file_services_to_include_current, file_services_to_include_pending, file_services_to_exclude_current, file_services_to_exclude_pending ) = system_predicates.GetFileServiceInfo()
for service_identifier in file_services_to_include_current:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] )
for service_identifier in file_services_to_include_pending:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_transfers WHERE service_id = ?;', ( service_id, ) ) ] )
for service_identifier in file_services_to_exclude_current:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] )
for service_identifier in file_services_to_exclude_pending:
service_id = self._GetServiceId( c, service_identifier )
query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_transfers WHERE service_id = ?;', ( service_id, ) ) ] )
for ( service_identifier, operator, value ) in system_predicates.GetRatingsPredicates():
service_id = self._GetServiceId( c, service_identifier )
if value == 'rated': query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ?;', ( service_id, ) ) ] )
elif value == 'not rated': query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ?;', ( service_id, ) ) ] )
elif value == 'uncertain': query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM ratings_filter WHERE service_id = ?;', ( service_id, ) ) ] )
else:
if operator == u'\u2248': predicate = HC.u( value * 0.95 ) + ' < rating AND rating < ' + HC.u( value * 1.05 )
else: predicate = 'rating ' + operator + ' ' + HC.u( value )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ? AND ' + predicate + ';', ( service_id, ) ) ] )
#
must_be_local = system_predicates.MustBeLocal() or system_predicates.MustBeArchive()
must_not_be_local = system_predicates.MustNotBeLocal()
must_be_inbox = system_predicates.MustBeInbox()
must_be_archive = system_predicates.MustBeArchive()
if must_be_local or must_not_be_local:
if file_service_id == self._local_file_service_id:
if must_not_be_local: query_hash_ids = set()
else:
local_hash_ids = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( self._local_file_service_id, ) ) ]
if must_be_local: query_hash_ids.intersection_update( local_hash_ids )
else: query_hash_ids.difference_update( local_hash_ids )
if must_be_inbox or must_be_archive:
inbox_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM file_inbox;' ) }
if must_be_inbox: query_hash_ids.intersection_update( inbox_hash_ids )
elif must_be_archive: query_hash_ids.difference_update( inbox_hash_ids )
#
if system_predicates.HasSimilarTo():
( similar_to_hash, max_hamming ) = system_predicates.GetSimilarTo()
hash_id = self._GetHashId( c, similar_to_hash )
result = c.execute( 'SELECT phash FROM perceptual_hashes WHERE hash_id = ?;', ( hash_id, ) ).fetchone()
if result is not None:
( phash, ) = result
similar_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM perceptual_hashes WHERE hydrus_hamming( phash, ? ) <= ?;', ( sqlite3.Binary( phash ), max_hamming ) ) ]
query_hash_ids.intersection_update( similar_hash_ids )
return query_hash_ids
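# HC.SplayListForDB, used throughout the above, is assumed to inline a python
# collection into an SQL IN clause; a minimal sketch of that behaviour:
def splay_list_for_db_sketch( ids ):
    return '(' + ','.join( str( id ) for id in ids ) + ')' # e.g. [ 1, 2, 3 ] -> '(1,2,3)'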
def _GetFileSystemPredicates( self, c, service_identifier ):
service_id = self._GetServiceId( c, service_identifier )
@ -2113,70 +2207,17 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
return ( 'new', None )
def _GetMediaResults( self, c, search_context, query_hash_ids ):
file_service_identifier = search_context.GetFileServiceIdentifier()
tag_service_identifier = search_context.GetTagServiceIdentifier()
def _GetMediaResults( self, c, file_service_identifier, hash_ids ):
service_id = self._GetServiceId( c, file_service_identifier )
system_predicates = search_context.GetSystemPredicates()
limit = system_predicates.GetLimit()
inbox_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM file_inbox;' ) }
# get basic results
must_be_local = system_predicates.MustBeLocal() or system_predicates.MustBeArchive()
must_not_be_local = system_predicates.MustNotBeLocal()
must_be_inbox = system_predicates.MustBeInbox()
must_be_archive = system_predicates.MustBeArchive()
if must_be_local or must_not_be_local:
if service_id == self._local_file_service_id:
if must_not_be_local: query_hash_ids = set()
else:
local_hash_ids = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( self._local_file_service_id, ) ) ]
if must_be_local: query_hash_ids.intersection_update( local_hash_ids )
else: query_hash_ids.difference_update( local_hash_ids )
if must_be_inbox: query_hash_ids.intersection_update( inbox_hash_ids )
elif must_be_archive: query_hash_ids.difference_update( inbox_hash_ids )
# similar to
if system_predicates.HasSimilarTo():
( hash, max_hamming ) = system_predicates.GetSimilarTo()
hash_id = self._GetHashId( c, hash )
result = c.execute( 'SELECT phash FROM perceptual_hashes WHERE hash_id = ?;', ( hash_id, ) ).fetchone()
if result is not None:
( phash, ) = result
similar_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM perceptual_hashes WHERE hydrus_hamming( phash, ? ) <= ?;', ( sqlite3.Binary( phash ), max_hamming ) ) ]
query_hash_ids.intersection_update( similar_hash_ids )
# get first detailed results
# since I've changed to new search model, this bit needs working over, I think?
if file_service_identifier == HC.COMBINED_FILE_SERVICE_IDENTIFIER:
all_services_results = c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( query_hash_ids ) + ';' ).fetchall()
all_services_results = c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ).fetchall()
hash_ids_i_have_info_for = set()
@ -2194,30 +2235,12 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
results.extend( [ ( hash_id, None, HC.APPLICATION_UNKNOWN, None, None, None, None, None, None ) for hash_id in query_hash_ids - hash_ids_i_have_info_for ] )
else: results = c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( query_hash_ids ) + ';', ( service_id, ) ).fetchall()
# filtering basic results
if system_predicates.CanPreFirstRoundLimit():
if len( results ) > limit: results = random.sample( results, limit )
else:
results = [ ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) for ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in results if system_predicates.OkFirstRound( width, height ) ]
if system_predicates.CanPreSecondRoundLimit():
if len( results ) > limit: results = random.sample( results, system_predicates.GetLimit() )
results.extend( [ ( hash_id, None, HC.APPLICATION_UNKNOWN, None, None, None, None, None, None ) for hash_id in hash_ids if hash_id not in hash_ids_i_have_info_for ] )
else: results = c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ).fetchall()
# get tagged results
hash_ids = [ result[0] for result in results ]
splayed_hash_ids = HC.SplayListForDB( hash_ids )
hash_ids_to_hashes = self._GetHashIdsToHashes( c, hash_ids )
@ -2240,25 +2263,19 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
# build it
limit = system_predicates.GetLimit()
include_current_tags = search_context.IncludeCurrentTags()
include_pending_tags = search_context.IncludePendingTags()
media_results = []
random.shuffle( results ) # important for system:limit
for ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in results:
if limit is not None and len( media_results ) >= limit: break
hash = hash_ids_to_hashes[ hash_id ]
if hash_id in hash_ids_to_tags: tags_dict = HC.BuildKeyToListDict( hash_ids_to_tags[ hash_id ] )
else: tags_dict = {}
#
# s_i : status : tags
inbox = hash_id in inbox_hash_ids
#
tags_dict = HC.BuildKeyToListDict( hash_ids_to_tags[ hash_id ] )
service_identifiers_to_statuses_to_tags = collections.defaultdict( HC.default_dict_set )
@ -2266,41 +2283,38 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
tags_manager = HydrusTags.TagsManager( self._tag_service_precedence, service_identifiers_to_statuses_to_tags )
if not system_predicates.OkSecondRound( tags_manager.GetNumTags( tag_service_identifier, include_current_tags = include_current_tags, include_pending_tags = include_pending_tags ) ): continue
#
inbox = hash_id in inbox_hash_ids
current_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_current_file_service_ids[ hash_id ] }
if hash_id in hash_ids_to_current_file_service_ids: current_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_current_file_service_ids[ hash_id ] }
else: current_file_service_identifiers = set()
deleted_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_deleted_file_service_ids[ hash_id ] }
if hash_id in hash_ids_to_deleted_file_service_ids: deleted_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_deleted_file_service_ids[ hash_id ] }
else: deleted_file_service_identifiers = set()
pending_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_pending_file_service_ids[ hash_id ] }
if hash_id in hash_ids_to_pending_file_service_ids: pending_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_pending_file_service_ids[ hash_id ] }
else: pending_file_service_identifiers = set()
if hash_id in hash_ids_to_petitioned_file_service_ids: petitioned_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_petitioned_file_service_ids[ hash_id ] }
else: petitioned_file_service_identifiers = set()
petitioned_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_petitioned_file_service_ids[ hash_id ] }
file_service_identifiers_cdpp = CC.CDPPFileServiceIdentifiers( current_file_service_identifiers, deleted_file_service_identifiers, pending_file_service_identifiers, petitioned_file_service_identifiers )
if hash_id in hash_ids_to_local_ratings: local_ratings = { service_ids_to_service_identifiers[ service_id ] : rating for ( service_id, rating ) in hash_ids_to_local_ratings[ hash_id ] }
else: local_ratings = {}
#
local_ratings = { service_ids_to_service_identifiers[ service_id ] : rating for ( service_id, rating ) in hash_ids_to_local_ratings[ hash_id ] }
local_ratings = CC.LocalRatings( local_ratings )
remote_ratings = {}
#
media_results.append( CC.MediaResult( ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, file_service_identifiers_cdpp, local_ratings, remote_ratings ) ) )
return CC.FileQueryResult( file_service_identifier, search_context.GetPredicates(), media_results )
return media_results
def _GetMediaResultsFromHashes( self, c, search_context, hashes ):
def _GetMediaResultsFromHashes( self, c, file_service_identifier, hashes ):
query_hash_ids = set( self._GetHashIds( c, hashes ) )
return self._GetMediaResults( c, search_context, query_hash_ids )
return self._GetMediaResults( c, file_service_identifier, query_hash_ids )
def _GetMessageSystemPredicates( self, c, identity ):
@ -3031,9 +3045,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if ( can_add or already_in_db ):
search_context = CC.FileSearchContext()
( media_result, ) = self._GetMediaResults( c, search_context, set( ( hash_id, ) ) )
( media_result, ) = self._GetMediaResults( c, HC.LOCAL_FILE_SERVICE_IDENTIFIER, { hash_id } )
return ( result, hash, media_result )
@ -3048,7 +3060,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
( result, hash, media_result ) = self._ImportFile( c, path, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, generate_media_result = True, url = url )
if media_result is not None: self.pub( 'add_media_result', page_key, media_result )
if media_result is not None: self.pub( 'add_media_results', page_key, ( media_result, ) )
self.pub( 'import_done', page_key, result )
@ -4191,6 +4203,8 @@ class DB( ServiceDB ):
def __init__( self ):
self._local_shutdown = False
self._db_path = HC.DB_DIR + os.path.sep + 'client.db'
self._jobs = Queue.PriorityQueue()
@ -4200,27 +4214,6 @@ class DB( ServiceDB ):
self._InitDB()
temp_dir = HC.TEMP_DIR
try:
def make_temp_files_deletable( function_called, path, traceback_gumpf ):
os.chmod( path, stat.S_IWRITE )
function_called( path ) # try again
if os.path.exists( temp_dir ): shutil.rmtree( temp_dir, onerror = make_temp_files_deletable )
except: pass
try:
if not os.path.exists( temp_dir ): os.mkdir( temp_dir )
except: pass
# clean up if last connection closed badly
( db, c ) = self._GetDBCursor()
@ -4268,8 +4261,6 @@ class DB( ServiceDB ):
if not self._CheckPassword(): raise HydrusExceptions.PermissionException( 'No password!' )
threading.Thread( target = self.MainLoop, name = 'Database Main Loop' ).start()
def _CheckPassword( self ):
@ -4704,41 +4695,6 @@ class DB( ServiceDB ):
def _InitPostGUI( self ):
port = HC.DEFAULT_LOCAL_FILE_PORT
local_file_server_service_identifier = HC.ServerServiceIdentifier( HC.LOCAL_FILE, port )
self._server = HydrusServer.HydrusHTTPServer( local_file_server_service_identifier )
server_thread = threading.Thread( target=self._server.serve_forever )
server_thread.start()
connection = httplib.HTTPConnection( '127.0.0.1:' + HC.u( port ) )
try:
connection.connect()
connection.close()
except:
message = 'Could not bind the client to port ' + HC.u( port )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
HC.DAEMONWorker( 'CheckImportFolders', DAEMONCheckImportFolders, ( 'notify_new_import_folders', ), period = 180 )
HC.DAEMONWorker( 'DownloadFiles', DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) )
HC.DAEMONWorker( 'DownloadThumbnails', DAEMONDownloadThumbnails, ( 'notify_new_permissions', 'notify_new_thumbnails' ) )
HC.DAEMONWorker( 'ResizeThumbnails', DAEMONResizeThumbnails, init_wait = 600 )
HC.DAEMONWorker( 'SynchroniseAccounts', DAEMONSynchroniseAccounts, ( 'notify_new_services', 'permissions_are_stale' ) )
HC.DAEMONWorker( 'SynchroniseMessages', DAEMONSynchroniseMessages, ( 'notify_new_permissions', 'notify_check_messages' ), period = 60 )
HC.DAEMONWorker( 'SynchroniseRepositoriesAndSubscriptions', DAEMONSynchroniseRepositoriesAndSubscriptions, ( 'notify_new_permissions', 'notify_new_subscriptions' ) )
HC.DAEMONQueue( 'FlushRepositoryUpdates', DAEMONFlushServiceUpdates, 'service_updates_delayed', period = 2 )
def _SaveOptions( self, c ):
( old_options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
@ -6483,7 +6439,7 @@ class DB( ServiceDB ):
elif action == 'booru': result = self._GetBooru( c, *args, **kwargs )
elif action == 'boorus': result = self._GetBoorus( c, *args, **kwargs )
elif action == 'contact_names': result = self._GetContactNames( c, *args, **kwargs )
elif action == 'do_file_query': result = self._DoFileQuery( c, *args, **kwargs )
elif action == 'file_query_ids': result = self._GetFileQueryIds( c, *args, **kwargs )
elif action == 'do_message_query': result = self._DoMessageQuery( c, *args, **kwargs )
elif action == 'downloads': result = self._GetDownloads( c, *args, **kwargs )
elif action == 'favourite_custom_filter_actions': result = self._GetFavouriteCustomFilterActions( c, *args, **kwargs )
@ -6495,6 +6451,7 @@ class DB( ServiceDB ):
elif action == 'import_folders': result = self._GetImportFolders( c, *args, **kwargs )
elif action == 'md5_status': result = self._GetMD5Status( c, *args, **kwargs )
elif action == 'media_results': result = self._GetMediaResultsFromHashes( c, *args, **kwargs )
elif action == 'media_results_from_ids': result = self._GetMediaResults( c, *args, **kwargs )
elif action == 'message_keys_to_download': result = self._GetMessageKeysToDownload( c, *args, **kwargs )
elif action == 'message_system_predicates': result = self._GetMessageSystemPredicates( c, *args, **kwargs )
elif action == 'messages_to_send': result = self._GetMessagesToSend( c, *args, **kwargs )
@ -6625,7 +6582,7 @@ class DB( ServiceDB ):
( db, c ) = self._GetDBCursor()
while not ( HC.shutdown and self._jobs.empty() ):
while not ( ( self._local_shutdown or HC.shutdown ) and self._jobs.empty() ):
try:
@ -6654,13 +6611,55 @@ class DB( ServiceDB ):
if action in ( 'service_info', 'system_predicates' ): job_type = 'read_write'
else: job_type = 'read'
job = HC.JobInternal( action, job_type, True, *args, **kwargs )
synchronous = True
job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
if HC.shutdown: raise Exception( 'Application has shutdown!' )
self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first
if action != 'do_file_query': return job.GetResult()
if synchronous: return job.GetResult()
def Shutdown( self ): self._local_shutdown = True
def StartDaemons( self ):
HC.DAEMONWorker( 'CheckImportFolders', DAEMONCheckImportFolders, ( 'notify_new_import_folders', ), period = 180 )
HC.DAEMONWorker( 'DownloadFiles', DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) )
HC.DAEMONWorker( 'DownloadThumbnails', DAEMONDownloadThumbnails, ( 'notify_new_permissions', 'notify_new_thumbnails' ) )
HC.DAEMONWorker( 'ResizeThumbnails', DAEMONResizeThumbnails, init_wait = 600 )
HC.DAEMONWorker( 'SynchroniseAccounts', DAEMONSynchroniseAccounts, ( 'notify_new_services', 'permissions_are_stale' ) )
HC.DAEMONWorker( 'SynchroniseMessages', DAEMONSynchroniseMessages, ( 'notify_new_permissions', 'notify_check_messages' ), period = 60 )
HC.DAEMONWorker( 'SynchroniseRepositoriesAndSubscriptions', DAEMONSynchroniseRepositoriesAndSubscriptions, ( 'notify_new_permissions', 'notify_new_subscriptions' ) )
HC.DAEMONQueue( 'FlushRepositoryUpdates', DAEMONFlushServiceUpdates, 'service_updates_delayed', period = 2 )
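# HC.DAEMONWorker pairs a callable with pubsub topics that can wake it early,
# plus an optional period and initial wait; a hypothetical minimal version of
# that pattern (the real implementation lives in HydrusConstants and handles
# the topic wakeups properly):
def daemon_worker_sketch( name, callable, topics = (), period = 1200, init_wait = 3 ):
    def loop():
        time.sleep( init_wait )
        while not HC.shutdown:
            callable() # the real worker also wakes early when any of the topics fire
            time.sleep( period )
    threading.Thread( target = loop, name = name ).start()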
def StartServer( self ):
port = HC.DEFAULT_LOCAL_FILE_PORT
local_file_server_service_identifier = HC.ServerServiceIdentifier( HC.LOCAL_FILE, port )
self._server = HydrusServer.HydrusHTTPServer( local_file_server_service_identifier )
server_thread = threading.Thread( target=self._server.serve_forever )
server_thread.start()
connection = httplib.HTTPConnection( '127.0.0.1:' + HC.u( port ) )
try:
connection.connect()
connection.close()
except:
message = 'Could not bind the client to port ' + HC.u( port )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
def WaitUntilGoodTimeToUseDBThread( self ):
@ -6811,7 +6810,7 @@ def DAEMONDownloadFiles():
for hash in hashes:
( media_result, ) = HC.app.ReadDaemon( 'media_results', CC.FileSearchContext(), ( hash, ) )
( media_result, ) = HC.app.ReadDaemon( 'media_results', HC.COMBINED_FILE_SERVICE_IDENTIFIER, ( hash, ) )
service_identifiers = list( media_result.GetFileServiceIdentifiersCDPP().GetCurrent() )

View File

@ -140,7 +140,7 @@ class FrameGUI( ClientGUICommon.Frame ):
( upload_hashes, update ) = result
media_results = HC.app.Read( 'media_results', CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER ), upload_hashes )
media_results = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, upload_hashes )
num_uploads = len( media_results )
@ -195,7 +195,7 @@ class FrameGUI( ClientGUICommon.Frame ):
content_updates = []
media_results = HC.app.Read( 'media_results', CC.FileSearchContext(), good_hashes )
media_results = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, good_hashes )
for media_result in media_results:

View File

@ -679,7 +679,7 @@ class CanvasFullscreenMediaList( ClientGUIMixins.ListeningMediaList, Canvas, Cli
def _GetIndexString( self ):
index_string = HC.ConvertIntToPrettyString( self._sorted_media_to_indices[ self._current_media ] + 1 ) + os.path.sep + HC.ConvertIntToPrettyString( len( self._sorted_media ) )
index_string = HC.ConvertIntToPrettyString( self._sorted_media.index( self._current_media ) + 1 ) + os.path.sep + HC.ConvertIntToPrettyString( len( self._sorted_media ) )
return index_string
@ -870,11 +870,11 @@ class CanvasFullscreenMediaList( ClientGUIMixins.ListeningMediaList, Canvas, Cli
def AddMediaResult( self, page_key, media_result ):
def AddMediaResults( self, page_key, media_results ):
if page_key == self._page_key:
ClientGUIMixins.ListeningMediaList.AddMediaResult( self, media_result )
ClientGUIMixins.ListeningMediaList.AddMediaResults( self, media_results )
self._DrawBackgroundBitmap()
@ -1016,7 +1016,7 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaList ):
if first_hash is None: self.SetMedia( self._GetFirst() )
else: self.SetMedia( self._GetMedia( { first_hash } )[0] )
HC.pubsub.sub( self, 'AddMediaResult', 'add_media_result' )
HC.pubsub.sub( self, 'AddMediaResults', 'add_media_results' )
def _Archive( self ): HC.app.Write( 'content_updates', { HC.LOCAL_FILE_SERVICE_IDENTIFIER : [ HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, ( self._current_media.GetHash(), ) ) ] } )
@ -1289,7 +1289,7 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ):
FullscreenPopoutFilterCustom( self )
HC.pubsub.sub( self, 'AddMediaResult', 'add_media_result' )
HC.pubsub.sub( self, 'AddMediaResults', 'add_media_results' )
def _Archive( self ): HC.app.Write( 'content_updates', { HC.LOCAL_FILE_SERVICE_IDENTIFIER : [ HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, ( self._current_media.GetHash(), ) ) ] } )
@ -2280,7 +2280,7 @@ class RatingsFilterFrameNumerical( ClientGUICommon.Frame ):
self._media_still_to_rate = { ClientGUIMixins.MediaSingleton( media_result ) for media_result in media_results }
self._current_media_to_rate = None
self._file_query_result = CC.FileQueryResult( HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
self._file_query_result = CC.FileQueryResult( media_results )
if service_identifier.GetType() == HC.LOCAL_RATING_LIKE: self._score_gap = 1.0
else:
@ -2526,7 +2526,7 @@ class RatingsFilterFrameNumerical( ClientGUICommon.Frame ):
hash = media_result_to_rate_against.GetHash()
if hash in self._file_query_result.GetHashes(): media_result_to_rate_against = self._file_query_result.GetMediaResult( hash )
else: self._file_query_result.AddMediaResult( media_result_to_rate_against )
else: self._file_query_result.AddMediaResults( ( media_result_to_rate_against, ) )
media_to_rate_against = ClientGUIMixins.MediaSingleton( media_result_to_rate_against )

View File

@ -121,17 +121,20 @@ class AutoCompleteDropdown( wx.TextCtrl ):
tlp.Bind( wx.EVT_MOVE, self.EventMove )
wx.CallAfter( self._UpdateList )
def _BroadcastChoice( self, predicate ): pass
def BroadcastChoice( self, predicate ):
if self.GetValue() != '':
self.SetValue( '' )
self._BroadcastChoice( predicate )
self.Clear()
wx.CallAfter( self._UpdateList )
def _HideDropdown( self ): self._dropdown_window.Show( False )
@ -242,8 +245,6 @@ class AutoCompleteDropdown( wx.TextCtrl ):
def EventSetFocus( self, event ):
self._UpdateList()
self._ShowDropdownIfFocussed()
event.Skip()
@ -251,12 +252,8 @@ class AutoCompleteDropdown( wx.TextCtrl ):
def EventText( self, event ):
num_chars = len( self.GetValue() )
if num_chars == 0: lag = 0
else: lag = 150
#lag = 0
self._lag_timer.Start( lag, wx.TIMER_ONE_SHOT )
if len( self.GetValue() ) == 0: self._UpdateList()
else: self._lag_timer.Start( 150, wx.TIMER_ONE_SHOT )
class AutoCompleteDropdownContacts( AutoCompleteDropdown ):
@ -2069,12 +2066,8 @@ class PopupMessageFiles( PopupMessage ):
def EventButton( self, event ):
search_context = CC.FileSearchContext()
unsorted_file_query_result = HC.app.Read( 'media_results', search_context, self._hashes )
media_results = { media_result for media_result in unsorted_file_query_result }
media_results = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, self._hashes )
HC.pubsub.pub( 'new_page_query', HC.LOCAL_FILE_SERVICE_IDENTIFIER, initial_media_results = media_results )
@ -2132,7 +2125,7 @@ class PopupMessageManager( wx.Frame ):
self._old_show_exception = HC.ShowException
sys.excepthook = CC.CatchExceptionClient
#HC.ShowException = CC.ShowExceptionClient
HC.ShowException = CC.ShowExceptionClient
def _CheckPending( self ):
@ -3287,9 +3280,9 @@ class TagsBoxCPP( TagsBox ):
self._tag_service_identifier = HC.COMBINED_TAG_SERVICE_IDENTIFIER
self._last_media = None
self._current_tags_to_count = {}
self._pending_tags_to_count = {}
self._petitioned_tags_to_count = {}
self._current_tags_to_count = collections.Counter()
self._pending_tags_to_count = collections.Counter()
self._petitioned_tags_to_count = collections.Counter()
HC.pubsub.sub( self, 'SetTagsByMedia', 'new_tags_selection' )
HC.pubsub.sub( self, 'ChangeTagRepository', 'change_tag_repository' )
@ -3302,6 +3295,38 @@ class TagsBoxCPP( TagsBox ):
HC.pubsub.pub( 'add_predicate', self._page_key, predicate )
def _RecalcStrings( self ):
siblings_manager = HC.app.GetTagSiblingsManager()
all_current = ( tag for tag in self._current_tags_to_count if self._current_tags_to_count[ tag ] > 0 )
all_pending = ( tag for tag in self._pending_tags_to_count if self._pending_tags_to_count[ tag ] > 0 )
all_petitioned = ( tag for tag in self._petitioned_tags_to_count if self._petitioned_tags_to_count[ tag ] > 0 )
all_tags = set( itertools.chain( all_current, all_pending, all_petitioned ) )
self._ordered_strings = []
self._strings_to_terms = {}
for tag in all_tags:
tag_string = tag
if tag in self._current_tags_to_count: tag_string += ' (' + HC.ConvertIntToPrettyString( self._current_tags_to_count[ tag ] ) + ')'
if tag in self._pending_tags_to_count: tag_string += ' (+' + HC.ConvertIntToPrettyString( self._pending_tags_to_count[ tag ] ) + ')'
if tag in self._petitioned_tags_to_count: tag_string += ' (-' + HC.ConvertIntToPrettyString( self._petitioned_tags_to_count[ tag ] ) + ')'
sibling = siblings_manager.GetSibling( tag )
if sibling is not None: tag_string += ' (' + sibling + ')'
self._ordered_strings.append( tag_string )
self._strings_to_terms[ tag_string ] = tag
self._SortTags()
def _SortTags( self ):
if self._sort == CC.SORT_BY_LEXICOGRAPHIC_ASC: compare_function = lambda a, b: cmp( a, b )
@ -3345,46 +3370,56 @@ class TagsBoxCPP( TagsBox ):
current_tags_to_count = siblings_manager.CollapseTagsToCount( current_tags_to_count )
if current_tags_to_count != self._current_tags_to_count or pending_tags_to_count != self._pending_tags_to_count or petitioned_tags_to_count != self._petitioned_tags_to_count:
self._current_tags_to_count = current_tags_to_count
self._pending_tags_to_count = pending_tags_to_count
self._petitioned_tags_to_count = petitioned_tags_to_count
all_tags = { tag for tag in self._current_tags_to_count.keys() + self._pending_tags_to_count.keys() + self._petitioned_tags_to_count.keys() }
self._ordered_strings = []
self._strings_to_terms = {}
for tag in all_tags:
tag_string = tag
if tag in self._current_tags_to_count: tag_string += ' (' + HC.ConvertIntToPrettyString( self._current_tags_to_count[ tag ] ) + ')'
if tag in self._pending_tags_to_count: tag_string += ' (+' + HC.ConvertIntToPrettyString( self._pending_tags_to_count[ tag ] ) + ')'
if tag in self._petitioned_tags_to_count: tag_string += ' (-' + HC.ConvertIntToPrettyString( self._petitioned_tags_to_count[ tag ] ) + ')'
sibling = siblings_manager.GetSibling( tag )
if sibling is not None: tag_string += ' (' + sibling + ')'
self._ordered_strings.append( tag_string )
self._strings_to_terms[ tag_string ] = tag
self._SortTags()
self._current_tags_to_count = current_tags_to_count
self._pending_tags_to_count = pending_tags_to_count
self._petitioned_tags_to_count = petitioned_tags_to_count
self._RecalcStrings()
def SetTagsByMedia( self, page_key, media ):
def SetTagsByMedia( self, page_key, media, force_reload = False ):
if page_key == self._page_key:
media = set( media )
if force_reload:
( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = CC.GetMediasTagCount( media, self._tag_service_identifier )
self.SetTags( current_tags_to_count, pending_tags_to_count, petitioned_tags_to_count )
else:
if self._last_media is None: ( removees, adds ) = ( set(), media )
else:
removees = self._last_media.difference( media )
adds = media.difference( self._last_media )
siblings_manager = HC.app.GetTagSiblingsManager()
( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = CC.GetMediasTagCount( removees, self._tag_service_identifier )
current_tags_to_count = siblings_manager.CollapseTagsToCount( current_tags_to_count )
self._current_tags_to_count.subtract( current_tags_to_count )
self._pending_tags_to_count.subtract( pending_tags_to_count )
self._petitioned_tags_to_count.subtract( petitioned_tags_to_count )
( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = CC.GetMediasTagCount( adds, self._tag_service_identifier )
current_tags_to_count = siblings_manager.CollapseTagsToCount( current_tags_to_count )
self._current_tags_to_count.update( current_tags_to_count )
self._pending_tags_to_count.update( pending_tags_to_count )
self._petitioned_tags_to_count.update( petitioned_tags_to_count )
self._last_media = media
( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = CC.GetMediasTagCount( media, self._tag_service_identifier )
self.SetTags( current_tags_to_count, pending_tags_to_count, petitioned_tags_to_count )
self._RecalcStrings()
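Switching the three tag counts to collections.Counter is what makes the incremental path above work: on a selection change, only the medias that left and entered the selection are counted, then subtracted from and added to the running totals. A small sketch of the pattern (my numbers, hypothetical tags):

import collections

current = collections.Counter( { 'blue eyes' : 5, 'smile' : 3 } )

leaving = collections.Counter( { 'blue eyes' : 2 } ) # removees
arriving = collections.Counter( { 'smile' : 1, 'hat' : 1 } ) # adds

current.subtract( leaving ) # in-place; zero and negative entries are kept
current.update( arriving )

# display filters out the zeroed entries, as _RecalcStrings does
print( { tag : count for ( tag, count ) in current.items() if count > 0 } )
# {'blue eyes': 3, 'smile': 4, 'hat': 1}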

View File

@ -1249,7 +1249,7 @@ class DialogInputFileSystemPredicate( Dialog ):
self.SetInitialSize( ( x, y ) )
wx.CallAfter( self._self._value_numerical.SetFocus )
wx.CallAfter( self._value_numerical.SetFocus )
def Ratio():
@ -1585,7 +1585,7 @@ class DialogInputFileSystemPredicate( Dialog ):
info = ( service_identifier, operator, value )
elif self._type == HC.SYSTEM_PREDICATE_TYPE_RATIO: info = ( self._sign.GetStringSelection(), float( ( self._width.GetValue() ) / float( self._height.GetValue() ) ) )
elif self._type == HC.SYSTEM_PREDICATE_TYPE_RATIO: info = ( self._sign.GetStringSelection(), self._width.GetValue(), self._height.GetValue() )
elif self._type == HC.SYSTEM_PREDICATE_TYPE_SIZE: info = ( self._sign.GetStringSelection(), self._size.GetValue(), HC.ConvertUnitToInteger( self._unit.GetStringSelection() ) )
elif self._type == HC.SYSTEM_PREDICATE_TYPE_WIDTH: info = ( self._sign.GetStringSelection(), self._width.GetValue() )
elif self._type == HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO:

View File

@ -1717,9 +1717,9 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
elif status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', CC.FileSearchContext(), ( hash, ) )
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
if do_tags:
@ -2056,9 +2056,9 @@ class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ):
if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
elif status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', CC.FileSearchContext(), ( hash, ) )
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
else:
@ -2250,9 +2250,9 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
elif status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', CC.FileSearchContext(), ( hash, ) )
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
else:
@ -2266,9 +2266,9 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
elif status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', CC.FileSearchContext(), ( hash, ) )
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
else:
@ -2504,11 +2504,9 @@ class ManagementPanelPetitions( ManagementPanel ):
if self._can_ban: self._modify_petitioner.Enable()
search_context = CC.FileSearchContext( self._file_service_identifier )
with wx.BusyCursor(): media_results = HC.app.Read( 'media_results', self._file_service_identifier, self._current_petition.GetHashes() )
with wx.BusyCursor(): file_query_result = HC.app.Read( 'media_results', search_context, self._current_petition.GetHashes() )
panel = ClientGUIMedia.MediaPanelThumbnails( self._page, self._page_key, self._file_service_identifier, [], file_query_result )
panel = ClientGUIMedia.MediaPanelThumbnails( self._page, self._page_key, self._file_service_identifier, [], media_results )
panel.Collect( self._page_key, self._collect_by.GetChoice() )
@ -2611,7 +2609,7 @@ class ManagementPanelQuery( ManagementPanel ):
ManagementPanel.__init__( self, parent, page, page_key, file_service_identifier )
self._query_key = os.urandom( 32 )
self._query_key = HC.QueryKey()
self._synchronised = True
self._include_current_tags = True
self._include_pending_tags = True
@ -2638,6 +2636,7 @@ class ManagementPanelQuery( ManagementPanel ):
if len( initial_predicates ) > 0: wx.CallAfter( self._DoQuery )
HC.pubsub.sub( self, 'AddMediaResultsFromQuery', 'add_media_results_from_query' )
HC.pubsub.sub( self, 'AddPredicate', 'add_predicate' )
HC.pubsub.sub( self, 'ChangeFileRepository', 'change_file_repository' )
HC.pubsub.sub( self, 'ChangeTagRepository', 'change_tag_repository' )
@ -2651,6 +2650,10 @@ class ManagementPanelQuery( ManagementPanel ):
def _DoQuery( self ):
self._query_key.Cancel()
self._query_key = HC.QueryKey()
if self._synchronised:
try:
@ -2659,14 +2662,12 @@ class ManagementPanelQuery( ManagementPanel ):
if len( current_predicates ) > 0:
self._query_key = os.urandom( 32 )
include_current = self._include_current_tags
include_pending = self._include_pending_tags
search_context = CC.FileSearchContext( self._file_service_identifier, self._tag_service_identifier, include_current, include_pending, current_predicates )
HC.app.Read( 'do_file_query', self._query_key, search_context )
HC.app.StartFileQuery( self._query_key, search_context )
panel = ClientGUIMedia.MediaPanelLoading( self._page, self._page_key, self._file_service_identifier )
@ -2678,6 +2679,11 @@ class ManagementPanelQuery( ManagementPanel ):
def AddMediaResultsFromQuery( self, query_key, media_results ):
if query_key == self._query_key: HC.pubsub.pub( 'add_media_results', self._page_key, media_results, append = False )
def AddPredicate( self, page_key, predicate ):
if page_key == self._page_key:
@ -2782,7 +2788,7 @@ class ManagementPanelQuery( ManagementPanel ):
if page_key == self._page_key: self._searchbox.SetFocus()
def ShowQuery( self, query_key, file_query_result ):
def ShowQuery( self, query_key, media_results ):
try:
@ -2790,7 +2796,7 @@ class ManagementPanelQuery( ManagementPanel ):
current_predicates = self._current_predicates_box.GetPredicates()
panel = ClientGUIMedia.MediaPanelThumbnails( self._page, self._page_key, self._file_service_identifier, current_predicates, file_query_result )
panel = ClientGUIMedia.MediaPanelThumbnails( self._page, self._page_key, self._file_service_identifier, current_predicates, media_results )
panel.Collect( self._page_key, self._collect_by.GetChoice() )
@ -2813,7 +2819,7 @@ class ManagementPanelMessages( wx.ScrolledWindow ):
self._page_key = page_key
self._identity = identity
self._query_key = os.urandom( 32 )
self._query_key = HC.QueryKey()
# sort out push-refresh later
#self._refresh_inbox = wx.Button( self, label = 'refresh inbox' )
@ -2868,10 +2874,12 @@ class ManagementPanelMessages( wx.ScrolledWindow ):
HC.pubsub.pub( 'set_conversations', self._page_key, [] )
self._query_key.Cancel()
self._query_key = HC.QueryKey()
if len( current_predicates ) > 0:
self._query_key = os.urandom( 32 )
search_context = ClientConstantsMessages.MessageSearchContext( self._identity, current_predicates )
HC.app.Read( 'do_message_query', self._query_key, search_context )

File diff suppressed because it is too large

View File

@ -1,7 +1,9 @@
import bisect
import collections
import ClientConstants as CC
import HydrusConstants as HC
import HydrusTags
import os
import random
import time
import traceback
@ -9,28 +11,71 @@ import wx
class Media():
def __init__( self ): pass
def __init__( self ):
self._id = os.urandom( 32 )
def __eq__( self, other ): return self.__hash__() == other.__hash__()
def __hash__( self ): return self._id.__hash__()
def __ne__( self, other ): return self.__hash__() != other.__hash__()
class MediaList():
def __init__( self, file_service_identifier, predicates, file_query_result ):
def __init__( self, file_service_identifier, predicates, media_results ):
self._file_service_identifier = file_service_identifier
self._predicates = predicates
self._file_query_result = file_query_result
self._sort_by = CC.SORT_BY_SMALLEST
self._collect_by = None
self._sorted_media = [ self._GenerateMediaSingleton( media_result ) for media_result in file_query_result ]
self._sorted_media_to_indices = { media : index for ( index, media ) in enumerate( self._sorted_media ) }
self._collect_map_singletons = {}
self._collect_map_collected = {}
self._sorted_media = HC.SortedList( [ self._GenerateMediaSingleton( media_result ) for media_result in media_results ] )
self._singleton_media = set( self._sorted_media )
self._collected_media = set()
def _CalculateCollectionKeysToMedias( self, collect_by, medias ):
namespaces_to_collect_by = [ data for ( collect_by_type, data ) in collect_by if collect_by_type == 'namespace' ]
ratings_to_collect_by = [ data for ( collect_by_type, data ) in collect_by if collect_by_type == 'rating' ]
local_ratings_to_collect_by = [ service_identifier for service_identifier in ratings_to_collect_by if service_identifier.GetType() in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ]
remote_ratings_to_collect_by = [ service_identifier for service_identifier in ratings_to_collect_by if service_identifier.GetType() in ( HC.RATING_LIKE_REPOSITORY, HC.RATING_NUMERICAL_REPOSITORY ) ]
keys_to_medias = collections.defaultdict( list )
for media in medias:
if len( namespaces_to_collect_by ) > 0: namespace_key = media.GetTagsManager().GetNamespaceSlice( namespaces_to_collect_by )
else: namespace_key = None
if len( ratings_to_collect_by ) > 0:
( local_ratings, remote_ratings ) = media.GetRatings()
if len( local_ratings_to_collect_by ) > 0: local_rating_key = local_ratings.GetRatingSlice( local_ratings_to_collect_by )
else: local_rating_key = None
if len( remote_ratings_to_collect_by ) > 0: remote_rating_key = remote_ratings.GetRatingSlice( remote_ratings_to_collect_by )
else: remote_rating_key = None
rating_key = ( local_rating_key, remote_rating_key )
else: rating_key = None
keys_to_medias[ ( namespace_key, rating_key ) ].append( media )
return keys_to_medias
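_CalculateCollectionKeysToMedias is a plain group-by: every media is reduced to a hashable ( namespace_key, rating_key ) and medias sharing a key land in the same bucket. Stripped to its core (the pairs standing in for media objects are hypothetical):

import collections

# hypothetical ( filename, creator ) pairs standing in for media objects
medias = [ ( 'a.png', 'creator:x' ), ( 'b.png', 'creator:x' ), ( 'c.png', 'creator:y' ) ]

keys_to_medias = collections.defaultdict( list )

for media in medias:
    
    ( name, namespace_key ) = media # the real key also folds in rating slices
    
    keys_to_medias[ namespace_key ].append( media )
    

# buckets of one stay singletons, larger buckets become collections
singletons = [ bucket[0] for bucket in keys_to_medias.values() if len( bucket ) == 1 ]
collected = [ bucket for bucket in keys_to_medias.values() if len( bucket ) > 1 ]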
def _GenerateMediaCollection( self, media_results ): return MediaCollection( self._file_service_identifier, self._predicates, media_results )
def _GenerateMediaSingleton( self, media_result ): return MediaSingleton( media_result )
@ -61,7 +106,7 @@ class MediaList():
if media is None: return None
next_index = self._sorted_media_to_indices[ media ] + 1
next_index = self._sorted_media.index( media ) + 1
if next_index == len( self._sorted_media ): return self._GetFirst()
else: return self._sorted_media[ next_index ]
@ -71,7 +116,7 @@ class MediaList():
if media is None: return None
previous_index = self._sorted_media_to_indices[ media ] - 1
previous_index = self._sorted_media.index( media ) - 1
if previous_index == -1: return self._GetLast()
else: return self._sorted_media[ previous_index ]
@ -79,81 +124,46 @@ class MediaList():
def _RemoveMedia( self, singleton_media, collected_media ):
if type( singleton_media ) != set: singleton_media = set( singleton_media )
if type( collected_media ) != set: collected_media = set( collected_media )
self._singleton_media.difference_update( singleton_media )
self._collected_media.difference_update( collected_media )
self._sorted_media = [ media for media in self._sorted_media if media in self._singleton_media or media in self._collected_media ]
self._sorted_media_to_indices = { media : index for ( index, media ) in enumerate( self._sorted_media ) }
keys_to_remove = [ key for ( key, media ) in self._collect_map_singletons.items() if media in singleton_media ]
for key in keys_to_remove: del self._collect_map_singletons[ key ]
keys_to_remove = [ key for ( key, media ) in self._collect_map_collected.items() if media in collected_media ]
for key in keys_to_remove: del self._collect_map_collected[ key ]
self._sorted_media.remove_items( singleton_media.union( collected_media ) )
def AddMediaResult( self, media_result ):
def Collect( self, collect_by = -1 ):
self._file_query_result.AddMediaResult( media_result )
if collect_by == -1: collect_by = self._collect_by
hash = media_result.GetHash()
self._collect_by = collect_by
if hash in self._GetHashes(): return
for media in self._collected_media: self._singleton_media.update( [ self._GenerateMediaSingleton( media_result ) for media_result in media.GenerateMediaResults() ] )
media = self._GenerateMediaSingleton( media_result )
self._collected_media = set()
# turn this little bit into a medialist call, yo
# but be careful of media vs media_result
self._singleton_media.add( media )
self._sorted_media.append( media )
self._sorted_media_to_indices[ media ] = len( self._sorted_media ) - 1
self._collect_map_singletons = {}
self._collect_map_collected = {}
return media
def Collect( self, collect_by ):
try:
if collect_by is not None:
for media in self._collected_media: self._singleton_media.update( [ self._GenerateMediaSingleton( media_result ) for media_result in media.GenerateMediaResults() ] )
keys_to_medias = self._CalculateCollectionKeysToMedias( collect_by, self._singleton_media )
self._collected_media = set()
self._collect_map_singletons = { key : medias[0] for ( key, medias ) in keys_to_medias.items() if len( medias ) == 1 }
self._collect_map_collected = { key : self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] ) for ( key, medias ) in keys_to_medias.items() if len( medias ) > 1 }
if collect_by is not None:
namespaces_to_collect_by = [ data for ( collect_by_type, data ) in collect_by if collect_by_type == 'namespace' ]
ratings_to_collect_by = [ data for ( collect_by_type, data ) in collect_by if collect_by_type == 'rating' ]
local_ratings_to_collect_by = [ service_identifier for service_identifier in ratings_to_collect_by if service_identifier.GetType() in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ]
remote_ratings_to_collect_by = [ service_identifier for service_identifier in ratings_to_collect_by if service_identifier.GetType() in ( HC.RATING_LIKE_REPOSITORY, HC.RATING_NUMERICAL_REPOSITORY ) ]
singletons = set()
keys_to_medias = collections.defaultdict( list )
for media in self._singleton_media:
if len( namespaces_to_collect_by ) > 0: namespace_key = media.GetTagsManager().GetNamespaceSlice( namespaces_to_collect_by )
else: namespace_key = None
if len( ratings_to_collect_by ) > 0:
( local_ratings, remote_ratings ) = media.GetRatings()
if len( local_ratings_to_collect_by ) > 0: local_rating_key = local_ratings.GetRatingSlice( local_ratings_to_collect_by )
else: local_rating_key = None
if len( remote_ratings_to_collect_by ) > 0: remote_rating_key = remote_ratings.GetRatingSlice( remote_ratings_to_collect_by )
else: remote_rating_key = None
rating_key = ( local_rating_key, remote_rating_key )
else: rating_key = None
keys_to_medias[ ( namespace_key, rating_key ) ].append( media )
self._singleton_media = set( [ medias[0] for medias in keys_to_medias.values() if len( medias ) == 1 ] )
self._collected_media = set( [ self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] ) for medias in keys_to_medias.values() if len( medias ) > 1 ] )
self._singleton_media = set( self._collect_map_singletons.values() )
self._collected_media = set( self._collect_map_collected.values() )
self._sorted_media = list( self._singleton_media ) + list( self._collected_media )
except: wx.MessageBox( traceback.format_exc() )
self._sorted_media = HC.SortedList( list( self._singleton_media ) + list( self._collected_media ) )
def DeletePending( self, service_identifier ):
@ -202,7 +212,7 @@ class MediaList():
return flat_media
def GetMediaIndex( self, media ): return self._sorted_media_to_indices[ media ]
def GetMediaIndex( self, media ): return self._sorted_media.index( media )
def GetSortedMedia( self ): return self._sorted_media
@ -235,27 +245,7 @@ class MediaList():
if data_type == HC.CONTENT_DATA_TYPE_FILES:
if action == HC.CONTENT_UPDATE_ARCHIVE:
if HC.SYSTEM_PREDICATE_INBOX in self._predicates:
affected_singleton_media = self._GetMedia( hashes, 'singletons' )
affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ]
self._RemoveMedia( affected_singleton_media, affected_collected_media )
elif action == HC.CONTENT_UPDATE_INBOX:
if HC.SYSTEM_PREDICATE_ARCHIVE in self._predicates:
affected_singleton_media = self._GetMedia( hashes, 'singletons' )
affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ]
self._RemoveMedia( affected_singleton_media, affected_collected_media )
elif action == HC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier:
if action == HC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier:
affected_singleton_media = self._GetMedia( hashes, 'singletons' )
affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ]
@ -296,33 +286,35 @@ class MediaList():
def Sort( self, sort_by ):
def Sort( self, sort_by = None ):
for media in self._collected_media: media.Sort( sort_by )
if sort_by is None: sort_by = self._sort_by
self._sort_by = sort_by
( sort_by_type, sort_by_data ) = sort_by
if sort_by_type == 'system':
if sort_by_data == CC.SORT_BY_RANDOM: random.shuffle( self._sorted_media )
else:
if sort_by_data == CC.SORT_BY_SMALLEST: compare_function = lambda x, y: cmp( x.GetSize(), y.GetSize() )
elif sort_by_data == CC.SORT_BY_LARGEST: compare_function = lambda x, y: cmp( y.GetSize(), x.GetSize() )
elif sort_by_data == CC.SORT_BY_SHORTEST: compare_function = lambda x, y: cmp( x.GetDuration(), y.GetDuration() )
elif sort_by_data == CC.SORT_BY_LONGEST: compare_function = lambda x, y: cmp( y.GetDuration(), x.GetDuration() )
elif sort_by_data == CC.SORT_BY_OLDEST: compare_function = lambda x, y: cmp( x.GetTimestamp(), y.GetTimestamp() )
elif sort_by_data == CC.SORT_BY_NEWEST: compare_function = lambda x, y: cmp( y.GetTimestamp(), x.GetTimestamp() )
elif sort_by_data == CC.SORT_BY_MIME: compare_function = lambda x, y: cmp( x.GetMime(), y.GetMime() )
self._sorted_media.sort( compare_function )
if sort_by_data == CC.SORT_BY_RANDOM: sort_function = lambda x: random.random()
elif sort_by_data == CC.SORT_BY_SMALLEST: sort_function = lambda x: x.GetSize()
elif sort_by_data == CC.SORT_BY_LARGEST: sort_function = lambda x: -x.GetSize()
elif sort_by_data == CC.SORT_BY_SHORTEST: sort_function = lambda x: x.GetDuration()
elif sort_by_data == CC.SORT_BY_LONGEST: sort_function = lambda x: -x.GetDuration()
elif sort_by_data == CC.SORT_BY_OLDEST: sort_function = lambda x: x.GetTimestamp()
elif sort_by_data == CC.SORT_BY_NEWEST: sort_function = lambda x: -x.GetTimestamp()
elif sort_by_data == CC.SORT_BY_MIME: sort_function = lambda x: x.GetMime()
elif sort_by_type == 'namespaces':
def namespace_compare( x, y ):
def namespace_sort_function( namespaces, x ):
x_tags_manager = x.GetTagsManager()
y_tags_manager = y.GetTagsManager()
return [ x_tags_manager.GetComparableNamespaceSlice( ( namespace, ) ) for namespace in namespaces ]
'''
for namespace in sort_by_data:
x_namespace_slice = x_tags_manager.GetNamespaceSlice( ( namespace, ) )
@ -366,52 +358,143 @@ class MediaList():
return cmp( x.GetSize(), y.GetSize() )
'''
self._sorted_media.sort( namespace_compare )
sort_function = lambda x: namespace_sort_function( sort_by_data, x )
elif sort_by_type in ( 'rating_descend', 'rating_ascend' ):
service_identifier = sort_by_data
service_type = service_identifier.GetType()
def ratings_compare( x, y ):
def ratings_sort_function( service_identifier, reverse, x ):
( x_local_ratings, x_remote_ratings ) = x.GetRatings()
( y_local_ratings, y_remote_ratings ) = y.GetRatings()
# btw None is always considered less than an int in cmp( int, None )
if service_identifier.GetType() in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): rating = x_local_ratings.GetRating( service_identifier )
else: rating = x_remote_ratings.GetScore( service_identifier )
if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): return cmp( x_local_ratings.GetRating( service_identifier ), y_local_ratings.GetRating( service_identifier ) )
else: return cmp( x_remote_ratings.GetScore( service_identifier ), y_remote_ratings.GetScore( service_identifier ) )
if reverse: rating *= -1
return rating
reverse = sort_by_type == 'rating_descend'
self._sorted_media.sort( ratings_compare, reverse = reverse )
sort_function = lambda x: ratings_sort_function( service_identifier, reverse, x )
for media in self._collected_media: media.Sort( sort_by )
self._sorted_media_to_indices = { media : index for ( index, media ) in enumerate( self._sorted_media ) }
self._sorted_media.sort( sort_function )
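The sorting rework above swaps two-argument cmp comparators for one-argument key functions: the key is computed once per item instead of once per comparison, descending numeric orders become negated keys, and random order goes through the same code path as everything else. (cmp was also removed in Python 3, so this future-proofs the sort.) A tiny sketch with hypothetical records:

import random

files = [ { 'size' : 300 }, { 'size' : 100 }, { 'size' : 200 } ]

# old: files.sort( lambda x, y: cmp( y[ 'size' ], x[ 'size' ] ) )
files.sort( key = lambda x: -x[ 'size' ] ) # largest first, key computed once per item

files.sort( key = lambda x: random.random() ) # a fresh random key per item is an unbiased shuffle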
class ListeningMediaList( MediaList ):
def __init__( self, *args ):
def __init__( self, file_service_identifier, predicates, media_results ):
MediaList.__init__( self, *args )
MediaList.__init__( self, file_service_identifier, predicates, media_results )
self._file_query_result = CC.FileQueryResult( media_results )
HC.pubsub.sub( self, 'ProcessContentUpdates', 'content_updates_gui' )
HC.pubsub.sub( self, 'ProcessServiceUpdates', 'service_updates_gui' )
def AddMediaResults( self, media_results, append = True ):
self._file_query_result.AddMediaResults( media_results )
existing_hashes = self._GetHashes()
new_media = []
for media_result in media_results:
hash = media_result.GetHash()
if hash in existing_hashes: continue
new_media.append( self._GenerateMediaSingleton( media_result ) )
if append:
self._singleton_media.update( new_media )
self._sorted_media.append_items( new_media )
else:
if self._collect_by is not None:
keys_to_medias = self._CalculateCollectionKeysToMedias( self._collect_by, new_media )
new_media = []
for ( key, medias ) in keys_to_medias.items():
if key in self._collect_map_singletons:
singleton_media = self._collect_map_singletons[ key ]
self._sorted_media.remove_items( singleton_media )
self._singleton_media.discard( singleton_media )
del self._collect_map_singletons[ key ]
medias.append( singleton_media )
collected_media = self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] )
collected_media.Sort( self._sort_by )
self._collected_media.add( collected_media )
self._collect_map_collected[ key ] = collected_media
new_media.append( collected_media )
elif key in self._collect_map_collected:
collected_media = self._collect_map_collected[ key ]
self._sorted_media.remove_items( collected_media )
# mediacollection needs addmediaresult with efficient recalcinternals
collected_media.MagicalAddMediasOrMediaResultsWhatever( medias )
collected_media.Sort( self._sort_by )
new_media.append( collected_media )
elif len( medias ) == 1:
( singleton_media, ) = medias
self._singleton_media.add( singleton_media )
self._collect_map_singletons[ key ] = singleton_media
else:
collected_media = self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] )
collected_media.Sort( self._sort_by )
self._collected_media.add( collected_media )
self._collect_map_collected[ key ] = collected_media
new_media.append( collected_media )
self._sorted_media.insert_items( new_media )
return new_media
class MediaCollection( MediaList, Media ):
def __init__( self, file_service_identifier, predicates, file_query_result ):
def __init__( self, file_service_identifier, predicates, media_results ):
Media.__init__( self )
MediaList.__init__( self, file_service_identifier, predicates, file_query_result )
MediaList.__init__( self, file_service_identifier, predicates, media_results )
self._hashes = set()
@ -434,7 +517,7 @@ class MediaCollection( MediaList, Media ):
self._RecalcInternals()
def __hash__( self ): return frozenset( self._hashes ).__hash__()
#def __hash__( self ): return frozenset( self._hashes ).__hash__()
def _RecalcInternals( self ):
@ -596,7 +679,7 @@ class MediaSingleton( Media ):
self._media_result = media_result
def __hash__( self ): return self.GetHash().__hash__()
#def __hash__( self ): return self.GetHash().__hash__()
def GetDisplayMedia( self ): return self
@ -606,16 +689,23 @@ class MediaSingleton( Media ):
def GetHashes( self, discriminant = None, not_uploaded_to = None ):
inbox = self._media_result.GetInbox()
file_service_identifiers = self._media_result.GetFileServiceIdentifiersCDPP()
if discriminant is not None:
inbox = self._media_result.GetInbox()
file_service_identifiers = self._media_result.GetFileServiceIdentifiersCDPP()
if ( discriminant == CC.DISCRIMINANT_INBOX and not inbox ) or ( discriminant == CC.DISCRIMINANT_ARCHIVE and inbox ) or ( discriminant == CC.DISCRIMINANT_LOCAL and not file_service_identifiers.HasLocal() ) or ( discriminant == CC.DISCRIMINANT_NOT_LOCAL and file_service_identifiers.HasLocal() ): return set()
if not_uploaded_to is not None:
file_service_identifiers = self._media_result.GetFileServiceIdentifiersCDPP()
if not_uploaded_to in file_service_identifiers.GetCurrentRemote(): return set()
return set( [ self._media_result.GetHash() ] )
return { self._media_result.GetHash() }
def GetFileServiceIdentifiersCDPP( self ): return self._media_result.GetFileServiceIdentifiersCDPP()

View File

@ -266,7 +266,7 @@ class PageWithMedia( PageBase, wx.SplitterWindow ):
class PageImport( PageWithMedia ):
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, [], CC.FileQueryResult( self._file_service_identifier, [], [] ) )
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, [], [] )
class PageImportBooru( PageImport ):
@ -399,12 +399,7 @@ class PageQuery( PageWithMedia ):
def _InitMediaPanel( self ):
if len( self._initial_media_results ) == 0: self._media_panel = ClientGUIMedia.MediaPanelNoQuery( self, self._page_key, self._file_service_identifier )
else:
file_query_result = CC.FileQueryResult( self._file_service_identifier, self._initial_predicates, self._initial_media_results )
self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_predicates, file_query_result )
else: self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_predicates, self._initial_media_results )
class PageThreadDumper( PageWithMedia ):
@ -413,22 +408,18 @@ class PageThreadDumper( PageWithMedia ):
self._imageboard = imageboard
search_context = CC.FileSearchContext()
media_results = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes )
self._unsorted_file_query_result = HC.app.Read( 'media_results', search_context, hashes )
hashes_to_media_results = { media_result.GetHash() : media_result for media_result in self._unsorted_file_query_result }
hashes_to_media_results = { media_result.GetHash() : media_result for media_result in media_results }
self._media_results = [ hashes_to_media_results[ hash ] for hash in hashes ]
self._media_results = filter( self._imageboard.IsOkToPost, self._media_results )
self._file_query_result = CC.FileQueryResult( HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], self._media_results )
PageWithMedia.__init__( self, parent, HC.LOCAL_FILE_SERVICE_IDENTIFIER )
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelDumper( self._search_preview_split, self, self._page_key, self._imageboard, self._media_results )
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], self._file_query_result )
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], self._media_results )

View File

@ -1,3 +1,4 @@
import bisect
import collections
import httplib
import HydrusExceptions
@ -36,7 +37,7 @@ TEMP_DIR = BASE_DIR + os.path.sep + 'temp'
# Misc
NETWORK_VERSION = 10
SOFTWARE_VERSION = 81
SOFTWARE_VERSION = 82
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -52,6 +53,8 @@ is_first_start = False
is_db_updated = False
repos_or_subs_changed = False
busy_doing_pubsub = False
# Enums
CONTENT_DATA_TYPE_MAPPINGS = 0
@ -741,7 +744,7 @@ def ConvertTimestampToPrettyAge( timestamp ):
def ConvertTimestampToPrettyAgo( timestamp ):
if timestamp == 0: return 'unknown time'
if timestamp is None or timestamp == 0: return 'unknown time'
age = GetNow() - timestamp
@ -885,7 +888,7 @@ def ConvertTimestampToPrettyPending( timestamp ):
def ConvertTimestampToPrettySync( timestamp ):
if timestamp == 0: return 'not updated'
if timestamp is None or timestamp == 0: return 'not updated'
age = GetNow() - timestamp
@ -1874,18 +1877,6 @@ class DAEMONWorker( DAEMON ):
def set( self, *args, **kwargs ): self._event.set()
class Message():
def __init__( self, message_type, info ):
self._message_type = message_type
self._info = info
def GetInfo( self ): return self._info
def GetType( self ): return self._message_type
class JobInternal():
yaml_tag = u'!JobInternal'
@ -1916,7 +1907,19 @@ class JobInternal():
elif shutdown: raise Exception( 'Application quit before db could serve result!' )
if issubclass( type( self._result ), Exception ): raise self._result
if issubclass( type( self._result ), Exception ):
etype = type( self._result )
db_traceback = unicode( self._result )
trace_list = traceback.format_stack()
my_trace = ''.join( trace_list )
raise etype( my_trace + os.linesep + db_traceback )
else: return self._result
@ -1964,6 +1967,39 @@ class JobServer():
self._result_ready.set()
class Message():
def __init__( self, message_type, info ):
self._message_type = message_type
self._info = info
def GetInfo( self ): return self._info
def GetType( self ): return self._message_type
class QueryKey():
def __init__( self ):
self._key = os.urandom( 32 )
self._cancelled = threading.Event()
def __eq__( self, other ): return self.__hash__() == other.__hash__()
def __hash__( self ): return self._key.__hash__()
def __ne__( self, other ): return self.__hash__() != other.__hash__()
def Cancel( self ): self._cancelled.set()
def GetKey( self ): return self._key
def IsCancelled( self ): return self._cancelled.is_set()
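QueryKey is what makes the searches cancellable during processing: it pairs a random identity (so late results for a superseded search can be ignored) with a threading.Event the gui can set from its own thread, while the searcher polls IsCancelled between its phases. A runnable sketch of the intended flow, with the class restated so it stands alone (DoFileQuery is a hypothetical stand-in for the real two-phase db search):

import os
import threading

class QueryKey():
    
    def __init__( self ):
        
        self._key = os.urandom( 32 )
        self._cancelled = threading.Event()
        
    
    def Cancel( self ): self._cancelled.set()
    
    def IsCancelled( self ): return self._cancelled.is_set()
    

def DoFileQuery( query_key ):
    
    # phase one: resolve predicates to file ids
    if query_key.IsCancelled(): return # bail before the expensive phase
    # phase two: build media results and publish to the gui
    if query_key.IsCancelled(): return
    

query_key = QueryKey()

worker = threading.Thread( target = DoFileQuery, args = ( query_key, ) )
worker.start()

query_key.Cancel() # e.g. the user edited the search predicates

worker.join()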
class Predicate():
def __init__( self, predicate_type, value, count ):
@ -2005,12 +2041,11 @@ class Predicate():
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_UNTAGGED: base = u'system:untagged'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_LOCAL: base = u'system:local'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_NOT_LOCAL: base = u'system:not local'
elif system_predicate_type in ( SYSTEM_PREDICATE_TYPE_NUM_TAGS, SYSTEM_PREDICATE_TYPE_WIDTH, SYSTEM_PREDICATE_TYPE_HEIGHT, SYSTEM_PREDICATE_TYPE_RATIO, SYSTEM_PREDICATE_TYPE_DURATION, SYSTEM_PREDICATE_TYPE_NUM_WORDS ):
elif system_predicate_type in ( SYSTEM_PREDICATE_TYPE_NUM_TAGS, SYSTEM_PREDICATE_TYPE_WIDTH, SYSTEM_PREDICATE_TYPE_HEIGHT, SYSTEM_PREDICATE_TYPE_DURATION, SYSTEM_PREDICATE_TYPE_NUM_WORDS ):
if system_predicate_type == SYSTEM_PREDICATE_TYPE_NUM_TAGS: base = u'system:number of tags'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_WIDTH: base = u'system:width'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_HEIGHT: base = u'system:height'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_RATIO: base = u'system:ratio'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_DURATION: base = u'system:duration'
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_NUM_WORDS: base = u'system:number of words'
@ -2021,6 +2056,17 @@ class Predicate():
base += u' ' + operator + u' ' + u( value )
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_RATIO:
base = u'system:ratio'
if info is not None:
( operator, ratio_width, ratio_height ) = info
base += u' ' + operator + u' ' + u( ratio_width ) + u':' + u( ratio_height )
elif system_predicate_type == SYSTEM_PREDICATE_TYPE_SIZE:
base = u'system:size'
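Carrying the ratio as ( operator, width, height ) keeps the predicate exact, so it prints as '16:9' rather than a rounded float like 1.777779. If you wanted to normalise a raw resolution down to the same reduced pair, Fraction does the gcd reduction (the reduction step is my illustration, not something this commit performs):

from fractions import Fraction

ratio = Fraction( 1920, 1080 ) # automatically reduced to 16/9

( operator, ratio_width, ratio_height ) = ( u'=', ratio.numerator, ratio.denominator )

print( u'system:ratio ' + operator + u' ' + unicode( ratio_width ) + u':' + unicode( ratio_height ) )
# system:ratio = 16:9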
@ -2463,6 +2509,93 @@ class ServerToClientUpdate( HydrusYAMLBase ):
def GetServiceData( self, service_type ): return self._service_data[ service_type ]
class SortedList():
def __init__( self, initial_items = [], sort_function = None ):
do_sort = sort_function is not None
if sort_function is None: sort_function = lambda x: x
self._sorted_list = [ ( sort_function( item ), item ) for item in initial_items ]
self._items_to_indices = None
self._sort_function = sort_function
if do_sort: self.sort()
def __contains__( self, item ):
if self._items_to_indices is None: self._RecalcIndices()
return self._items_to_indices.__contains__( item )
def __getitem__( self, value ):
if type( value ) == int: return self._sorted_list.__getitem__( value )[1]
elif type( value ) == slice: return [ item for ( sort_item, item ) in self._sorted_list.__getitem__( value ) ]
def __iter__( self ):
for ( sorting_value, item ) in self._sorted_list: yield item
def __len__( self ): return self._sorted_list.__len__()
def _DirtyIndices( self ): self._items_to_indices = None
def _RecalcIndices( self ): self._items_to_indices = { item : index for ( index, ( sort_item, item ) ) in enumerate( self._sorted_list ) }
def append_items( self, items ):
self._sorted_list.extend( [ ( self._sort_function( item ), item ) for item in items ] )
self._DirtyIndices()
def index( self, item ):
if self._items_to_indices is None: self._RecalcIndices()
return self._items_to_indices[ item ]
def insert_items( self, items ):
for item in items: bisect.insort( self._sorted_list, ( self._sort_function( item ), item ) )
self._DirtyIndices()
def remove_items( self, items ):
try: deletee_indices = [ self.index( item ) for item in items ]
except:
print( self._items_to_indices )
raise
deletee_indices.sort()
deletee_indices.reverse()
for index in deletee_indices: self._sorted_list.pop( index )
self._DirtyIndices()
def sort( self, f = None ):
if f is not None: self._sort_function = f
self._sorted_list = [ ( self._sort_function( item ), item ) for ( old_value, item ) in self._sorted_list ]
self._sorted_list.sort()
self._DirtyIndices()
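SortedList keeps ( sort_value, item ) pairs so ordered inserts are a bisect.insort, re-sorting is just re-keying, and index lookups run off an items-to-indices dict that is thrown away on mutation and lazily rebuilt on the next lookup. A quick usage sketch against the class above:

sizes = SortedList( [ 'bbb', 'a', 'cc' ], sort_function = len )

print( list( sizes ) ) # ['a', 'cc', 'bbb']

sizes.insert_items( [ 'dd' ] ) # bisect keeps the order without a full sort

print( sizes.index( 'dd' ) ) # 2; the index dict is rebuilt lazily here

sizes.remove_items( [ 'a' ] )

sizes.sort( f = lambda x: x ) # re-key and re-sort in place

print( list( sizes ) ) # ['bbb', 'cc', 'dd']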
# sqlite mod
sqlite3.register_adapter( dict, yaml.safe_dump )

View File

@ -94,6 +94,38 @@ class TagsManagerSimple():
return result
def GetComparableNamespaceSlice( self, namespaces, collapse = True ):
combined_statuses_to_tags = self._service_identifiers_to_statuses_to_tags[ HC.COMBINED_TAG_SERVICE_IDENTIFIER ]
combined_current = combined_statuses_to_tags[ HC.CURRENT ]
combined_pending = combined_statuses_to_tags[ HC.PENDING ]
combined = combined_current.union( combined_pending )
siblings_manager = HC.app.GetTagSiblingsManager()
slice = []
for namespace in namespaces:
tags = [ tag for tag in combined if tag.startswith( namespace + ':' ) ]
if collapse:
tags = list( siblings_manager.CollapseTags( tags ) )
tags.sort()
tags = tuple( tags )
slice.append( tags )
return tuple( slice )
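GetComparableNamespaceSlice exists so the namespace sort can use a plain key function: it returns one tuple of collapsed, sorted tag tuples per requested namespace, and Python's elementwise tuple comparison then orders media without any custom comparator. For example (hypothetical slices):

x = ( ( u'creator:alice', ), ( u'series:stuff', ) )
y = ( ( u'creator:bob', ), ( u'series:other', ) )

print( x < y ) # True: u'creator:alice' < u'creator:bob' decides before series is looked at

slices = [ y, x ]
slices.sort() # the same ordering the media sort gets when it uses this slice as its key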
def GetNamespaceSlice( self, namespaces, collapse = True ):
combined_statuses_to_tags = self._service_identifiers_to_statuses_to_tags[ HC.COMBINED_TAG_SERVICE_IDENTIFIER ]

View File

@ -3056,7 +3056,7 @@ class DB( ServiceDB ):
self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first
if action != 'do_query': return job.GetResult()
return job.GetResult()
def Write( self, action, priority, *args, **kwargs ):

View File

@ -1,4 +1,155 @@
import ClientConstants as CC
import ClientDB
import HydrusConstants as HC
import itertools
import os
import shutil
import stat
import TestConstants
import time
import threading
import unittest
class TestDB( unittest.TestCase ): pass
class TestClientDB( unittest.TestCase ):
@classmethod
def setUpClass( self ):
self._old_db_dir = HC.DB_DIR
self._old_client_files_dir = HC.CLIENT_FILES_DIR
self._old_client_thumbnails_dir = HC.CLIENT_THUMBNAILS_DIR
HC.DB_DIR = HC.TEMP_DIR + os.path.sep + 'client_db_test'
HC.CLIENT_FILES_DIR = HC.DB_DIR + os.path.sep + 'client_files'
HC.CLIENT_THUMBNAILS_DIR = HC.DB_DIR + os.path.sep + 'client_thumbnails'
if not os.path.exists( HC.TEMP_DIR ): os.mkdir( HC.TEMP_DIR )
if not os.path.exists( HC.DB_DIR ): os.mkdir( HC.DB_DIR )
self._db = ClientDB.DB()
threading.Thread( target = self._db.MainLoop, name = 'Database Main Loop' ).start()
@classmethod
def tearDownClass( self ):
self._db.Shutdown()
time.sleep( 2 )
def make_temp_files_deletable( function_called, path, traceback_gumpf ):
os.chmod( path, stat.S_IWRITE )
function_called( path ) # try again
if os.path.exists( HC.DB_DIR ): shutil.rmtree( HC.DB_DIR, onerror = make_temp_files_deletable )
HC.DB_DIR = self._old_db_dir
HC.CLIENT_FILES_DIR = self._old_client_files_dir
HC.CLIENT_THUMBNAILS_DIR = self._old_client_thumbnails_dir
def test_folders_exist( self ):
self.assertTrue( os.path.exists( HC.DB_DIR ) )
self.assertTrue( os.path.exists( HC.DB_DIR + os.path.sep + 'client.db' ) )
self.assertTrue( os.path.exists( HC.CLIENT_FILES_DIR ) )
self.assertTrue( os.path.exists( HC.CLIENT_THUMBNAILS_DIR ) )
hex_chars = '0123456789abcdef'
for ( one, two ) in itertools.product( hex_chars, hex_chars ):
dir = HC.CLIENT_FILES_DIR + os.path.sep + one + two
self.assertTrue( os.path.exists( dir ) )
dir = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + one + two
self.assertTrue( os.path.exists( dir ) )
def test_services( self ):
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_FILE, ) )
self.assertEqual( result, { HC.LOCAL_FILE_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_TAG, ) )
self.assertEqual( result, { HC.LOCAL_TAG_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.COMBINED_FILE, ) )
self.assertEqual( result, { HC.COMBINED_FILE_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.COMBINED_TAG, ) )
self.assertEqual( result, { HC.COMBINED_TAG_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_FILE, HC.COMBINED_FILE ) )
self.assertEqual( result, { HC.LOCAL_FILE_SERVICE_IDENTIFIER, HC.COMBINED_FILE_SERVICE_IDENTIFIER } )
#
new_tag_repo = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.TAG_REPOSITORY, 'new tag repo' )
new_tag_repo_credentials = CC.Credentials( 'example_host', 80, access_key = os.urandom( 32 ) )
other_new_tag_repo = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.TAG_REPOSITORY, 'new tag repo2' )
other_new_tag_repo_credentials = CC.Credentials( 'example_host2', 80, access_key = os.urandom( 32 ) )
new_local_like = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.LOCAL_RATING_LIKE, 'new local rating' )
new_local_like_extra_info = ( 'love', 'hate' )
new_local_numerical = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.LOCAL_RATING_NUMERICAL, 'new local numerical' )
new_local_numerical_extra_info = ( 1, 5 )
edit_log = []
edit_log.append( ( 'add', ( new_tag_repo, new_tag_repo_credentials, None ) ) )
edit_log.append( ( 'add', ( other_new_tag_repo, new_tag_repo_credentials, None ) ) )
edit_log.append( ( 'add', ( new_local_like, None, new_local_like_extra_info ) ) )
edit_log.append( ( 'add', ( new_local_numerical, None, new_local_numerical_extra_info ) ) )
self._db.Write( 'update_services', HC.HIGH_PRIORITY, True, edit_log )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.TAG_REPOSITORY, ) )
self.assertEqual( result, { new_tag_repo, other_new_tag_repo } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_RATING_LIKE, ) )
self.assertEqual( result, { new_local_like } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_RATING_NUMERICAL, ) )
self.assertEqual( result, { new_local_numerical } )
#
# should the service key be different or the same?
other_new_tag_repo_updated = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.TAG_REPOSITORY, 'a better name' )
other_new_tag_repo_credentials_updated = CC.Credentials( 'corrected host', 85, access_key = os.urandom( 32 ) )
edit_log = []
edit_log.append( ( 'delete', new_local_like ) )
edit_log.append( ( 'edit', ( other_new_tag_repo, ( other_new_tag_repo_updated, other_new_tag_repo_credentials_updated, None ) ) ) )
self._db.Write( 'update_services', HC.HIGH_PRIORITY, True, edit_log )
# now delete local_like, test that
# edit other_tag_repo, test that
#
result = self._db.Read( 'service', HC.HIGH_PRIORITY, new_tag_repo )
# test credentials
result = self._db.Read( 'services', HC.HIGH_PRIORITY, ( HC.TAG_REPOSITORY, ) )
# test there are two, and test credentials
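A note on running the new framework: these are standard unittest cases, so loading the module and running it should be enough (the module name TestClientDB is my assumption from the class name; the actual filename may differ):

import unittest

import TestClientDB # hypothetical module name for the file above

suite = unittest.TestLoader().loadTestsFromModule( TestClientDB )

unittest.TextTestRunner( verbosity = 2 ).run( suite )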

View File

@ -92,4 +92,7 @@ if __name__ == '__main__':
app = App()
raw_input()
HC.shutdown = True
HC.pubsub.pubimmediate( 'shutdown' )