# hydrus/include/ClientMedia.py


import bisect
import collections
import ClientConstants as CC
import ClientData
import ClientFiles
import ClientRatings
import ClientSearch
import ClientTags
import HydrusConstants as HC
import HydrusTags
import os
import random
import time
import traceback
import wx
import HydrusData
import HydrusFileHandling
import HydrusExceptions
import HydrusGlobals as HG
import HydrusSerialisable
import itertools

def FlattenMedia( media_list ):
    
    flat_media = []
    
    for media in media_list:
        
        if media.IsCollection():
            flat_media.extend( media.GetFlatMedia() )
        else:
            flat_media.append( media )
        
    
    return flat_media

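# GetDuplicateComparisonStatements compares the file currently shown in the duplicate
# filter against its potential duplicate. It returns human-readable statements plus a
# rough numerical score: positive values favour keeping shown_media, negative values
# favour comparison_media. An illustrative, hedged sketch of how a caller might use it
# (the variable names here are hypothetical, not from this file):
#
#     ( statements, score ) = GetDuplicateComparisonStatements( shown, comparison )
#
#     if score > 0:
#         # shown is probably the better copy
#         pass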
def GetDuplicateComparisonStatements( shown_media, comparison_media ):
    
    statements = []
    score = 0
    
    # size
    
    s_size = shown_media.GetSize()
    c_size = comparison_media.GetSize()
    
    size_ratio = float( s_size ) / float( c_size )
    
    if size_ratio > 2.0:
        statements.append( 'This has a much larger filesize.' )
        score += 2
    elif size_ratio > 1.05:
        statements.append( 'This has a larger filesize.' )
        score += 0.5
    elif size_ratio < 0.5:
        statements.append( 'This has a much smaller filesize.' )
        score -= 2
    elif size_ratio < 0.95:
        statements.append( 'This has a smaller filesize.' )
        score -= 0.5
    
    # higher/same res
    
    s_resolution = shown_media.GetResolution()
    c_resolution = comparison_media.GetResolution()
    
    if s_resolution is not None and c_resolution is not None:
        
        ( s_w, s_h ) = shown_media.GetResolution()
        ( c_w, c_h ) = comparison_media.GetResolution()
        
        resolution_ratio = float( s_w * s_h ) / float( c_w * c_h )
        
        if resolution_ratio == 1.0:
            
            if s_resolution != c_resolution:
                statements.append( 'The files have the same number of pixels but different resolution.' )
            
        elif resolution_ratio > 2.0:
            statements.append( 'This has much higher resolution.' )
            score += 2
        elif resolution_ratio > 1.0:
            statements.append( 'This has higher resolution.' )
            score += 1
        elif resolution_ratio < 0.5:
            statements.append( 'This has much lower resolution.' )
            score -= 2
        elif resolution_ratio < 1.0:
            statements.append( 'This has lower resolution.' )
            score -= 1
        
    
    # same/diff mime
    
    s_mime = shown_media.GetMime()
    c_mime = comparison_media.GetMime()
    
    if s_mime != c_mime:
        statements.append( 'This is ' + HC.mime_string_lookup[ s_mime ] + ', the other is ' + HC.mime_string_lookup[ c_mime ] + '.' )
    
    # more tags
    
    s_num_tags = len( shown_media.GetTagsManager().GetCurrent() )
    c_num_tags = len( comparison_media.GetTagsManager().GetCurrent() )
    
    if s_num_tags > 0 and c_num_tags > 0:
        
        if s_num_tags > c_num_tags:
            statements.append( 'This has more tags.' )
            score += 1
        elif s_num_tags < c_num_tags:
            statements.append( 'This has fewer tags.' )
            score -= 1
        
    elif s_num_tags > 0:
        statements.append( 'This has tags, the other does not.' )
    elif c_num_tags > 0:
        statements.append( 'This has no tags, the other does.' )
    
    # older
    
    s_ts = shown_media.GetLocationsManager().GetTimestamp( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
    c_ts = comparison_media.GetLocationsManager().GetTimestamp( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
    
    if s_ts is not None and c_ts is not None:
        
        if s_ts < c_ts - 86400 * 30:
            statements.append( 'This is older.' )
            score += 0.5
        elif c_ts < s_ts - 86400 * 30:
            statements.append( 'This is newer.' )
            score -= 0.5
        
    
    return ( statements, score )

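# MergeTagsManagers folds several per-file tags managers into a single manager for a
# collection. Only CURRENT and PENDING statuses survive the merge; for each service the
# tag sets from every contributing manager are unioned. An illustrative, hedged call
# (some_media is a hypothetical iterable of media objects):
#
#     merged = MergeTagsManagers( [ m.GetTagsManager() for m in some_media ] )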
def MergeTagsManagers( tags_managers ):
    
    def CurrentAndPendingFilter( items ):
        
        for ( service_key, statuses_to_tags ) in items:
            
            filtered = { status : tags for ( status, tags ) in statuses_to_tags.items() if status in ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) }
            
            yield ( service_key, filtered )
            
        
    
    # [ [ ( service_key, statuses_to_tags ) ] ]
    s_k_s_t_t_tupled = ( CurrentAndPendingFilter( tags_manager.GetServiceKeysToStatusesToTags().items() ) for tags_manager in tags_managers )
    
    # [ ( service_key, statuses_to_tags ) ]
    flattened_s_k_s_t_t = itertools.chain.from_iterable( s_k_s_t_t_tupled )
    
    # service_key : [ statuses_to_tags ]
    s_k_s_t_t_dict = HydrusData.BuildKeyToListDict( flattened_s_k_s_t_t )
    
    # now let's merge so we have service_key : statuses_to_tags
    merged_service_keys_to_statuses_to_tags = collections.defaultdict( HydrusData.default_dict_set )
    
    for ( service_key, several_statuses_to_tags ) in s_k_s_t_t_dict.items():
        
        # [ [ ( status, tags ) ] ]
        s_t_t_tupled = ( s_t_t.items() for s_t_t in several_statuses_to_tags )
        
        # [ ( status, tags ) ]
        flattened_s_t_t = itertools.chain.from_iterable( s_t_t_tupled )
        
        statuses_to_tags = HydrusData.default_dict_set()
        
        for ( status, tags ) in flattened_s_t_t: statuses_to_tags[ status ].update( tags )
        
        merged_service_keys_to_statuses_to_tags[ service_key ] = statuses_to_tags
        
    
    return TagsManager( merged_service_keys_to_statuses_to_tags )

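# DuplicatesManager keeps per-service counters of duplicate statuses, and FileInfoManager
# is a plain value object holding a file's basic metadata: hash, size, mime, dimensions,
# duration, frame count and word count.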
class DuplicatesManager( object ):
    
    def __init__( self, service_keys_to_dupe_statuses_to_counts ):
        
        self._service_keys_to_dupe_statuses_to_counts = service_keys_to_dupe_statuses_to_counts
        
    
    def Duplicate( self ):
        
        service_keys_to_dupe_statuses_to_counts = collections.defaultdict( collections.Counter )
        
        return DuplicatesManager( service_keys_to_dupe_statuses_to_counts )
        
    
    def GetDupeStatusesToCounts( self, service_key ):
        
        return self._service_keys_to_dupe_statuses_to_counts[ service_key ]
        
    

class FileInfoManager( object ):
    
    def __init__( self, hash, size = None, mime = None, width = None, height = None, duration = None, num_frames = None, num_words = None ):
        
        if mime is None:
            mime = HC.APPLICATION_UNKNOWN
        
        self.hash = hash
        self.size = size
        self.mime = mime
        self.width = width
        self.height = height
        self.duration = duration
        self.num_frames = num_frames
        self.num_words = num_words
        
    
    def Duplicate( self ):
        
        return FileInfoManager( self.hash, self.size, self.mime, self.width, self.height, self.duration, self.num_frames, self.num_words )
        
    
    def ToTuple( self ):
        
        return ( self.hash, self.size, self.mime, self.width, self.height, self.duration, self.num_frames, self.num_words )
        
    

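# LocationsManager tracks which file services a file is in: the four CDPP sets (current,
# deleted, pending, petitioned), plus the inbox flag, known URLs and per-service import
# timestamps. The *Remote getters simply subtract the local service keys in
# LOCAL_LOCATIONS from the corresponding set. A minimal construction sketch, hedged
# (the literal values are hypothetical):
#
#     lm = LocationsManager( current = { CC.LOCAL_FILE_SERVICE_KEY }, deleted = set(), pending = set(), petitioned = set(), inbox = True )
#     lm.IsLocal() # False here, since only a content update adds CC.COMBINED_LOCAL_FILE_SERVICE_KEY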
class LocationsManager( object ):
    
    LOCAL_LOCATIONS = { CC.LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }
    
    def __init__( self, current, deleted, pending, petitioned, inbox = False, urls = None, service_keys_to_filenames = None, current_to_timestamps = None ):
        
        self._current = current
        self._deleted = deleted
        self._pending = pending
        self._petitioned = petitioned
        
        self._inbox = inbox
        
        if urls is None:
            urls = set()
        
        self._urls = urls
        
        if service_keys_to_filenames is None:
            service_keys_to_filenames = {}
        
        self._service_keys_to_filenames = service_keys_to_filenames
        
        if current_to_timestamps is None:
            current_to_timestamps = {}
        
        self._current_to_timestamps = current_to_timestamps
        
    
    def DeletePending( self, service_key ):
        
        self._pending.discard( service_key )
        self._petitioned.discard( service_key )
        
    
    def Duplicate( self ):
        
        current = set( self._current )
        deleted = set( self._deleted )
        pending = set( self._pending )
        petitioned = set( self._petitioned )
        
        urls = set( self._urls )
        
        service_keys_to_filenames = dict( self._service_keys_to_filenames )
        current_to_timestamps = dict( self._current_to_timestamps )
        
        return LocationsManager( current, deleted, pending, petitioned, self._inbox, urls, service_keys_to_filenames, current_to_timestamps )
        
    
    def GetCDPP( self ): return ( self._current, self._deleted, self._pending, self._petitioned )
    
    def GetCurrent( self ): return self._current
    
    def GetCurrentRemote( self ):
        
        return self._current - self.LOCAL_LOCATIONS
        
    
    def GetDeleted( self ): return self._deleted
    
    def GetDeletedRemote( self ):
        
        return self._deleted - self.LOCAL_LOCATIONS
        
    
    def GetInbox( self ):
        
        return self._inbox
        
    
    def GetPending( self ): return self._pending
    
    def GetPendingRemote( self ):
        
        return self._pending - self.LOCAL_LOCATIONS
        
    
    def GetPetitioned( self ): return self._petitioned
    
    def GetPetitionedRemote( self ):
        
        return self._petitioned - self.LOCAL_LOCATIONS
        
    
    def GetRemoteLocationStrings( self ):
        
        current = self.GetCurrentRemote()
        pending = self.GetPendingRemote()
        petitioned = self.GetPetitionedRemote()
        
        remote_services = HG.client_controller.services_manager.GetServices( ( HC.FILE_REPOSITORY, HC.IPFS ) )
        
        remote_services = list( remote_services )
        
        def key( s ):
            
            return s.GetName()
            
        
        remote_services.sort( key = key )
        
        remote_service_strings = []
        
        for remote_service in remote_services:
            
            name = remote_service.GetName()
            service_key = remote_service.GetServiceKey()
            
            if service_key in pending:
                
                remote_service_strings.append( name + ' (+)' )
                
            elif service_key in current:
                
                if service_key in petitioned:
                    remote_service_strings.append( name + ' (-)' )
                else:
                    remote_service_strings.append( name )
                
            
        
        return remote_service_strings
        
    
    def GetTimestamp( self, service_key ):
        
        if service_key in self._current_to_timestamps:
            return self._current_to_timestamps[ service_key ]
        else:
            return None
        
    
    def GetURLs( self ):
        
        return self._urls
        
    
    def IsDownloading( self ):
        
        return CC.COMBINED_LOCAL_FILE_SERVICE_KEY in self._pending
        
    
    def IsLocal( self ):
        
        return CC.COMBINED_LOCAL_FILE_SERVICE_KEY in self._current
        
    
    def IsRemote( self ):
        
        return CC.COMBINED_LOCAL_FILE_SERVICE_KEY not in self._current
        
    
    def IsTrashed( self ):
        
        return CC.TRASH_SERVICE_KEY in self._current
        
    
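    # ProcessContentUpdate mutates the CDPP sets in response to a single content update.
    # Roughly: ADD moves a service into current (and, for the local file domain, pulls
    # the file out of the trash and into the combined local service), DELETE moves it
    # into deleted (sending local files to the trash, and dropping the combined local
    # service when the trash copy goes), UNDELETE pulls a trashed file back into the
    # local file service, and the PEND/PETITION/RESCIND actions adjust the pending and
    # petitioned sets. URL updates simply add to or remove from the known-URLs set.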
    def ProcessContentUpdate( self, service_key, content_update ):
        
        ( data_type, action, row ) = content_update.ToTuple()
        
        if data_type == HC.CONTENT_TYPE_FILES:
            
            if action == HC.CONTENT_UPDATE_ARCHIVE:
                
                self._inbox = False
                
            elif action == HC.CONTENT_UPDATE_INBOX:
                
                self._inbox = True
                
            elif action == HC.CONTENT_UPDATE_ADD:
                
                self._current.add( service_key )
                
                self._deleted.discard( service_key )
                self._pending.discard( service_key )
                
                if service_key == CC.LOCAL_FILE_SERVICE_KEY:
                    
                    self._current.discard( CC.TRASH_SERVICE_KEY )
                    self._pending.discard( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
                    
                    if CC.COMBINED_LOCAL_FILE_SERVICE_KEY not in self._current:
                        
                        self._inbox = True
                        
                        self._current.add( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
                        
                        self._current_to_timestamps[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ] = HydrusData.GetNow()
                        
                    
                
                self._current_to_timestamps[ service_key ] = HydrusData.GetNow()
                
            elif action == HC.CONTENT_UPDATE_DELETE:
                
                self._deleted.add( service_key )
                
                self._current.discard( service_key )
                self._petitioned.discard( service_key )
                
                if service_key == CC.LOCAL_FILE_SERVICE_KEY:
                    
                    self._current.add( CC.TRASH_SERVICE_KEY )
                    self._current_to_timestamps[ CC.TRASH_SERVICE_KEY ] = HydrusData.GetNow()
                    
                elif service_key == CC.TRASH_SERVICE_KEY:
                    
                    self._inbox = False
                    
                    self._current.discard( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
                    
                
            elif action == HC.CONTENT_UPDATE_UNDELETE:
                
                self._current.discard( CC.TRASH_SERVICE_KEY )
                
                self._deleted.discard( CC.LOCAL_FILE_SERVICE_KEY )
                self._current.add( CC.LOCAL_FILE_SERVICE_KEY )
                
            elif action == HC.CONTENT_UPDATE_PEND:
                
                if service_key not in self._current: self._pending.add( service_key )
                
            elif action == HC.CONTENT_UPDATE_PETITION:
                
                if service_key not in self._deleted: self._petitioned.add( service_key )
                
            elif action == HC.CONTENT_UPDATE_RESCIND_PEND:
                
                self._pending.discard( service_key )
                
            elif action == HC.CONTENT_UPDATE_RESCIND_PETITION:
                
                self._petitioned.discard( service_key )
                
            
        elif data_type == HC.CONTENT_TYPE_URLS:
            
            if action == HC.CONTENT_UPDATE_ADD:
                
                ( hash, urls ) = row
                
                self._urls.update( urls )
                
            elif action == HC.CONTENT_UPDATE_DELETE:
                
                ( hash, urls ) = row
                
                self._urls.difference_update( urls )
                
            
        
    
    def ResetService( self, service_key ):
        
        self._current.discard( service_key )
        self._pending.discard( service_key )
        self._deleted.discard( service_key )
        self._petitioned.discard( service_key )
        
    
    def ShouldHaveThumbnail( self ):
        
        return len( self._current ) > 0
        
    

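# Media is the lightweight base class: every media object gets a random id, so equality
# and hashing are identity-based rather than content-based. MediaList is the container
# used by thumbnail views; it keeps singleton media and collected media
# (MediaCollections), hash lookups for both, and a SortedList of everything for display
# order.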
class Media( object ):
    
    def __init__( self ):
        
        self._id = HydrusData.GenerateKey()
        self._id_hash = self._id.__hash__()
        
    
    def __eq__( self, other ): return self.__hash__() == other.__hash__()
    
    def __hash__( self ):
        
        return self._id_hash
        
    
    def __ne__( self, other ): return self.__hash__() != other.__hash__()
    

class MediaList( object ):
    
    def __init__( self, file_service_key, media_results ):
        
        self._file_service_key = file_service_key
        
        self._hashes = set()
        
        self._hashes_to_singleton_media = {}
        self._hashes_to_collected_media = {}
        
        self._media_sort = MediaSort( ( 'system', CC.SORT_FILES_BY_FILESIZE ), CC.SORT_ASC )
        self._collect_by = []
        
        self._collect_map_singletons = {}
        self._collect_map_collected = {}
        
        self._sorted_media = SortedList( [ self._GenerateMediaSingleton( media_result ) for media_result in media_results ] )
        
        self._singleton_media = set( self._sorted_media )
        self._collected_media = set()
        
        self._RecalcHashes()
        
    
    def __len__( self ):
        
        return len( self._singleton_media ) + sum( map( len, self._collected_media ) )
        
    
    def _CalculateCollectionKeysToMedias( self, collect_by, medias ):
        
        keys_to_medias = collections.defaultdict( list )
        
        namespaces_to_collect_by = [ data for ( collect_by_type, data ) in collect_by if collect_by_type == 'namespace' ]
        ratings_to_collect_by = [ data for ( collect_by_type, data ) in collect_by if collect_by_type == 'rating' ]
        
        services_manager = HG.client_controller.services_manager
        
        for media in medias:
            
            if len( namespaces_to_collect_by ) > 0:
                namespace_key = media.GetTagsManager().GetNamespaceSlice( namespaces_to_collect_by )
            else:
                namespace_key = None
            
            if len( ratings_to_collect_by ) > 0:
                rating_key = media.GetRatingsManager().GetRatingSlice( ratings_to_collect_by )
            else:
                rating_key = None
            
            keys_to_medias[ ( namespace_key, rating_key ) ].append( media )
            
        
        return keys_to_medias
        
    
    def _GenerateMediaCollection( self, media_results ):
        
        return MediaCollection( self._file_service_key, media_results )
        
    
    def _GenerateMediaSingleton( self, media_result ):
        
        return MediaSingleton( media_result )
        
    
    def _GetFirst( self ): return self._sorted_media[ 0 ]
    
    def _GetLast( self ): return self._sorted_media[ -1 ]
    
    def _GetMedia( self, hashes, discriminator = None ):
        
        if hashes.isdisjoint( self._hashes ):
            return []
        
        medias = []
        
        if discriminator is None or discriminator == 'singletons':
            medias.extend( ( self._hashes_to_singleton_media[ hash ] for hash in hashes if hash in self._hashes_to_singleton_media ) )
        
        if discriminator is None or discriminator == 'collections':
            medias.extend( { self._hashes_to_collected_media[ hash ] for hash in hashes if hash in self._hashes_to_collected_media } )
        
        return medias
        '''
        if discriminator is None:
            medias = self._sorted_media
        elif discriminator == 'singletons':
            medias = self._singleton_media
        elif discriminator == 'collections':
            medias = self._collected_media
        
        return [ media for media in medias if media.HasAnyOfTheseHashes( hashes ) ]
        '''
    
    def _GetNext( self, media ):
        
        if media is None:
            return None
        
        next_index = self._sorted_media.index( media ) + 1
        
        if next_index == len( self._sorted_media ):
            return self._GetFirst()
        else:
            return self._sorted_media[ next_index ]
        
    
    def _GetPrevious( self, media ):
        
        if media is None: return None
        
        previous_index = self._sorted_media.index( media ) - 1
        
        if previous_index == -1: return self._GetLast()
        else: return self._sorted_media[ previous_index ]
        
    
    def _HasHashes( self, hashes ):
        
        for hash in hashes:
            
            if hash in self._hashes:
                return True
            
        
        return False
        
    
    def _RecalcHashes( self ):
        
        self._hashes = set()
        
        self._hashes_to_singleton_media = {}
        self._hashes_to_collected_media = {}
        
        for media in self._collected_media:
            
            hashes = media.GetHashes()
            
            self._hashes.update( hashes )
            
            for hash in hashes:
                self._hashes_to_collected_media[ hash ] = media
            
        
        for media in self._singleton_media:
            
            hash = media.GetHash()
            
            self._hashes.add( hash )
            
            self._hashes_to_singleton_media[ hash ] = media
            
        
    
    def _RemoveMediaByHashes( self, hashes ):
        
        if not isinstance( hashes, set ):
            hashes = set( hashes )
        
        affected_singleton_media = self._GetMedia( hashes, discriminator = 'singletons' )
        
        for media in self._collected_media:
            media._RemoveMediaByHashes( hashes )
        
        affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ]
        
        self._RemoveMediaDirectly( affected_singleton_media, affected_collected_media )
        
    
    def _RemoveMediaDirectly( self, singleton_media, collected_media ):
        
        if not isinstance( singleton_media, set ):
            singleton_media = set( singleton_media )
        
        if not isinstance( collected_media, set ):
            collected_media = set( collected_media )
        
        self._singleton_media.difference_update( singleton_media )
        self._collected_media.difference_update( collected_media )
        
        keys_to_remove = [ key for ( key, media ) in self._collect_map_singletons.items() if media in singleton_media ]
        
        for key in keys_to_remove:
            del self._collect_map_singletons[ key ]
        
        keys_to_remove = [ key for ( key, media ) in self._collect_map_collected.items() if media in collected_media ]
        
        for key in keys_to_remove:
            del self._collect_map_collected[ key ]
        
        self._sorted_media.remove_items( singleton_media.union( collected_media ) )
        
        self._RecalcHashes()
        
    
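    # AddMedia either appends new singletons directly (append = True) or, when append is
    # False, routes them through the current collect_by rules so they land in the right
    # existing or newly created MediaCollection.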
    def AddMedia( self, new_media, append = True ):
        
        if append:
            
            for media in new_media:
                
                hash = media.GetHash()
                
                self._hashes.add( hash )
                
                self._hashes_to_singleton_media[ hash ] = media
                
            
            self._singleton_media.update( new_media )
            self._sorted_media.append_items( new_media )
            
        else:
            
            if self._collect_by is not None:
                
                keys_to_medias = self._CalculateCollectionKeysToMedias( self._collect_by, new_media )
                
                new_media = []
                
                for ( key, medias ) in keys_to_medias.items():
                    
                    if key in self._collect_map_singletons:
                        
                        singleton_media = self._collect_map_singletons[ key ]
                        
                        self._sorted_media.remove_items( singleton_media )
                        self._singleton_media.discard( singleton_media )
                        
                        del self._collect_map_singletons[ key ]
                        
                        medias.append( singleton_media )
                        
                        collected_media = self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] )
                        
                        collected_media.Sort( self._media_sort )
                        
                        self._collected_media.add( collected_media )
                        self._collect_map_collected[ key ] = collected_media
                        
                        new_media.append( collected_media )
                        
                    elif key in self._collect_map_collected:
                        
                        collected_media = self._collect_map_collected[ key ]
                        
                        self._sorted_media.remove_items( collected_media )
                        
                        collected_media.AddMedia( medias )
                        
                        collected_media.Sort( self._media_sort )
                        
                        new_media.append( collected_media )
                        
                    elif len( medias ) == 1:
                        
                        ( singleton_media, ) = medias
                        
                        self._singleton_media.add( singleton_media )
                        self._collect_map_singletons[ key ] = singleton_media
                        
                    else:
                        
                        collected_media = self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] )
                        
                        collected_media.Sort( self._media_sort )
                        
                        self._collected_media.add( collected_media )
                        self._collect_map_collected[ key ] = collected_media
                        
                        new_media.append( collected_media )
                        
                    
                
            
            self._sorted_media.insert_items( new_media )
            
            self._RecalcHashes()
            
        
        return new_media
        
    
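    # Collect rebuilds the singleton/collection split from scratch: existing collections
    # are dissolved back into singletons, then _CalculateCollectionKeysToMedias groups
    # everything by the requested namespace and rating keys. Groups of one stay
    # singletons; larger groups become MediaCollections. An illustrative, hedged call
    # (the 'creator' namespace is hypothetical):
    #
    #     media_list.Collect( [ ( 'namespace', 'creator' ) ] )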
    def Collect( self, collect_by = None ):
        
        if collect_by is None:
            collect_by = self._collect_by
        
        self._collect_by = collect_by
        
        for media in self._collected_media:
            self._singleton_media.update( [ self._GenerateMediaSingleton( media_result ) for media_result in media.GenerateMediaResults() ] )
        
        self._collected_media = set()
        
        self._collect_map_singletons = {}
        self._collect_map_collected = {}
        
        if len( collect_by ) > 0:
            
            keys_to_medias = self._CalculateCollectionKeysToMedias( collect_by, self._singleton_media )
            
            self._collect_map_singletons = { key : medias[0] for ( key, medias ) in keys_to_medias.items() if len( medias ) == 1 }
            self._collect_map_collected = { key : self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] ) for ( key, medias ) in keys_to_medias.items() if len( medias ) > 1 }
            
            self._singleton_media = set( self._collect_map_singletons.values() )
            self._collected_media = set( self._collect_map_collected.values() )
            
        
        self._sorted_media = SortedList( list( self._singleton_media ) + list( self._collected_media ) )
        
        self._RecalcHashes()
        
    
    def DeletePending( self, service_key ):
        
        for media in self._collected_media: media.DeletePending( service_key )
        
    
    def GenerateMediaResults( self, has_location = None, discriminant = None, selected_media = None, unrated = None, for_media_viewer = False ):
        
        media_results = []
        
        for media in self._sorted_media:
            
            if has_location is not None:
                
                locations_manager = media.GetLocationsManager()
                
                if has_location not in locations_manager.GetCurrent():
                    continue
                
            
            if selected_media is not None and media not in selected_media:
                continue
            
            if media.IsCollection():
                
                media_results.extend( media.GenerateMediaResults( has_location = has_location, discriminant = discriminant, selected_media = selected_media, unrated = unrated, for_media_viewer = True ) )
                
            else:
                
                if discriminant is not None:
                    
                    locations_manager = media.GetLocationsManager()
                    
                    if discriminant == CC.DISCRIMINANT_INBOX:
                        p = media.HasInbox()
                    elif discriminant == CC.DISCRIMINANT_ARCHIVE:
                        p = not media.HasInbox()
                    elif discriminant == CC.DISCRIMINANT_LOCAL:
                        p = locations_manager.IsLocal()
                    elif discriminant == CC.DISCRIMINANT_LOCAL_BUT_NOT_IN_TRASH:
                        p = locations_manager.IsLocal() and not locations_manager.IsTrashed()
                    elif discriminant == CC.DISCRIMINANT_NOT_LOCAL:
                        p = not locations_manager.IsLocal()
                    elif discriminant == CC.DISCRIMINANT_DOWNLOADING:
                        p = locations_manager.IsDownloading()
                    
                    if not p:
                        continue
                    
                
                if unrated is not None:
                    
                    ratings_manager = media.GetRatingsManager()
                    
                    if ratings_manager.GetRating( unrated ) is not None:
                        continue
                    
                
                if for_media_viewer:
                    
                    new_options = HG.client_controller.new_options
                    
                    media_show_action = new_options.GetMediaShowAction( media.GetMime() )
                    
                    if media_show_action in ( CC.MEDIA_VIEWER_ACTION_DO_NOT_SHOW_ON_ACTIVATION_OPEN_EXTERNALLY, CC.MEDIA_VIEWER_ACTION_DO_NOT_SHOW ):
                        continue
                    
                
                media_results.append( media.GetMediaResult() )
                
            
        
        return media_results
        
    
    def GetFirst( self ):
        
        return self._GetFirst()
        
    
    def GetFlatMedia( self ):
        
        flat_media = []
        
        for media in self._sorted_media:
            
            if media.IsCollection():
                flat_media.extend( media.GetFlatMedia() )
            else:
                flat_media.append( media )
            
        
        return flat_media
        
    
    def GetLast( self ):
        
        return self._GetLast()
        
    
    def GetMediaIndex( self, media ):
        
        return self._sorted_media.index( media )
        
    
    def GetNext( self, media ):
        
        return self._GetNext( media )
        
    
    def GetNumFiles( self ):
        
        return len( self._hashes )
        
    
    def GetPrevious( self, media ):
        
        return self._GetPrevious( media )
        
    
    def GetSortedMedia( self ):
        
        return self._sorted_media
        
    
    def HasAnyOfTheseHashes( self, hashes ):
        
        return not hashes.isdisjoint( self._hashes )
        
    
    def HasMedia( self, media ):
        
        if media is None:
            return False
        
        if media in self._singleton_media:
            return True
        elif media in self._collected_media:
            return True
        else:
            
            for media_collection in self._collected_media:
                
                if media_collection.HasMedia( media ):
                    return True
                
            
        
        return False
        
    
    def HasNoMedia( self ): return len( self._sorted_media ) == 0
    
    def ProcessContentUpdate( self, service_key, content_update ):
        
        ( data_type, action, row ) = content_update.ToTuple()
        
        hashes = content_update.GetHashes()
        
        for media in self._GetMedia( hashes, 'collections' ):
            media.ProcessContentUpdate( service_key, content_update )
        
        if data_type == HC.CONTENT_TYPE_FILES:
            
            if action == HC.CONTENT_UPDATE_DELETE:
                
                local_file_domains = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
                
                non_trash_local_file_services = list( local_file_domains ) + [ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ]
                local_file_services = list( non_trash_local_file_services ) + [ CC.TRASH_SERVICE_KEY ]
                
                deleted_from_trash_and_local_view = service_key == CC.TRASH_SERVICE_KEY and self._file_service_key in local_file_services
                
                trashed_and_non_trash_local_view = HC.options[ 'remove_trashed_files' ] and service_key in non_trash_local_file_services and self._file_service_key in non_trash_local_file_services
                
                deleted_from_repo_and_repo_view = service_key not in local_file_services and self._file_service_key == service_key
                
                if deleted_from_trash_and_local_view or trashed_and_non_trash_local_view or deleted_from_repo_and_repo_view:
                    self._RemoveMediaByHashes( hashes )
                
            
        
    
    def ProcessContentUpdates( self, service_keys_to_content_updates ):
        
        for ( service_key, content_updates ) in service_keys_to_content_updates.items():
            
            for content_update in content_updates:
                self.ProcessContentUpdate( service_key, content_update )
            
        
    
    def ProcessServiceUpdates( self, service_keys_to_service_updates ):
        
        for ( service_key, service_updates ) in service_keys_to_service_updates.items():
            
            for service_update in service_updates:
                
                ( action, row ) = service_update.ToTuple()
                
                if action == HC.SERVICE_UPDATE_DELETE_PENDING:
                    self.DeletePending( service_key )
                elif action == HC.SERVICE_UPDATE_RESET:
                    self.ResetService( service_key )
                
            
        
    
    def ResetService( self, service_key ):
        
        if service_key == self._file_service_key:
            self._RemoveMediaDirectly( self._singleton_media, self._collected_media )
        else:
            for media in self._collected_media: media.ResetService( service_key )
        
    
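    # Sort applies two passes over the same stable sort: first the user's fallback sort
    # from the options, then the requested sort, so files that compare equal under the
    # requested sort keep the fallback ordering.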
    def Sort( self, media_sort = None ):
        
        for media in self._collected_media:
            media.Sort( media_sort )
        
        if media_sort is None:
            media_sort = self._media_sort
        
        self._media_sort = media_sort
        
        media_sort_fallback = HG.client_controller.new_options.GetFallbackSort()
        
        ( sort_key, reverse ) = media_sort_fallback.GetSortKeyAndReverse( self._file_service_key )
        
        self._sorted_media.sort( sort_key, reverse = reverse )
        
        # this is a stable sort, so the fallback order above will remain for equal items
        
        ( sort_key, reverse ) = self._media_sort.GetSortKeyAndReverse( self._file_service_key )
        
        self._sorted_media.sort( sort_key = sort_key, reverse = reverse )
        
    

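# ListeningMediaList is a MediaList that subscribes to the controller's
# 'content_updates_gui' and 'service_updates_gui' pubsubs, so it stays in sync as the
# client processes updates.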
class ListeningMediaList( MediaList ):
    
    def __init__( self, file_service_key, media_results ):
        
        MediaList.__init__( self, file_service_key, media_results )
        
        self._file_query_result = ClientSearch.FileQueryResult( media_results )
        
        HG.client_controller.sub( self, 'ProcessContentUpdates', 'content_updates_gui' )
        HG.client_controller.sub( self, 'ProcessServiceUpdates', 'service_updates_gui' )
        
    
    def AddMediaResults( self, media_results, append = True ):
        
        self._file_query_result.AddMediaResults( media_results )
        
        new_media = []
        
        for media_result in media_results:
            
            hash = media_result.GetHash()
            
            if hash in self._hashes:
                continue
            
            new_media.append( self._GenerateMediaSingleton( media_result ) )
            
        
        self.AddMedia( new_media, append = append )
        
        return new_media
        
    
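# MediaCollection is both a Media (it can sit inside another list) and a MediaList (it
# contains other media). _RecalcInternals flattens the members' state into aggregate
# values: total size, summed duration, merged tags, intersected locations and, as the
# code itself calls it, the 'horrible compromise' of borrowing the first member's
# ratings manager.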
class MediaCollection( MediaList, Media ):
    
    def __init__( self, file_service_key, media_results ):
        
        Media.__init__( self )
        MediaList.__init__( self, file_service_key, media_results )
        
        self._archive = True
        self._inbox = False
        
        self._size = 0
        self._size_definite = True
        
        self._width = None
        self._height = None
        self._duration = None
        self._num_frames = None
        self._num_words = None
        self._tags_manager = None
        self._locations_manager = None
        
        self._RecalcInternals()
        
    
    def _RecalcInternals( self ):
        
        self._RecalcHashes()
        
        self._archive = True in ( media.HasArchive() for media in self._sorted_media )
        self._inbox = True in ( media.HasInbox() for media in self._sorted_media )
        
        self._size = sum( [ media.GetSize() for media in self._sorted_media ] )
        self._size_definite = not False in ( media.IsSizeDefinite() for media in self._sorted_media )
        
        duration_sum = sum( [ media.GetDuration() for media in self._sorted_media if media.HasDuration() ] )
        
        if duration_sum > 0: self._duration = duration_sum
        else: self._duration = None
        
        tags_managers = [ m.GetTagsManager() for m in self._sorted_media ]
        
        self._tags_manager = MergeTagsManagers( tags_managers )
        
        # horrible compromise
        if len( self._sorted_media ) > 0:
            self._ratings_manager = self._sorted_media[0].GetRatingsManager()
        else:
            self._ratings_manager = ClientRatings.RatingsManager( {} )
        
        all_locations_managers = [ media.GetLocationsManager() for media in self._sorted_media ]
        
        current = HydrusData.IntelligentMassIntersect( [ locations_manager.GetCurrent() for locations_manager in all_locations_managers ] )
        deleted = HydrusData.IntelligentMassIntersect( [ locations_manager.GetDeleted() for locations_manager in all_locations_managers ] )
        pending = HydrusData.IntelligentMassIntersect( [ locations_manager.GetPending() for locations_manager in all_locations_managers ] )
        petitioned = HydrusData.IntelligentMassIntersect( [ locations_manager.GetPetitioned() for locations_manager in all_locations_managers ] )
        
        self._locations_manager = LocationsManager( current, deleted, pending, petitioned )
        
    
    def AddMedia( self, new_media, append = True ):
        
        MediaList.AddMedia( self, new_media, append = True )
        
        self._RecalcInternals()
        
    
    def DeletePending( self, service_key ):
        
        MediaList.DeletePending( self, service_key )
        
        self._RecalcInternals()
        
    
    def GetDisplayMedia( self ): return self._GetFirst().GetDisplayMedia()
    
    def GetDuration( self ): return self._duration
    
    def GetHash( self ): return self.GetDisplayMedia().GetHash()
    
    def GetHashes( self, has_location = None, discriminant = None, not_uploaded_to = None, ordered = False ):
        
        if has_location is None and discriminant is None and not_uploaded_to is None and not ordered:
            
            return self._hashes
            
        else:
            
            if ordered:
                
                result = []
                
                for media in self._sorted_media:
                    result.extend( media.GetHashes( has_location, discriminant, not_uploaded_to, ordered ) )
                
            else:
                
                result = set()
                
                for media in self._sorted_media:
                    result.update( media.GetHashes( has_location, discriminant, not_uploaded_to, ordered ) )
                
            
            return result
            
        
    
    def GetLocationsManager( self ): return self._locations_manager
    
    def GetMime( self ): return HC.APPLICATION_HYDRUS_CLIENT_COLLECTION
    
    def GetNumFiles( self ):
        
        return len( self._hashes )
        
    
    def GetNumInbox( self ): return sum( ( media.GetNumInbox() for media in self._sorted_media ) )
    
    def GetNumFrames( self ): return sum( ( media.GetNumFrames() for media in self._sorted_media ) )
    
    def GetNumWords( self ): return sum( ( media.GetNumWords() for media in self._sorted_media ) )
    
    def GetPrettyInfoLines( self ):
        
        size = HydrusData.ConvertIntToBytes( self._size )
        
        mime = HC.mime_string_lookup[ HC.APPLICATION_HYDRUS_CLIENT_COLLECTION ]
        
        info_string = size + ' ' + mime
        
        info_string += ' (' + HydrusData.ConvertIntToPrettyString( self.GetNumFiles() ) + ' files)'
        
        return [ info_string ]
        
    
    def GetRatingsManager( self ):
        
        return self._ratings_manager
        
    
    def GetResolution( self ): return ( self._width, self._height )
    
    def GetSingletonsTagsManagers( self ):
        
        tags_managers = [ m.GetTagsManager() for m in self._singleton_media ]
        
        for m in self._collected_media: tags_managers.extend( m.GetSingletonsTagsManagers() )
        
        return tags_managers
        
    
    def GetSize( self ): return self._size
    
    def GetTagsManager( self ): return self._tags_manager
    
    def GetTimestamp( self, service_key ): return None
    
    def HasArchive( self ): return self._archive
    
    def HasDuration( self ): return self._duration is not None
    
    def HasImages( self ): return True in ( media.HasImages() for media in self._sorted_media )
    
    def HasInbox( self ): return self._inbox
    
    def IsCollection( self ): return True
    
    def IsImage( self ): return False
    
    def IsNoisy( self ): return self.GetDisplayMedia().GetMime() in HC.NOISY_MIMES
    
    def IsSizeDefinite( self ): return self._size_definite
    
    def ProcessContentUpdate( self, service_key, content_update ):
        
        MediaList.ProcessContentUpdate( self, service_key, content_update )
        
        self._RecalcInternals()
        
    
    def RefreshFileInfo( self ):
        
        for media in self._sorted_media:
            media.RefreshFileInfo()
        
        self._RecalcInternals()
        
    
    def ResetService( self, service_key ):
        
        MediaList.ResetService( self, service_key )
        
        self._RecalcInternals()
        
    

class MediaSingleton( Media ):
    
    def __init__( self, media_result ):
        
        Media.__init__( self )
        
        self._media_result = media_result
        
    
    def Duplicate( self ):
        
        return MediaSingleton( self._media_result.Duplicate() )
        
    
    def GetDisplayMedia( self ):
        
        return self
        
    
    def GetDuration( self ):
        
        return self._media_result.GetDuration()
        
    
    def GetHash( self ):
        
        return self._media_result.GetHash()
        
    
    def MatchesDiscriminant( self, has_location = None, discriminant = None, not_uploaded_to = None ):
        
        if discriminant is not None:
            
            inbox = self._media_result.GetInbox()
            
            locations_manager = self._media_result.GetLocationsManager()
            
            if discriminant == CC.DISCRIMINANT_INBOX:
                p = inbox
            elif discriminant == CC.DISCRIMINANT_ARCHIVE:
                p = not inbox
            elif discriminant == CC.DISCRIMINANT_LOCAL:
                p = locations_manager.IsLocal()
            elif discriminant == CC.DISCRIMINANT_LOCAL_BUT_NOT_IN_TRASH:
                p = locations_manager.IsLocal() and not locations_manager.IsTrashed()
            elif discriminant == CC.DISCRIMINANT_NOT_LOCAL:
                p = not locations_manager.IsLocal()
            elif discriminant == CC.DISCRIMINANT_DOWNLOADING:
                p = locations_manager.IsDownloading()
            
            if not p:
                return False
            
        
        if has_location is not None:
            
            locations_manager = self._media_result.GetLocationsManager()
            
            if has_location not in locations_manager.GetCurrent():
                return False
            
        
        if not_uploaded_to is not None:
            
            locations_manager = self._media_result.GetLocationsManager()
            
            if not_uploaded_to in locations_manager.GetCurrentRemote():
                return False
            
        
        return True
        
    
    def GetHashes( self, has_location = None, discriminant = None, not_uploaded_to = None, ordered = False ):
        
        if self.MatchesDiscriminant( has_location = has_location, discriminant = discriminant, not_uploaded_to = not_uploaded_to ):
            
            if ordered:
                return [ self._media_result.GetHash() ]
            else:
                return { self._media_result.GetHash() }
            
        else:
            
            if ordered:
                return []
            else:
                return set()
            
        
    
    def GetLocationsManager( self ): return self._media_result.GetLocationsManager()
    
    def GetMediaResult( self ): return self._media_result
    
    def GetMime( self ): return self._media_result.GetMime()
    
    def GetNumFiles( self ): return 1
    
    def GetNumFrames( self ): return self._media_result.GetNumFrames()
    
    def GetNumInbox( self ):
        
        if self.HasInbox(): return 1
        else: return 0
        
    
    def GetNumWords( self ): return self._media_result.GetNumWords()
    
    def GetTimestamp( self, service_key ):
        
        return self._media_result.GetLocationsManager().GetTimestamp( service_key )
        
    
    def GetPrettyInfoLines( self ):
        
        file_info_manager = self._media_result.GetFileInfoManager()
        locations_manager = self._media_result.GetLocationsManager()
        
        ( hash, size, mime, width, height, duration, num_frames, num_words ) = file_info_manager.ToTuple()
        
        info_string = HydrusData.ConvertIntToBytes( size ) + ' ' + HC.mime_string_lookup[ mime ]
        
        if width is not None and height is not None: info_string += ' (' + HydrusData.ConvertIntToPrettyString( width ) + 'x' + HydrusData.ConvertIntToPrettyString( height ) + ')'
        
        if duration is not None: info_string += ', ' + HydrusData.ConvertMillisecondsToPrettyTime( duration )
        
        if num_frames is not None: info_string += ' (' + HydrusData.ConvertIntToPrettyString( num_frames ) + ' frames)'
        
        if num_words is not None: info_string += ' (' + HydrusData.ConvertIntToPrettyString( num_words ) + ' words)'
        
        lines = [ info_string ]
        
        locations_manager = self._media_result.GetLocationsManager()
        
        current_service_keys = locations_manager.GetCurrent()
        
        if CC.COMBINED_LOCAL_FILE_SERVICE_KEY in current_service_keys:
            
            timestamp = locations_manager.GetTimestamp( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
            
            lines.append( 'imported ' + HydrusData.ConvertTimestampToPrettyAgo( timestamp ) + ' ago' )
            
        
        if CC.TRASH_SERVICE_KEY in current_service_keys:
            
            timestamp = locations_manager.GetTimestamp( CC.TRASH_SERVICE_KEY )
            
            lines.append( 'trashed ' + HydrusData.ConvertTimestampToPrettyAgo( timestamp ) + ' ago' )
            
        
        for service_key in current_service_keys:
            
            if service_key in ( CC.COMBINED_LOCAL_FILE_SERVICE_KEY, CC.LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY ):
                continue
            
            timestamp = locations_manager.GetTimestamp( service_key )
            
            service = HG.client_controller.services_manager.GetService( service_key )
            
            service_type = service.GetServiceType()
            
            if service_type == HC.IPFS:
                status = 'pinned '
            else:
                status = 'uploaded '
            
            lines.append( status + 'to ' + service.GetName() + ' ' + HydrusData.ConvertTimestampToPrettyAgo( timestamp ) + ' ago' )
            
        
        return lines
        
    
    def GetRatingsManager( self ): return self._media_result.GetRatingsManager()
    
    def GetResolution( self ):
        
        ( width, height ) = self._media_result.GetResolution()
        
        if width is None: return ( 0, 0 )
        else: return ( width, height )
        
    
    def GetSize( self ):
        
        size = self._media_result.GetSize()
        
        if size is None: return 0
        else: return size
        
    
    def GetTagsManager( self ): return self._media_result.GetTagsManager()
    
    def GetTitleString( self ):
        
        new_options = HG.client_controller.new_options
        
        tags_summary_generator = new_options.GetTagSummaryGenerator( 'media_viewer_top' )
        
        tm = self.GetTagsManager()
        
        tags = tm.GetCurrent( CC.COMBINED_TAG_SERVICE_KEY ).union( tm.GetPending( CC.COMBINED_TAG_SERVICE_KEY ) )
        
        if len( tags ) == 0:
            return ''
        
        siblings_manager = HG.client_controller.GetManager( 'tag_siblings' )
        
        tags = siblings_manager.CollapseTags( CC.COMBINED_TAG_SERVICE_KEY, tags )
        
        summary = tags_summary_generator.GenerateSummary( tags )
        
        return summary
        
    
    def HasAnyOfTheseHashes( self, hashes ):
        
        return self._media_result.GetHash() in hashes
        
    
    def HasArchive( self ): return not self._media_result.GetInbox()
    
    def HasDuration( self ): return self._media_result.GetDuration() is not None and self._media_result.GetNumFrames() > 1
    
    def HasImages( self ): return self.IsImage()
    
    def HasInbox( self ): return self._media_result.GetInbox()
    
    def IsCollection( self ): return False
    
    def IsImage( self ): return self._media_result.GetMime() in HC.IMAGES
    
    def IsNoisy( self ): return self._media_result.GetMime() in HC.NOISY_MIMES
    
    def IsSizeDefinite( self ): return self._media_result.GetSize() is not None
    
    def RefreshFileInfo( self ):
        
        self._media_result.RefreshFileInfo()
        
    

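# MediaResult bundles the four per-file managers (file info, tags, locations, ratings)
# and forwards content updates to whichever manager matches the updating service's type.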
class MediaResult( object ):
    
    def __init__( self, file_info_manager, tags_manager, locations_manager, ratings_manager ):
        
        self._file_info_manager = file_info_manager
        self._tags_manager = tags_manager
        self._locations_manager = locations_manager
        self._ratings_manager = ratings_manager
        
    
    def DeletePending( self, service_key ):
        
        service = HG.client_controller.services_manager.GetService( service_key )
        
        service_type = service.GetServiceType()
        
        if service_type in HC.TAG_SERVICES:
            self._tags_manager.DeletePending( service_key )
        elif service_type in HC.FILE_SERVICES:
            self._locations_manager.DeletePending( service_key )
        
    
    def Duplicate( self ):
        
        file_info_manager = self._file_info_manager.Duplicate()
        tags_manager = self._tags_manager.Duplicate()
        locations_manager = self._locations_manager.Duplicate()
        ratings_manager = self._ratings_manager.Duplicate()
        
        return MediaResult( file_info_manager, tags_manager, locations_manager, ratings_manager )
        
    
    def GetDuration( self ):
        
        return self._file_info_manager.duration
        
    
    def GetFileInfoManager( self ):
        
        return self._file_info_manager
        
    
    def GetHash( self ):
        
        return self._file_info_manager.hash
        
    
    def GetInbox( self ):
        
        return self._locations_manager.GetInbox()
        
    
    def GetLocationsManager( self ):
        
        return self._locations_manager
        
    
    def GetMime( self ):
        
        return self._file_info_manager.mime
        
    
    def GetNumFrames( self ):
        
        return self._file_info_manager.num_frames
        
    
    def GetNumWords( self ):
        
        return self._file_info_manager.num_words
        
    
    def GetRatingsManager( self ):
        
        return self._ratings_manager
        
    
    def GetResolution( self ):
        
        return ( self._file_info_manager.width, self._file_info_manager.height )
        
    
    def GetSize( self ):
        
        return self._file_info_manager.size
        
    
    def GetTagsManager( self ):
        
        return self._tags_manager
        
    
    def ProcessContentUpdate( self, service_key, content_update ):
        
        ( data_type, action, row ) = content_update.ToTuple()
        
        service = HG.client_controller.services_manager.GetService( service_key )
        
        service_type = service.GetServiceType()
        
        if service_type in HC.TAG_SERVICES:
            self._tags_manager.ProcessContentUpdate( service_key, content_update )
        elif service_type in HC.FILE_SERVICES:
            self._locations_manager.ProcessContentUpdate( service_key, content_update )
        elif service_type in HC.RATINGS_SERVICES:
            self._ratings_manager.ProcessContentUpdate( service_key, content_update )
        
    
    def RefreshFileInfo( self ):
        
        media_results = HG.client_controller.Read( 'media_results', ( self._file_info_manager.hash, ) )
        
        if len( media_results ) > 0:
            
            media_result = media_results[0]
            
            self._file_info_manager = media_result._file_info_manager
            
        
    
    def ResetService( self, service_key ):
        
        self._tags_manager.ResetService( service_key )
        self._locations_manager.ResetService( service_key )
        
    
    def ToTuple( self ):
        
        return ( self._file_info_manager, self._tags_manager, self._locations_manager, self._ratings_manager )
        
    

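# MediaSort is the serialisable description of a sort: a ( metatype, data ) pair plus an
# ascending flag. GetSortKeyAndReverse turns it into a key function usable with
# SortedList.sort. A hedged usage sketch (file_service_key is hypothetical here):
#
#     media_sort = MediaSort( ( 'system', CC.SORT_FILES_BY_NUM_PIXELS ), CC.SORT_ASC )
#     ( sort_key, reverse ) = media_sort.GetSortKeyAndReverse( file_service_key )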
class MediaSort( HydrusSerialisable.SerialisableBase ):
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_MEDIA_SORT
    SERIALISABLE_NAME = 'Media Sort'
    SERIALISABLE_VERSION = 1
    
    def __init__( self, sort_type = None, sort_asc = None ):
        
        if sort_type is None:
            sort_type = ( 'system', CC.SORT_FILES_BY_FILESIZE )
        
        if sort_asc is None:
            sort_asc = CC.SORT_ASC
        
        self.sort_type = sort_type
        self.sort_asc = sort_asc
        
    
    def _GetSerialisableInfo( self ):
        
        ( sort_metatype, sort_data ) = self.sort_type
        
        if sort_metatype == 'system':
            serialisable_sort_data = sort_data
        elif sort_metatype == 'namespaces':
            serialisable_sort_data = sort_data
        elif sort_metatype == 'rating':
            
            service_key = sort_data
            
            serialisable_sort_data = service_key.encode( 'hex' )
            
        
        return ( sort_metatype, serialisable_sort_data, self.sort_asc )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        
        ( sort_metatype, serialisable_sort_data, self.sort_asc ) = serialisable_info
        
        if sort_metatype == 'system':
            sort_data = serialisable_sort_data
        elif sort_metatype == 'namespaces':
            sort_data = tuple( serialisable_sort_data )
        elif sort_metatype == 'rating':
            sort_data = serialisable_sort_data.decode( 'hex' )
        
        self.sort_type = ( sort_metatype, sort_data )
        
    
    def CanAsc( self ):
        
        ( sort_metatype, sort_data ) = self.sort_type
        
        if sort_metatype == 'system':
            
            if sort_data in ( CC.SORT_FILES_BY_MIME, CC.SORT_FILES_BY_RANDOM ):
                return False
            
        elif sort_metatype == 'namespaces':
            
            return False
            
        
        return True
        
    
    def GetSortKeyAndReverse( self, file_service_key ):
        
        reverse = False
        
        ( sort_metadata, sort_data ) = self.sort_type
        
        def deal_with_none( x ):
            
            if x is None: return -1
            else: return x
            
        
        if sort_metadata == 'system':
            
            if sort_data == CC.SORT_FILES_BY_RANDOM:
                
                def sort_key( x ):
                    return random.random()
                
            elif sort_data == CC.SORT_FILES_BY_FILESIZE:
                
                def sort_key( x ):
                    return deal_with_none( x.GetSize() )
                
            elif sort_data == CC.SORT_FILES_BY_DURATION:
                
                def sort_key( x ):
                    return deal_with_none( x.GetDuration() )
                
            elif sort_data == CC.SORT_FILES_BY_IMPORT_TIME:
                
                file_service = HG.client_controller.services_manager.GetService( file_service_key )
                
                file_service_type = file_service.GetServiceType()
                
                if file_service_type == HC.LOCAL_FILE_DOMAIN:
                    file_service_key = CC.COMBINED_LOCAL_FILE_SERVICE_KEY
                
                def sort_key( x ):
                    return deal_with_none( x.GetTimestamp( file_service_key ) )
                
            elif sort_data == CC.SORT_FILES_BY_HEIGHT:
                
                def sort_key( x ):
                    return deal_with_none( x.GetResolution()[1] )
                
            elif sort_data == CC.SORT_FILES_BY_WIDTH:
                
                def sort_key( x ):
                    return deal_with_none( x.GetResolution()[0] )
                
            elif sort_data == CC.SORT_FILES_BY_RATIO:
                
                def sort_key( x ):
                    
                    ( width, height ) = x.GetResolution()
                    
                    if width is None or height is None or width == 0 or height == 0:
                        return -1
                    else:
                        return float( width ) / float( height )
                    
                
            elif sort_data == CC.SORT_FILES_BY_NUM_PIXELS:
                
                def sort_key( x ):
                    
                    ( width, height ) = x.GetResolution()
                    
                    if width is None or height is None:
                        return -1
                    else:
                        return width * height
                    
                
            elif sort_data == CC.SORT_FILES_BY_NUM_TAGS:
                
                def sort_key( x ):
                    
                    tags_manager = x.GetTagsManager()
                    
                    return len( tags_manager.GetCurrent() ) + len( tags_manager.GetPending() )
                    
                
            elif sort_data == CC.SORT_FILES_BY_MIME:
                
                def sort_key( x ):
                    return x.GetMime()
                
            
        elif sort_metadata == 'namespaces':
            
            namespaces = sort_data
            
            def sort_key( x ):
                
                x_tags_manager = x.GetTagsManager()
                
                return [ x_tags_manager.GetComparableNamespaceSlice( ( namespace, ) ) for namespace in namespaces ]
                
            
        elif sort_metadata == 'rating':
            
            service_key = sort_data
            
            def sort_key( x ):
                
                x_ratings_manager = x.GetRatingsManager()
                
                rating = deal_with_none( x_ratings_manager.GetRating( service_key ) )
                
                return rating
                
            
        
        return ( sort_key, self.sort_asc )
        
    
def GetSortTypeString( self ):
( sort_metatype, sort_data ) = self.sort_type
sort_string = 'sort by '
if sort_metatype == 'system':
sort_string_lookup = {}
sort_string_lookup[ CC.SORT_FILES_BY_FILESIZE ] = 'filesize'
sort_string_lookup[ CC.SORT_FILES_BY_DURATION ] = 'duration'
2017-08-16 21:58:06 +00:00
sort_string_lookup[ CC.SORT_FILES_BY_IMPORT_TIME ] = 'time imported'
2017-08-09 21:33:51 +00:00
sort_string_lookup[ CC.SORT_FILES_BY_MIME ] = 'mime'
sort_string_lookup[ CC.SORT_FILES_BY_RANDOM ] = 'random'
sort_string_lookup[ CC.SORT_FILES_BY_WIDTH ] = 'width'
sort_string_lookup[ CC.SORT_FILES_BY_HEIGHT ] = 'height'
sort_string_lookup[ CC.SORT_FILES_BY_RATIO ] = 'resolution ratio'
sort_string_lookup[ CC.SORT_FILES_BY_NUM_PIXELS ] = 'number of pixels'
sort_string_lookup[ CC.SORT_FILES_BY_NUM_TAGS ] = 'number of tags'
sort_string += sort_string_lookup[ sort_data ]
elif sort_metatype == 'namespaces':
namespaces = sort_data
sort_string += '-'.join( namespaces )
elif sort_metatype == 'rating':
service_key = sort_data
service = HG.client_controller.services_manager.GetService( service_key )
sort_string += service.GetName()
return sort_string
def GetSortAscStrings( self ):
( sort_metatype, sort_data ) = self.sort_type
if sort_metatype == 'system':
sort_string_lookup = {}
sort_string_lookup[ CC.SORT_FILES_BY_FILESIZE ] = ( 'smallest first', 'largest first' )
sort_string_lookup[ CC.SORT_FILES_BY_DURATION ] = ( 'shortest first', 'longest first' )
sort_string_lookup[ CC.SORT_FILES_BY_IMPORT_TIME ] = ( 'oldest first', 'newest first' )
sort_string_lookup[ CC.SORT_FILES_BY_MIME ] = ( 'mime', 'mime' )
sort_string_lookup[ CC.SORT_FILES_BY_RANDOM ] = ( 'random', 'random' )
sort_string_lookup[ CC.SORT_FILES_BY_WIDTH ] = ( 'slimmest first', 'widest first' )
sort_string_lookup[ CC.SORT_FILES_BY_HEIGHT ] = ( 'shortest first', 'tallest first' )
sort_string_lookup[ CC.SORT_FILES_BY_RATIO ] = ( 'tallest first', 'widest first' )
sort_string_lookup[ CC.SORT_FILES_BY_NUM_PIXELS ] = ( 'ascending', 'descending' )
sort_string_lookup[ CC.SORT_FILES_BY_NUM_TAGS ] = ( 'ascending', 'descending' )
return sort_string_lookup[ sort_data ]
else:
return ( 'ascending', 'descending' )
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_MEDIA_SORT ] = MediaSort
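# A minimal sketch of how a MediaSort is typically consumed, assuming 'media_list'
# is a sequence of media objects exposing GetSize / GetTagsManager / etc. and
# 'file_service_key' names a file service; both are placeholder names:
#
#     media_sort = MediaSort( sort_type = ( 'system', CC.SORT_FILES_BY_FILESIZE ), sort_asc = CC.SORT_ASC )
#
#     ( sort_key, sort_asc ) = media_sort.GetSortKeyAndReverse( file_service_key )
#
#     sorted_media = SortedList( media_list ) # the SortedList defined below
#     sorted_media.sort( sort_key )
#
# Despite the method name, the second value returned by GetSortKeyAndReverse is the
# raw sort_asc flag, so mapping it onto a boolean 'reverse' argument is left to the caller.
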
class SortedList( object ):
    
    def __init__( self, initial_items = None ):
        if initial_items is None:
            initial_items = []
        self._sort_key = None
        self._sort_reverse = False
        self._sorted_list = list( initial_items )
        self._items_to_indices = None
    
    def __contains__( self, item ):
        # lazily rebuild the item -> index lookup, as index() does, so this cannot hit a None lookup
        if self._items_to_indices is None:
            self._RecalcIndices()
        return self._items_to_indices.__contains__( item )
    
    def __getitem__( self, value ):
        return self._sorted_list.__getitem__( value )
    
    def __iter__( self ):
        return iter( self._sorted_list )
    
    def __len__( self ):
        return len( self._sorted_list )
    
    def _DirtyIndices( self ):
        self._items_to_indices = None
    
    def _RecalcIndices( self ):
        self._items_to_indices = { item : index for ( index, item ) in enumerate( self._sorted_list ) }
    
    def append_items( self, items ):
        if self._items_to_indices is None:
            self._RecalcIndices()
        for ( i, item ) in enumerate( items, start = len( self._sorted_list ) ):
            self._items_to_indices[ item ] = i
        self._sorted_list.extend( items )
    
    def index( self, item ):
        if self._items_to_indices is None:
            self._RecalcIndices()
        try:
            result = self._items_to_indices[ item ]
        except KeyError:
            raise HydrusExceptions.DataMissing()
        return result
    
    def insert_items( self, items ):
        self.append_items( items )
        self.sort()
    
    def remove_items( self, items ):
        deletee_indices = [ self.index( item ) for item in items ]
        deletee_indices.sort()
        deletee_indices.reverse()
        for index in deletee_indices:
            del self._sorted_list[ index ]
        self._DirtyIndices()
    
    def sort( self, sort_key = None, reverse = False ):
        if sort_key is None:
            sort_key = self._sort_key
            reverse = self._sort_reverse
        else:
            self._sort_key = sort_key
            self._sort_reverse = reverse
        self._sorted_list.sort( key = sort_key, reverse = reverse )
        self._DirtyIndices()

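# A small sketch of the SortedList contract above, using plain integers as the items
# (any hashable objects work, since indices are tracked in a dict); the names are illustrative:
#
#     my_list = SortedList( [ 3, 1, 2 ] )
#
#     my_list.sort( sort_key = lambda x: x ) # remembers the key for later re-sorts
#
#     my_list.insert_items( [ 0, 4 ] ) # appends, then re-sorts with the remembered key
#
#     position = my_list.index( 4 ) # -> 4, via the lazily rebuilt item -> index lookup
#
#     my_list.remove_items( [ 0 ] )
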
class TagsManagerSimple( object ):
    
    def __init__( self, service_keys_to_statuses_to_tags ):
        self._service_keys_to_statuses_to_tags = service_keys_to_statuses_to_tags
        self._combined_namespaces_cache = None
    
    def _RecalcCombinedIfNeeded( self ):
        pass
    
    def Duplicate( self ):
        dupe_service_keys_to_statuses_to_tags = collections.defaultdict( HydrusData.default_dict_set )
        for ( service_key, statuses_to_tags ) in self._service_keys_to_statuses_to_tags.items():
            dupe_statuses_to_tags = HydrusData.default_dict_set()
            for ( status, tags ) in statuses_to_tags.items():
                dupe_statuses_to_tags[ status ] = set( tags )
            dupe_service_keys_to_statuses_to_tags[ service_key ] = dupe_statuses_to_tags
        return TagsManagerSimple( dupe_service_keys_to_statuses_to_tags )
    
    def GetCombinedNamespaces( self, namespaces ):
        self._RecalcCombinedIfNeeded()
        if self._combined_namespaces_cache is None:
            combined_statuses_to_tags = self._service_keys_to_statuses_to_tags[ CC.COMBINED_TAG_SERVICE_KEY ]
            combined_current = combined_statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]
            combined_pending = combined_statuses_to_tags[ HC.CONTENT_STATUS_PENDING ]
            pairs = ( HydrusTags.SplitTag( tag ) for tag in combined_current.union( combined_pending ) )
            self._combined_namespaces_cache = HydrusData.BuildKeyToSetDict( ( namespace, subtag ) for ( namespace, subtag ) in pairs if namespace != '' )
        result = { namespace : self._combined_namespaces_cache[ namespace ] for namespace in namespaces }
        return result
    
    def GetComparableNamespaceSlice( self, namespaces ):
        self._RecalcCombinedIfNeeded()
        combined_statuses_to_tags = self._service_keys_to_statuses_to_tags[ CC.COMBINED_TAG_SERVICE_KEY ]
        combined_current = combined_statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]
        combined_pending = combined_statuses_to_tags[ HC.CONTENT_STATUS_PENDING ]
        combined = combined_current.union( combined_pending )
        pairs = [ HydrusTags.SplitTag( tag ) for tag in combined ]
        slice = []
        for desired_namespace in namespaces:
            subtags = [ HydrusTags.ConvertTagToSortable( subtag ) for ( namespace, subtag ) in pairs if namespace == desired_namespace ]
            subtags.sort()
            slice.append( tuple( subtags ) )
        return tuple( slice )
    
    def GetCurrent( self, service_key = CC.COMBINED_TAG_SERVICE_KEY ):
        if service_key == CC.COMBINED_TAG_SERVICE_KEY:
            self._RecalcCombinedIfNeeded()
        statuses_to_tags = self._service_keys_to_statuses_to_tags[ service_key ]
        return statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]
    
    def GetDeleted( self, service_key = CC.COMBINED_TAG_SERVICE_KEY ):
        if service_key == CC.COMBINED_TAG_SERVICE_KEY:
            self._RecalcCombinedIfNeeded()
        statuses_to_tags = self._service_keys_to_statuses_to_tags[ service_key ]
        return statuses_to_tags[ HC.CONTENT_STATUS_DELETED ]
    
    def GetNamespaceSlice( self, namespaces ):
        self._RecalcCombinedIfNeeded()
        combined_statuses_to_tags = self._service_keys_to_statuses_to_tags[ CC.COMBINED_TAG_SERVICE_KEY ]
        combined_current = combined_statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]
        combined_pending = combined_statuses_to_tags[ HC.CONTENT_STATUS_PENDING ]
        combined = combined_current.union( combined_pending )
        slice = { tag for tag in combined if True in ( tag.startswith( namespace + ':' ) for namespace in namespaces ) }
        slice = frozenset( slice )
        return slice
    
    def GetPending( self, service_key = CC.COMBINED_TAG_SERVICE_KEY ):
        if service_key == CC.COMBINED_TAG_SERVICE_KEY:
            self._RecalcCombinedIfNeeded()
        statuses_to_tags = self._service_keys_to_statuses_to_tags[ service_key ]
        return statuses_to_tags[ HC.CONTENT_STATUS_PENDING ]
    
    def GetPetitioned( self, service_key = CC.COMBINED_TAG_SERVICE_KEY ):
        if service_key == CC.COMBINED_TAG_SERVICE_KEY:
            self._RecalcCombinedIfNeeded()
        statuses_to_tags = self._service_keys_to_statuses_to_tags[ service_key ]
        return statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ]

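# A short sketch of the namespace helpers above, assuming a 'tags_manager' whose
# combined service currently holds { 'series:metroid', 'page:2', 'samus aran' };
# the instance and tag values are illustrative only:
#
#     tags_manager.GetNamespaceSlice( ( 'series', ) )
#     # -> frozenset( { 'series:metroid' } )
#
#     tags_manager.GetComparableNamespaceSlice( ( 'page', ) )
#     # -> a one-element tuple holding the sorted, comparable 'page' subtags, suitable for sort keys
#
#     tags_manager.GetCombinedNamespaces( ( 'series', 'page' ) )
#     # -> { 'series' : { 'metroid' }, 'page' : { '2' } }
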
class TagsManager( TagsManagerSimple ):
    
    def __init__( self, service_keys_to_statuses_to_tags ):
        TagsManagerSimple.__init__( self, service_keys_to_statuses_to_tags )
        self._combined_is_calculated = False
        HG.client_controller.sub( self, 'NewSiblings', 'notify_new_siblings_data' )
    
    def _RecalcCombinedIfNeeded( self ):
        if not self._combined_is_calculated:
            # combined tags are pre-collapsed by siblings
            siblings_manager = HG.client_controller.GetManager( 'tag_siblings' )
            combined_statuses_to_tags = collections.defaultdict( set )
            for ( service_key, statuses_to_tags ) in self._service_keys_to_statuses_to_tags.items():
                if service_key == CC.COMBINED_TAG_SERVICE_KEY:
                    continue
                statuses_to_tags = siblings_manager.CollapseStatusesToTags( service_key, statuses_to_tags )
                combined_statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ].update( statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ] )
                combined_statuses_to_tags[ HC.CONTENT_STATUS_PENDING ].update( statuses_to_tags[ HC.CONTENT_STATUS_PENDING ] )
                combined_statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ].update( statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ] )
                combined_statuses_to_tags[ HC.CONTENT_STATUS_DELETED ].update( statuses_to_tags[ HC.CONTENT_STATUS_DELETED ] )
            self._service_keys_to_statuses_to_tags[ CC.COMBINED_TAG_SERVICE_KEY ] = combined_statuses_to_tags
            self._combined_namespaces_cache = None
            self._combined_is_calculated = True
    
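    # In short, the first combined-service query after any change does one sibling-collapse
    # pass over every specific service. A sketch, with 'tags_manager' and
    # 'some_specific_service_key' as placeholder names:
    #
    #     tags_manager.GetCurrent( CC.COMBINED_TAG_SERVICE_KEY )
    #     # -> union of every service's current tags, each collapsed by the siblings manager
    #
    #     tags_manager.GetCurrent( some_specific_service_key )
    #     # -> that service's raw current tags, untouched by siblings and with no recalc
    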
    def DeletePending( self, service_key ):
        statuses_to_tags = self._service_keys_to_statuses_to_tags[ service_key ]
        if len( statuses_to_tags[ HC.CONTENT_STATUS_PENDING ] ) + len( statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ] ) > 0:
            statuses_to_tags[ HC.CONTENT_STATUS_PENDING ] = set()
            statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ] = set()
            self._combined_is_calculated = False
    
    def Duplicate( self ):
        dupe_service_keys_to_statuses_to_tags = collections.defaultdict( HydrusData.default_dict_set )
        for ( service_key, statuses_to_tags ) in self._service_keys_to_statuses_to_tags.items():
            dupe_statuses_to_tags = HydrusData.default_dict_set()
            for ( status, tags ) in statuses_to_tags.items():
                dupe_statuses_to_tags[ status ] = set( tags )
            dupe_service_keys_to_statuses_to_tags[ service_key ] = dupe_statuses_to_tags
        return TagsManager( dupe_service_keys_to_statuses_to_tags )
    
    def GetNumTags( self, service_key, include_current_tags = True, include_pending_tags = False ):
        if service_key == CC.COMBINED_TAG_SERVICE_KEY:
            self._RecalcCombinedIfNeeded()
        num_tags = 0
        statuses_to_tags = self.GetStatusesToTags( service_key )
        if include_current_tags: num_tags += len( statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ] )
        if include_pending_tags: num_tags += len( statuses_to_tags[ HC.CONTENT_STATUS_PENDING ] )
        return num_tags
    
    def GetServiceKeysToStatusesToTags( self ):
        self._RecalcCombinedIfNeeded()
        return self._service_keys_to_statuses_to_tags
    
    def GetStatusesToTags( self, service_key ):
        if service_key == CC.COMBINED_TAG_SERVICE_KEY:
            self._RecalcCombinedIfNeeded()
        return self._service_keys_to_statuses_to_tags[ service_key ]
    
    def HasTag( self, tag ):
        self._RecalcCombinedIfNeeded()
        combined_statuses_to_tags = self._service_keys_to_statuses_to_tags[ CC.COMBINED_TAG_SERVICE_KEY ]
        return tag in combined_statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ] or tag in combined_statuses_to_tags[ HC.CONTENT_STATUS_PENDING ]
    
    def NewSiblings( self ):
        self._combined_is_calculated = False
    
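    # For example, with placeholder names:
    #
    #     tags_manager.GetNumTags( some_service_key, include_current_tags = True, include_pending_tags = True )
    #     # -> current + pending count for that service
    #
    #     tags_manager.HasTag( 'samus aran' )
    #     # -> True if the tag is current or pending on the combined service
    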
    def ProcessContentUpdate( self, service_key, content_update ):
        statuses_to_tags = self._service_keys_to_statuses_to_tags[ service_key ]
        ( data_type, action, row ) = content_update.ToTuple()
        if action == HC.CONTENT_UPDATE_PETITION:
            ( tag, hashes, reason ) = row
        else:
            ( tag, hashes ) = row
        if action == HC.CONTENT_UPDATE_ADD:
            statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ].add( tag )
            statuses_to_tags[ HC.CONTENT_STATUS_DELETED ].discard( tag )
            statuses_to_tags[ HC.CONTENT_STATUS_PENDING ].discard( tag )
        elif action == HC.CONTENT_UPDATE_DELETE:
            statuses_to_tags[ HC.CONTENT_STATUS_DELETED ].add( tag )
            statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ].discard( tag )
            statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ].discard( tag )
        elif action == HC.CONTENT_UPDATE_PEND:
            if tag not in statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]:
                statuses_to_tags[ HC.CONTENT_STATUS_PENDING ].add( tag )
        elif action == HC.CONTENT_UPDATE_RESCIND_PEND:
            statuses_to_tags[ HC.CONTENT_STATUS_PENDING ].discard( tag )
        elif action == HC.CONTENT_UPDATE_PETITION:
            if tag in statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]:
                statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ].add( tag )
        elif action == HC.CONTENT_UPDATE_RESCIND_PETITION:
            statuses_to_tags[ HC.CONTENT_STATUS_PETITIONED ].discard( tag )
        self._combined_is_calculated = False
    
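    # A minimal sketch of feeding ProcessContentUpdate by hand, assuming 'content_update'
    # is the client's usual content update object, i.e. something whose ToTuple() yields
    # ( data_type, action, row ) as unpacked above, and 'tag_repo_service_key' is a
    # placeholder tag service key:
    #
    #     tags_manager.ProcessContentUpdate( tag_repo_service_key, content_update )
    #
    #     # a PEND only sticks if the tag is not already current on that service
    #     'series:metroid' in tags_manager.GetPending( tag_repo_service_key )
    #
    # Every update also marks the combined service dirty, so the next combined query
    # re-runs _RecalcCombinedIfNeeded.
    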
    def ResetService( self, service_key ):
        if service_key in self._service_keys_to_statuses_to_tags:
            del self._service_keys_to_statuses_to_tags[ service_key ]
            self._combined_is_calculated = False
    