2015-03-18 21:46:29 +00:00
|
|
|
import collections
|
2021-07-14 20:42:19 +00:00
|
|
|
import itertools
|
2020-05-20 21:36:02 +00:00
|
|
|
import random
|
2020-05-06 21:31:41 +00:00
|
|
|
import typing
|
|
|
|
|
2020-04-22 21:00:35 +00:00
|
|
|
from hydrus.core import HydrusConstants as HC
|
|
|
|
from hydrus.core import HydrusText
|
|
|
|
from hydrus.core import HydrusData
|
|
|
|
from hydrus.core import HydrusExceptions
|
|
|
|
from hydrus.core import HydrusGlobals as HG
|
|
|
|
from hydrus.core import HydrusImageHandling
|
|
|
|
from hydrus.core import HydrusSerialisable
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2020-07-29 20:52:44 +00:00
|
|
|
from hydrus.client import ClientConstants as CC
|
|
|
|
from hydrus.client import ClientData
|
2022-01-19 21:28:59 +00:00
|
|
|
from hydrus.client import ClientLocation
|
|
|
|
from hydrus.client import ClientSearch
|
2020-07-29 20:52:44 +00:00
|
|
|
from hydrus.client.media import ClientMediaManagers
|
|
|
|
from hydrus.client.media import ClientMediaResult
|
2020-08-05 20:10:36 +00:00
|
|
|
from hydrus.client.metadata import ClientTags
|
2020-07-29 20:52:44 +00:00
|
|
|
|
2019-06-05 19:42:39 +00:00
|
|
|
# Module-level cache: file hash -> ( quality label, quantization estimate ) tuple,
# as produced by HydrusImageHandling.GetJPEGQuantizationQualityEstimate.
# NOTE(review): grows without bound for the lifetime of the process.
hashes_to_jpeg_quality = {}

# Module-level cache: file hash -> pixel hash, used by the pixel-for-pixel
# duplicate check in GetDuplicateComparisonStatements.
# NOTE(review): also unbounded; consider eviction if memory becomes a concern.
hashes_to_pixel_hashes = {}
|
2019-06-05 19:42:39 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
def FilterServiceKeysToContentUpdates( full_service_keys_to_content_updates, hashes ):
    """
    Filter a { service_key : [ content_update, ... ] } mapping down to only
    those content updates that touch at least one of the given hashes.
    
    Services that end up with no matching updates are omitted entirely.
    Returns a collections.defaultdict( list ).
    """
    
    hash_lookup = hashes if isinstance( hashes, set ) else set( hashes )
    
    filtered_service_keys_to_content_updates = collections.defaultdict( list )
    
    for ( service_key, full_content_updates ) in full_service_keys_to_content_updates.items():
        
        matching_content_updates = [ content_update for content_update in full_content_updates if not hash_lookup.isdisjoint( content_update.GetHashes() ) ]
        
        if len( matching_content_updates ) > 0:
            
            filtered_service_keys_to_content_updates[ service_key ] = matching_content_updates
            
        
    
    return filtered_service_keys_to_content_updates
    
|
2016-06-15 18:59:44 +00:00
|
|
|
def FlattenMedia( media_list ):
    """
    Expand any collections in media_list into their constituent media,
    preserving order. Singleton media pass through unchanged.
    """
    
    flat_media = []
    
    for media in media_list:
        
        members = media.GetFlatMedia() if media.IsCollection() else [ media ]
        
        flat_media.extend( members )
        
    
    return flat_media
    
|
|
2019-05-08 21:06:42 +00:00
|
|
|
def GetDuplicateComparisonScore( shown_media, comparison_media ):
    """
    Sum the per-category scores from GetDuplicateComparisonStatements into a
    single number. Positive favours shown_media, negative favours
    comparison_media.
    """
    
    comparison_results = GetDuplicateComparisonStatements( shown_media, comparison_media )
    
    return sum( score for ( statement, score ) in comparison_results.values() )
    
|
2017-05-17 21:53:02 +00:00
|
|
|
def GetDuplicateComparisonStatements( shown_media, comparison_media ):
    """
    Compare two potential duplicates and return a dict of
    { comparison_category : ( human-readable statement, score ) }.
    
    A positive score favours shown_media, a negative score favours
    comparison_media (see GetDuplicateComparisonScore, which sums them).
    """
    
    new_options = HG.client_controller.new_options
    
    # user-configurable weights for each class of difference
    duplicate_comparison_score_higher_jpeg_quality = new_options.GetInteger( 'duplicate_comparison_score_higher_jpeg_quality' )
    duplicate_comparison_score_much_higher_jpeg_quality = new_options.GetInteger( 'duplicate_comparison_score_much_higher_jpeg_quality' )
    duplicate_comparison_score_higher_filesize = new_options.GetInteger( 'duplicate_comparison_score_higher_filesize' )
    duplicate_comparison_score_much_higher_filesize = new_options.GetInteger( 'duplicate_comparison_score_much_higher_filesize' )
    duplicate_comparison_score_higher_resolution = new_options.GetInteger( 'duplicate_comparison_score_higher_resolution' )
    duplicate_comparison_score_much_higher_resolution = new_options.GetInteger( 'duplicate_comparison_score_much_higher_resolution' )
    duplicate_comparison_score_more_tags = new_options.GetInteger( 'duplicate_comparison_score_more_tags' )
    duplicate_comparison_score_older = new_options.GetInteger( 'duplicate_comparison_score_older' )
    duplicate_comparison_score_nicer_ratio = new_options.GetInteger( 'duplicate_comparison_score_nicer_ratio' )
    
    #
    
    statements_and_scores = {}
    
    s_hash = shown_media.GetHash()
    c_hash = comparison_media.GetHash()
    
    s_mime = shown_media.GetMime()
    c_mime = comparison_media.GetMime()
    
    # size
    
    s_size = shown_media.GetSize()
    c_size = comparison_media.GetSize()
    
    is_a_pixel_dupe = False
    
    # pixel-for-pixel comparison only makes sense for static images of identical resolution
    if shown_media.IsStaticImage() and comparison_media.IsStaticImage() and shown_media.GetResolution() == comparison_media.GetResolution():
        
        # pixel hashes are expensive to compute, so they are cached module-wide
        global hashes_to_pixel_hashes
        
        if s_hash not in hashes_to_pixel_hashes:
            
            path = HG.client_controller.client_files_manager.GetFilePath( s_hash, s_mime )
            
            hashes_to_pixel_hashes[ s_hash ] = HydrusImageHandling.GetImagePixelHash( path, s_mime )
            
        
        if c_hash not in hashes_to_pixel_hashes:
            
            path = HG.client_controller.client_files_manager.GetFilePath( c_hash, c_mime )
            
            hashes_to_pixel_hashes[ c_hash ] = HydrusImageHandling.GetImagePixelHash( path, c_mime )
            
        
        s_pixel_hash = hashes_to_pixel_hashes[ s_hash ]
        c_pixel_hash = hashes_to_pixel_hashes[ c_hash ]
        
        if s_pixel_hash == c_pixel_hash:
            
            is_a_pixel_dupe = True
            
            # a png that is a pixel dupe of a non-png is weighted heavily against (-100/+100)
            if s_mime == HC.IMAGE_PNG and c_mime != HC.IMAGE_PNG:
                
                statement = 'this is a pixel-for-pixel duplicate png!'
                
                score = -100
                
            elif s_mime != HC.IMAGE_PNG and c_mime == HC.IMAGE_PNG:
                
                statement = 'other file is a pixel-for-pixel duplicate png!'
                
                score = 100
                
            else:
                
                statement = 'images are pixel-for-pixel duplicates!'
                
                score = 0
                
            
            statements_and_scores[ 'pixel_duplicates' ] = ( statement, score )
            
        
    
    if s_size != c_size:
        
        # ratio of larger to smaller, always >= 1.0
        absolute_size_ratio = max( s_size, c_size ) / min( s_size, c_size )
        
        if absolute_size_ratio > 2.0:
            
            if s_size > c_size:
                
                operator = '>>'
                
                score = duplicate_comparison_score_much_higher_filesize
                
            else:
                
                operator = '<<'
                
                score = -duplicate_comparison_score_much_higher_filesize
                
            
        elif absolute_size_ratio > 1.05:
            
            if s_size > c_size:
                
                operator = '>'
                
                score = duplicate_comparison_score_higher_filesize
                
            else:
                
                operator = '<'
                
                score = -duplicate_comparison_score_higher_filesize
                
            
        else:
            
            # sizes within ~5% of each other: call it a wash
            operator = CC.UNICODE_ALMOST_EQUAL_TO
            
            score = 0
            
        
        if is_a_pixel_dupe:
            
            # identical pixels means filesize difference is not a quality signal
            score = 0
            
        
        statement = '{} {} {}'.format( HydrusData.ToHumanBytes( s_size ), operator, HydrusData.ToHumanBytes( c_size ) )
        
        statements_and_scores[ 'filesize' ] = ( statement, score )
        
    
    # higher/same res
    
    s_resolution = shown_media.GetResolution()
    c_resolution = comparison_media.GetResolution()
    
    if s_resolution is not None and c_resolution is not None and s_resolution != c_resolution:
        
        s_res = shown_media.GetResolution()
        c_res = comparison_media.GetResolution()
        
        ( s_w, s_h ) = s_res
        ( c_w, c_h ) = c_res
        
        # compare total pixel count, not individual dimensions
        resolution_ratio = ( s_w * s_h ) / ( c_w * c_h )
        
        if resolution_ratio == 1.0:
            
            # same pixel count but different shape
            operator = '!='
            
            score = 0
            
        elif resolution_ratio > 2.0:
            
            operator = '>>'
            
            score = duplicate_comparison_score_much_higher_resolution
            
        elif resolution_ratio > 1.00:
            
            operator = '>'
            
            score = duplicate_comparison_score_higher_resolution
            
        elif resolution_ratio < 0.5:
            
            operator = '<<'
            
            score = -duplicate_comparison_score_much_higher_resolution
            
        else:
            
            operator = '<'
            
            score = -duplicate_comparison_score_higher_resolution
            
        
        if s_res in HC.NICE_RESOLUTIONS:
            
            s_string = HC.NICE_RESOLUTIONS[ s_res ]
            
        else:
            
            s_string = HydrusData.ConvertResolutionToPrettyString( s_resolution )
            
            # odd dimensions get flagged as unusual
            if s_w % 2 == 1 or s_h % 2 == 1:
                
                s_string += ' (unusual)'
                
            
        
        if c_res in HC.NICE_RESOLUTIONS:
            
            c_string = HC.NICE_RESOLUTIONS[ c_res ]
            
        else:
            
            c_string = HydrusData.ConvertResolutionToPrettyString( c_resolution )
            
            if c_w % 2 == 1 or c_h % 2 == 1:
                
                c_string += ' (unusual)'
                
            
        
        statement = '{} {} {}'.format( s_string, operator, c_string )
        
        statements_and_scores[ 'resolution' ] = ( statement, score )
        
        #
        
        # aspect-ratio comparison: only reported when resolutions differ and at
        # least one side has a 'nice' (recognised) ratio
        s_ratio = s_w / s_h
        c_ratio = c_w / c_h
        
        s_nice = s_ratio in HC.NICE_RATIOS
        c_nice = c_ratio in HC.NICE_RATIOS
        
        if s_nice or c_nice:
            
            if s_nice:
                
                s_string = HC.NICE_RATIOS[ s_ratio ]
                
            else:
                
                s_string = 'unusual'
                
            
            if c_nice:
                
                c_string = HC.NICE_RATIOS[ c_ratio ]
                
            else:
                
                c_string = 'unusual'
                
            
            if s_nice and c_nice:
                
                operator = '-'
                
                score = 0
                
            elif s_nice:
                
                operator = '>'
                
                score = duplicate_comparison_score_nicer_ratio
                
            elif c_nice:
                
                operator = '<'
                
                score = -duplicate_comparison_score_nicer_ratio
                
            
            if s_string == c_string:
                
                statement = 'both {}'.format( s_string )
                
            else:
                
                statement = '{} {} {}'.format( s_string, operator, c_string )
                
            
            statements_and_scores[ 'ratio' ] = ( statement, score )
            
        
    
    # same/diff mime
    
    if s_mime != c_mime:
        
        # informational only; mime difference alone carries no score
        statement = '{} vs {}'.format( HC.mime_string_lookup[ s_mime ], HC.mime_string_lookup[ c_mime ] )
        score = 0
        
        statements_and_scores[ 'mime' ] = ( statement, score )
        
    
    # more tags
    
    s_num_tags = len( shown_media.GetTagsManager().GetCurrentAndPending( CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_ACTUAL ) )
    c_num_tags = len( comparison_media.GetTagsManager().GetCurrentAndPending( CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_ACTUAL ) )
    
    if s_num_tags != c_num_tags:
        
        if s_num_tags > 0 and c_num_tags > 0:
            
            if s_num_tags > c_num_tags:
                
                operator = '>'
                
                score = duplicate_comparison_score_more_tags
                
            else:
                
                operator = '<'
                
                score = -duplicate_comparison_score_more_tags
                
            
        elif s_num_tags > 0:
            
            # some tags vs none at all gets the stronger '>>' display
            operator = '>>'
            
            score = duplicate_comparison_score_more_tags
            
        elif c_num_tags > 0:
            
            operator = '<<'
            
            score = -duplicate_comparison_score_more_tags
            
        
        statement = '{} tags {} {} tags'.format( HydrusData.ToHumanInt( s_num_tags ), operator, HydrusData.ToHumanInt( c_num_tags ) )
        
        statements_and_scores[ 'num_tags' ] = ( statement, score )
        
    
    # older
    
    s_ts = shown_media.GetLocationsManager().GetCurrentTimestamp( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
    c_ts = comparison_media.GetLocationsManager().GetCurrentTimestamp( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
    
    one_month = 86400 * 30
    
    # only call out import age when the difference exceeds a month
    if s_ts is not None and c_ts is not None and abs( s_ts - c_ts ) > one_month:
        
        if s_ts < c_ts:
            
            operator = 'older than'
            
            score = duplicate_comparison_score_older
            
        else:
            
            operator = 'newer than'
            
            score = -duplicate_comparison_score_older
            
        
        if is_a_pixel_dupe:
            
            # identical pixels: age difference is not a quality signal
            score = 0
            
        
        statement = '{}, {} {}'.format( ClientData.TimestampToPrettyTimeDelta( s_ts, history_suffix = ' old' ), operator, ClientData.TimestampToPrettyTimeDelta( c_ts, history_suffix = ' old' ) )
        
        statements_and_scores[ 'time_imported' ] = ( statement, score )
        
    
    if s_mime == HC.IMAGE_JPEG and c_mime == HC.IMAGE_JPEG:
        
        # jpeg quality estimation is expensive, so it is cached module-wide
        global hashes_to_jpeg_quality
        
        if s_hash not in hashes_to_jpeg_quality:
            
            path = HG.client_controller.client_files_manager.GetFilePath( s_hash, s_mime )
            
            hashes_to_jpeg_quality[ s_hash ] = HydrusImageHandling.GetJPEGQuantizationQualityEstimate( path )
            
        
        if c_hash not in hashes_to_jpeg_quality:
            
            path = HG.client_controller.client_files_manager.GetFilePath( c_hash, c_mime )
            
            hashes_to_jpeg_quality[ c_hash ] = HydrusImageHandling.GetJPEGQuantizationQualityEstimate( path )
            
        
        # cache values are ( human label, numeric quantization estimate ) pairs
        ( s_label, s_jpeg_quality ) = hashes_to_jpeg_quality[ s_hash ]
        ( c_label, c_jpeg_quality ) = hashes_to_jpeg_quality[ c_hash ]
        
        score = 0
        
        if s_label != c_label:
            
            if c_jpeg_quality is None or s_jpeg_quality is None:
                
                # estimate unavailable for one side; no score either way
                score = 0
                
            else:
                
                # other way around, low score is good here
                quality_ratio = c_jpeg_quality / s_jpeg_quality
                
                if quality_ratio > 2.0:
                    
                    score = duplicate_comparison_score_much_higher_jpeg_quality
                    
                elif quality_ratio > 1.0:
                    
                    score = duplicate_comparison_score_higher_jpeg_quality
                    
                elif quality_ratio < 0.5:
                    
                    score = -duplicate_comparison_score_much_higher_jpeg_quality
                    
                else:
                    
                    score = -duplicate_comparison_score_higher_jpeg_quality
                    
                
            
            statement = '{} vs {} jpeg quality'.format( s_label, c_label )
            
            statements_and_scores[ 'jpeg_quality' ] = ( statement, score )
            
        
    
    return statements_and_scores
    
|
2020-04-29 21:44:12 +00:00
|
|
|
def GetMediasTags( pool, tag_service_key, tag_display_type, content_statuses ):
    """
    Collect the union of tags across the given media, for one tag service and
    display type, restricted to the given content statuses. Collections are
    expanded to their singleton tags managers.
    """
    
    tags = set()
    
    for media in pool:
        
        if media.IsCollection():
            
            media_tags_managers = media.GetSingletonsTagsManagers()
            
        else:
            
            media_tags_managers = [ media.GetTagsManager() ]
            
        
        for tags_manager in media_tags_managers:
            
            statuses_to_tags = tags_manager.GetStatusesToTags( tag_service_key, tag_display_type )
            
            for content_status in content_statuses:
                
                tags.update( statuses_to_tags[ content_status ] )
                
            
        
    
    return tags
    
|
2021-04-28 21:43:16 +00:00
|
|
|
def GetMediaResultsTagCount( media_results, tag_service_key, tag_display_type ):
    """
    Convenience wrapper: pull the tags manager from every media result and
    delegate to GetTagsManagersTagCount.
    """
    
    gathered_tags_managers = [ media_result.GetTagsManager() for media_result in media_results ]
    
    return GetTagsManagersTagCount( gathered_tags_managers, tag_service_key, tag_display_type )
    
|
2020-04-29 21:44:12 +00:00
|
|
|
def GetMediasTagCount( pool, tag_service_key, tag_display_type ):
    """
    Count tags-per-status across the given media, expanding collections into
    their singleton tags managers, then delegate to GetTagsManagersTagCount.
    """
    
    gathered_tags_managers = []
    
    for media in pool:
        
        if media.IsCollection():
            
            new_tags_managers = media.GetSingletonsTagsManagers()
            
        else:
            
            new_tags_managers = [ media.GetTagsManager() ]
            
        
        gathered_tags_managers.extend( new_tags_managers )
        
    
    return GetTagsManagersTagCount( gathered_tags_managers, tag_service_key, tag_display_type )
    
|
|
|
|
def GetTagsManagersTagCount( tags_managers, tag_service_key, tag_display_type ):
    """
    Tally tag occurrence counts across the given tags managers for one tag
    service and display type.
    
    Returns a tuple of four collections.Counter objects, in order:
    ( current, deleted, pending, petitioned ).
    """
    
    statuses_to_counters = {
        HC.CONTENT_STATUS_CURRENT : collections.Counter(),
        HC.CONTENT_STATUS_DELETED : collections.Counter(),
        HC.CONTENT_STATUS_PENDING : collections.Counter(),
        HC.CONTENT_STATUS_PETITIONED : collections.Counter()
    }
    
    for tags_manager in tags_managers:
        
        statuses_to_tags = tags_manager.GetStatusesToTags( tag_service_key, tag_display_type )
        
        for ( content_status, counter ) in statuses_to_counters.items():
            
            counter.update( statuses_to_tags[ content_status ] )
            
        
    
    return (
        statuses_to_counters[ HC.CONTENT_STATUS_CURRENT ],
        statuses_to_counters[ HC.CONTENT_STATUS_DELETED ],
        statuses_to_counters[ HC.CONTENT_STATUS_PENDING ],
        statuses_to_counters[ HC.CONTENT_STATUS_PETITIONED ]
    )
    
|
|
|
2020-05-06 21:31:41 +00:00
|
|
|
class Media( object ):
    """
    Abstract base for anything that can sit in a media list: a single file or
    a collection of files. This base only supplies object identity — a random
    key generated per instance — so media objects are hashable and comparable.
    Subclasses implement all the accessors, which here raise NotImplementedError.
    """
    
    def __init__( self ):
        
        # identity is a fresh random key, not the file hash: two Media objects
        # wrapping the same file remain distinct
        self._id = HydrusData.GenerateKey()
        self._id_hash = self._id.__hash__()
        
    
    def __eq__( self, other ):
        
        # identity-key equality; defers to the other operand for non-Media types
        if isinstance( other, Media ):
            
            return self.__hash__() == other.__hash__()
            
        
        return NotImplemented
        
    
    def __hash__( self ):
        
        return self._id_hash
        
    
    def __ne__( self, other ):
        
        # NOTE(review): unlike __eq__, this does not return NotImplemented for
        # non-Media operands — it compares hashes directly. Confirm intentional.
        return self.__hash__() != other.__hash__()
        
    
    def GetDisplayMedia( self ) -> 'Media':
        """Return the media object to actually display for this entry."""
        
        raise NotImplementedError()
        
    
    def GetDuration( self ) -> typing.Optional[ int ]:
        """Return the duration, or None when not applicable."""
        
        raise NotImplementedError()
        
    
    def GetFileViewingStatsManager( self ) -> ClientMediaManagers.FileViewingStatsManager:
        """Return the file viewing stats manager."""
        
        raise NotImplementedError()
        
    
    def GetHash( self ) -> bytes:
        """Return the file hash."""
        
        raise NotImplementedError()
        
    
    def GetHashes( self, has_location = None, discriminant = None, not_uploaded_to = None, ordered = False ):
        """Return the hashes this media covers, optionally filtered/ordered."""
        
        raise NotImplementedError()
        
    
    def GetLocationsManager( self ) -> ClientMediaManagers.LocationsManager:
        """Return the locations manager."""
        
        raise NotImplementedError()
        
    
    def GetMime( self ) -> int:
        """Return the mime as an HC mime constant."""
        
        raise NotImplementedError()
        
    
    def GetNumFiles( self ) -> int:
        """Return how many files this media represents."""
        
        raise NotImplementedError()
        
    
    def GetNumFrames( self ) -> typing.Optional[ int ]:
        """Return the frame count, or None when not applicable."""
        
        raise NotImplementedError()
        
    
    def GetNumInbox( self ) -> int:
        """Return how many of this media's files are in the inbox."""
        
        raise NotImplementedError()
        
    
    def GetNumWords( self ) -> typing.Optional[ int ]:
        """Return the word count, or None when not applicable."""
        
        raise NotImplementedError()
        
    
    def GetCurrentTimestamp( self, service_key: bytes ) -> typing.Optional[ int ]:
        """Return the 'current' timestamp for the given service, or None."""
        
        raise NotImplementedError()
        
    
    def GetDeletedTimestamps( self, service_key: bytes ) -> typing.Tuple[ typing.Optional[ int ], typing.Optional[ int ] ]:
        """Return the deletion timestamp pair for the given service."""
        
        raise NotImplementedError()
        
    
    def GetPrettyInfoLines( self ) -> typing.List[ str ]:
        """Return human-readable info lines for display."""
        
        raise NotImplementedError()
        
    
    def GetRatingsManager( self ) -> ClientMediaManagers.RatingsManager:
        """Return the ratings manager."""
        
        raise NotImplementedError()
        
    
    def GetResolution( self ) -> typing.Tuple[ int, int ]:
        """Return ( width, height )."""
        
        raise NotImplementedError()
        
    
    def GetSize( self ) -> int:
        """Return the filesize in bytes."""
        
        raise NotImplementedError()
        
    
    def GetTagsManager( self ) -> ClientMediaManagers.TagsManager:
        """Return the tags manager."""
        
        raise NotImplementedError()
        
    
    def HasAnyOfTheseHashes( self, hashes ) -> bool:
        """Return True if this media covers any of the given hashes."""
        
        raise NotImplementedError()
        
    
    def HasArchive( self ) -> bool:
        """Return True if any covered file is archived."""
        
        raise NotImplementedError()
        
    
    def HasAudio( self ) -> bool:
        """Return True if the media has audio."""
        
        raise NotImplementedError()
        
    
    def HasDuration( self ) -> bool:
        """Return True if the media has a duration."""
        
        raise NotImplementedError()
        
    
    def HasImages( self ) -> bool:
        """Return True if the media includes images."""
        
        raise NotImplementedError()
        
    
    def HasInbox( self ) -> bool:
        """Return True if any covered file is in the inbox."""
        
        raise NotImplementedError()
        
    
    def HasNotes( self ) -> bool:
        """Return True if the media has notes."""
        
        raise NotImplementedError()
        
    
    def IsCollection( self ) -> bool:
        """Return True if this media is a collection of other media."""
        
        raise NotImplementedError()
        
    
    def IsImage( self ) -> bool:
        """Return True if this media is an image."""
        
        raise NotImplementedError()
        
    
    def IsSizeDefinite( self ) -> bool:
        """Return True if the filesize is known."""
        
        raise NotImplementedError()
        
    
    def UpdateFileInfo( self, hashes_to_media_results ):
        """Refresh internal state from the given { hash : media_result } mapping."""
        
        raise NotImplementedError()
        
    
|
|
2019-08-21 21:34:01 +00:00
|
|
|
class MediaCollect( HydrusSerialisable.SerialisableBase ):
    """
    Serialisable setting describing how media should be collected into groups:
    by tag namespaces and/or rating services, with a flag for whether media
    matching no collection still get grouped together.
    """
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_MEDIA_COLLECT
    SERIALISABLE_NAME = 'Media Collect'
    SERIALISABLE_VERSION = 1
    
    def __init__( self, namespaces = None, rating_service_keys = None, collect_unmatched = None ):
        
        # None sentinels avoid mutable default arguments
        self.namespaces = [] if namespaces is None else namespaces
        self.rating_service_keys = [] if rating_service_keys is None else rating_service_keys
        self.collect_unmatched = True if collect_unmatched is None else collect_unmatched
        
    
    def _GetSerialisableInfo( self ):
        
        # service keys are bytes; serialise as hex strings
        serialisable_rating_service_keys = [ service_key.hex() for service_key in self.rating_service_keys ]
        
        return ( self.namespaces, serialisable_rating_service_keys, self.collect_unmatched )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        
        ( self.namespaces, serialisable_rating_service_keys, self.collect_unmatched ) = serialisable_info
        
        self.rating_service_keys = [ bytes.fromhex( serialisable_service_key ) for serialisable_service_key in serialisable_rating_service_keys ]
        
    
    def DoesACollect( self ):
        
        # any namespace or rating service configured means collection is active
        return len( self.namespaces ) + len( self.rating_service_keys ) > 0
        
    
    def ToString( self ):
        
        services_manager = HG.client_controller.services_manager
        
        summary_components = list( self.namespaces )
        
        for service_key in self.rating_service_keys:
            
            # skip services that no longer exist
            if services_manager.ServiceExists( service_key ):
                
                summary_components.append( services_manager.GetName( service_key ) )
                
            
        
        if len( summary_components ) == 0:
            
            return 'no collections'
            
        
        return ', '.join( summary_components )
        
    
|
2019-08-21 21:34:01 +00:00
|
|
|
# register MediaCollect with the serialisation system so it can be restored by type id
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_MEDIA_COLLECT ] = MediaCollect
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
class MediaList( object ):
|
|
|
|
|
2022-01-19 21:28:59 +00:00
|
|
|
def __init__( self, location_context: ClientLocation.LocationContext, media_results ):
    
    # weed out duplicate hashes up front, preserving order
    seen_hashes = set()
    deduped_media_results = []
    
    for media_result in media_results:
        
        media_hash = media_result.GetHash()
        
        if media_hash not in seen_hashes:
            
            deduped_media_results.append( media_result )
            
            seen_hashes.add( media_hash )
            
        
    
    self._location_context = location_context
    
    # hash caches, rebuilt by _RecalcHashes
    self._hashes = set()
    self._hashes_ordered = []
    
    self._hashes_to_singleton_media = {}
    self._hashes_to_collected_media = {}
    
    self._media_sort = MediaSort( ( 'system', CC.SORT_FILES_BY_FILESIZE ), CC.SORT_ASC )
    self._media_collect = MediaCollect()
    
    self._sorted_media = SortedList( [ self._GenerateMediaSingleton( media_result ) for media_result in deduped_media_results ] )
    
    self._selected_media = set()
    
    # everything starts out as a singleton; Collect() can group them later
    self._singleton_media = set( self._sorted_media )
    self._collected_media = set()
    
    self._RecalcHashes()
    
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-03-22 22:38:15 +00:00
|
|
|
def __len__( self ):
    
    # collections count their contained files, not themselves
    total = len( self._singleton_media )
    
    for collection in self._collected_media:
        
        total += len( collection )
        
    
    return total
    
2019-08-21 21:34:01 +00:00
|
|
|
def _CalculateCollectionKeysToMedias( self, media_collect, medias ):
    
    # group the given medias by ( namespace slice, rating slice ) per the collect settings
    keys_to_medias = collections.defaultdict( list )
    
    namespaces = list( media_collect.namespaces )
    rating_service_keys = list( media_collect.rating_service_keys )
    
    collecting_by_namespace = len( namespaces ) > 0
    collecting_by_rating = len( rating_service_keys ) > 0
    
    for media in medias:
        
        if collecting_by_namespace:
            
            namespace_key = media.GetTagsManager().GetNamespaceSlice( namespaces, ClientTags.TAG_DISPLAY_ACTUAL )
            
        else:
            
            namespace_key = frozenset()
            
        
        if collecting_by_rating:
            
            rating_key = media.GetRatingsManager().GetRatingSlice( rating_service_keys )
            
        else:
            
            rating_key = frozenset()
            
        
        keys_to_medias[ ( namespace_key, rating_key ) ].append( media )
        
    
    return keys_to_medias
    
2017-09-20 19:47:31 +00:00
|
|
|
def _GenerateMediaCollection( self, media_results ):
    
    # factory hook so subclasses can supply their own collection type
    return MediaCollection( self._location_context, media_results )
    
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-09-20 19:47:31 +00:00
|
|
|
def _GenerateMediaSingleton( self, media_result ):
    
    # factory hook so subclasses can supply their own singleton type
    return MediaSingleton( media_result )
    
2015-03-18 21:46:29 +00:00
|
|
|
|
2020-01-02 03:05:35 +00:00
|
|
|
def _GetFirst( self ):
    
    # first media in sort order, or None when the list is empty
    if len( self._sorted_media ) == 0:
        
        return None
        
    
    return self._sorted_media[ 0 ]
    
|
|
|
|
2020-01-02 03:05:35 +00:00
|
|
|
def _GetLast( self ):
    
    # last media in sort order, or None when the list is empty
    if len( self._sorted_media ) == 0:
        
        return None
        
    
    return self._sorted_media[ -1 ]
    
|
|
|
|
|
|
|
def _GetMedia( self, hashes, discriminator = None ):
    
    # fetch the media objects that own any of the given hashes
    # discriminator may be None (both kinds), 'singletons', or 'collections'
    if hashes.isdisjoint( self._hashes ):
        
        return []
        
    
    medias = []
    
    if discriminator in ( None, 'singletons' ):
        
        singleton_lookup = self._hashes_to_singleton_media
        
        medias.extend( ( singleton_lookup[ hash ] for hash in hashes if hash in singleton_lookup ) )
        
    
    if discriminator in ( None, 'collections' ):
        
        collected_lookup = self._hashes_to_collected_media
        
        # a set here, since many hashes can map to the same collection
        medias.extend( { collected_lookup[ hash ] for hash in hashes if hash in collected_lookup } )
        
    
    return medias
    
|
|
|
|
|
|
|
def _GetNext( self, media ):
    
    if media is None:
        
        return None
        
    
    index_after = self._sorted_media.index( media ) + 1
    
    # wrap around to the start when we walk off the end
    if index_after == len( self._sorted_media ):
        
        return self._GetFirst()
        
    
    return self._sorted_media[ index_after ]
    
|
|
|
|
|
|
|
|
|
|
|
def _GetPrevious( self, media ):
    
    if media is None:
        
        return None
        
    
    index_before = self._sorted_media.index( media ) - 1
    
    # wrap around to the end when we walk off the front
    if index_before == -1:
        
        return self._GetLast()
        
    
    return self._sorted_media[ index_before ]
    
|
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
def _HasHashes( self, hashes ):
    
    """Return True if any of the given hashes is in this list."""
    
    # a C-level set intersection test beats the previous python loop of membership checks,
    # and matches the idiom already used by HasAnyOfTheseHashes
    return not self._hashes.isdisjoint( hashes )
    
|
|
|
|
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
def _RecalcAfterContentUpdates( self, service_keys_to_content_updates ):
    
    # hook for subclasses: recalculate any cached state after content updates have been applied
    # the base implementation has nothing to recalculate
    pass
    
2021-05-12 20:49:20 +00:00
|
|
|
def _RecalcAfterMediaRemove( self ):
    
    # hook called after media are removed; the base class just rebuilds its hash caches
    self._RecalcHashes()
    
2016-01-06 21:17:20 +00:00
|
|
|
def _RecalcHashes( self ):
    
    # rebuild every hash cache from the current sorted media
    self._hashes = set()
    self._hashes_ordered = []
    
    self._hashes_to_singleton_media = {}
    self._hashes_to_collected_media = {}
    
    for media in self._sorted_media:
        
        if isinstance( media, MediaCollection ):
            
            collection_hashes = media.GetHashes( ordered = True )
            
            self._hashes.update( collection_hashes )
            self._hashes_ordered.extend( collection_hashes )
            
            # every member hash maps back to its owning collection
            for hash in collection_hashes:
                
                self._hashes_to_collected_media[ hash ] = media
                
            
        else:
            
            singleton_hash = media.GetHash()
            
            self._hashes.add( singleton_hash )
            self._hashes_ordered.append( singleton_hash )
            
            self._hashes_to_singleton_media[ singleton_hash ] = media
            
        
    
|
2017-05-17 21:53:02 +00:00
|
|
|
def _RemoveMediaByHashes( self, hashes ):
    
    if not isinstance( hashes, set ):
        
        hashes = set( hashes )
        
    
    affected_singleton_media = self._GetMedia( hashes, discriminator = 'singletons' )
    
    # let each collection drop its own members first, then cull any collection that is now empty
    for collection in self._collected_media:
        
        collection._RemoveMediaByHashes( hashes )
        
    
    affected_collected_media = [ collection for collection in self._collected_media if collection.HasNoMedia() ]
    
    self._RemoveMediaDirectly( affected_singleton_media, affected_collected_media )
    
|
def _RemoveMediaDirectly( self, singleton_media, collected_media ):
    
    # normalise both inputs to sets for the difference/union calls below
    if not isinstance( singleton_media, set ):
        
        singleton_media = set( singleton_media )
        
    
    if not isinstance( collected_media, set ):
        
        collected_media = set( collected_media )
        
    
    self._singleton_media.difference_update( singleton_media )
    self._collected_media.difference_update( collected_media )
    
    self._sorted_media.remove_items( singleton_media.union( collected_media ) )
    
    self._RecalcAfterMediaRemove()
    
|
|
|
|
2019-08-21 21:34:01 +00:00
|
|
|
def AddMedia( self, new_media ):
    
    """Add the given media to this list as singletons, skipping any hash we already hold. Returns the flattened input."""
    
    new_media = FlattenMedia( new_media )
    
    addable_media = []
    
    for media in new_media:
        
        media_hash = media.GetHash()
        
        # ignore anything already in the list
        if media_hash in self._hashes:
            
            continue
            
        
        addable_media.append( media )
        
        self._hashes.add( media_hash )
        self._hashes_ordered.append( media_hash )
        
        self._hashes_to_singleton_media[ media_hash ] = media
        
    
    self._singleton_media.update( addable_media )
    self._sorted_media.append_items( addable_media )
    
    return new_media
    
2019-08-21 21:34:01 +00:00
|
|
|
def Collect( self, media_collect = None ):
    
    """Regroup this list's media according to the given MediaCollect, or re-run the last collect if None is given."""
    
    if media_collect is None: # was '== None'; identity comparison is the correct idiom for None
        
        media_collect = self._media_collect
        
    
    self._media_collect = media_collect
    
    # flatten any existing collections back down to singletons before regrouping
    flat_media = list( self._singleton_media )
    
    for media in self._collected_media:
        
        flat_media.extend( [ self._GenerateMediaSingleton( media_result ) for media_result in media.GenerateMediaResults() ] )
        
    
    if self._media_collect.DoesACollect():
        
        keys_to_medias = self._CalculateCollectionKeysToMedias( media_collect, flat_media )
        
        # add an option here I think, to media_collect to say if collections with one item should be singletons or not
        
        self._singleton_media = set()#{ medias[0] for ( key, medias ) in keys_to_medias.items() if len( medias ) == 1 }
        
        if not self._media_collect.collect_unmatched:
            
            # media that matched no namespace and no rating stay as singletons
            unmatched_key = ( frozenset(), frozenset() )
            
            if unmatched_key in keys_to_medias:
                
                unmatched_medias = keys_to_medias[ unmatched_key ]
                
                self._singleton_media.update( unmatched_medias )
                
                del keys_to_medias[ unmatched_key ]
                
            
        
        self._collected_media = { self._GenerateMediaCollection( [ media.GetMediaResult() for media in medias ] ) for ( key, medias ) in keys_to_medias.items() }# if len( medias ) > 1 }
        
    else:
        
        # no collect criteria: everything is a singleton
        self._singleton_media = set( flat_media )
        
        self._collected_media = set()
        
    
    self._sorted_media = SortedList( list( self._singleton_media ) + list( self._collected_media ) )
    
    self._RecalcHashes()
    
|
|
|
|
|
|
|
def DeletePending( self, service_key ):
    
    # forward the delete-pending request to every collection
    for collection in self._collected_media:
        
        collection.DeletePending( service_key )
        
    
2020-01-22 21:04:43 +00:00
|
|
|
def GetFilteredFileCount( self, file_filter ):
    
    # return how many files in this list match the given file filter
    
    if file_filter.filter_type == FILE_FILTER_ALL:
        
        return self.GetNumFiles()
        
    elif file_filter.filter_type == FILE_FILTER_SELECTED:
        
        return sum( ( m.GetNumFiles() for m in self._selected_media ) )
        
    elif file_filter.filter_type == FILE_FILTER_NOT_SELECTED:
        
        return self.GetNumFiles() - sum( ( m.GetNumFiles() for m in self._selected_media ) )
        
    elif file_filter.filter_type == FILE_FILTER_NONE:
        
        return 0
        
    elif file_filter.filter_type == FILE_FILTER_INBOX:
        
        # NOTE(review): this counts inbox over _selected_media, but GetFilteredHashes computes
        # FILE_FILTER_INBOX over all flat media -- confirm whether 'selected' here is intended
        return sum( ( m.GetNumInbox() for m in self._selected_media ) )
        
    elif file_filter.filter_type == FILE_FILTER_ARCHIVE:
        
        # NOTE(review): same question as FILE_FILTER_INBOX above regarding _selected_media
        return self.GetNumFiles() - sum( ( m.GetNumInbox() for m in self._selected_media ) )
        
    else:
        
        # the remaining filters are per-file, so flatten collections first
        flat_media = self.GetFlatMedia()
        
        if file_filter.filter_type == FILE_FILTER_FILE_SERVICE:
            
            file_service_key = file_filter.filter_data
            
            return sum( ( 1 for m in flat_media if file_service_key in m.GetLocationsManager().GetCurrent() ) )
            
        elif file_filter.filter_type == FILE_FILTER_LOCAL:
            
            return sum( ( 1 for m in flat_media if m.GetLocationsManager().IsLocal() ) )
            
        elif file_filter.filter_type == FILE_FILTER_REMOTE:
            
            return sum( ( 1 for m in flat_media if m.GetLocationsManager().IsRemote() ) )
            
        elif file_filter.filter_type == FILE_FILTER_TAGS:
            
            ( tag_service_key, and_or_or, select_tags ) = file_filter.filter_data
            
            if and_or_or == 'AND':
                
                select_tags = set( select_tags )
                
                # a file counts only if it has every selected tag
                return sum( ( 1 for m in flat_media if select_tags.issubset( m.GetTagsManager().GetCurrentAndPending( tag_service_key, ClientTags.TAG_DISPLAY_ACTUAL ) ) ) )
                
            elif and_or_or == 'OR':
                
                # a file counts if it has at least one selected tag
                return sum( ( 1 for m in flat_media if HydrusData.SetsIntersect( m.GetTagsManager().GetCurrentAndPending( tag_service_key, ClientTags.TAG_DISPLAY_ACTUAL ), select_tags ) ) )
                
            
        
    
    # unknown filter types count nothing
    return 0
    
2020-01-02 03:05:35 +00:00
|
|
|
def GetFilteredHashes( self, file_filter ):
    
    # return the set of hashes in this list that match the given file filter
    
    if file_filter.filter_type == FILE_FILTER_ALL:
        
        return self._hashes
        
    elif file_filter.filter_type == FILE_FILTER_SELECTED:
        
        hashes = set()
        
        for m in self._selected_media:
            
            hashes.update( m.GetHashes() )
            
        
        return hashes
        
    elif file_filter.filter_type == FILE_FILTER_NOT_SELECTED:
        
        hashes = set()
        
        for m in self._sorted_media:
            
            if m not in self._selected_media:
                
                hashes.update( m.GetHashes() )
                
            
        
        return hashes
        
    elif file_filter.filter_type == FILE_FILTER_NONE:
        
        return set()
        
    else:
        
        # the remaining filters are per-file, so flatten collections first
        flat_media = self.GetFlatMedia()
        
        if file_filter.filter_type == FILE_FILTER_INBOX:
            
            filtered_media = [ m for m in flat_media if m.HasInbox() ]
            
        elif file_filter.filter_type == FILE_FILTER_ARCHIVE:
            
            filtered_media = [ m for m in flat_media if not m.HasInbox() ]
            
        elif file_filter.filter_type == FILE_FILTER_FILE_SERVICE:
            
            file_service_key = file_filter.filter_data
            
            filtered_media = [ m for m in flat_media if file_service_key in m.GetLocationsManager().GetCurrent() ]
            
        elif file_filter.filter_type == FILE_FILTER_LOCAL:
            
            filtered_media = [ m for m in flat_media if m.GetLocationsManager().IsLocal() ]
            
        elif file_filter.filter_type == FILE_FILTER_REMOTE:
            
            filtered_media = [ m for m in flat_media if m.GetLocationsManager().IsRemote() ]
            
        elif file_filter.filter_type == FILE_FILTER_TAGS:
            
            ( tag_service_key, and_or_or, select_tags ) = file_filter.filter_data
            
            if and_or_or == 'AND':
                
                select_tags = set( select_tags )
                
                # a file matches only if it has every selected tag
                filtered_media = [ m for m in flat_media if select_tags.issubset( m.GetTagsManager().GetCurrentAndPending( tag_service_key, ClientTags.TAG_DISPLAY_ACTUAL ) ) ]
                
            elif and_or_or == 'OR':
                
                # a file matches if it has at least one selected tag
                filtered_media = [ m for m in flat_media if HydrusData.SetsIntersect( m.GetTagsManager().GetCurrentAndPending( tag_service_key, ClientTags.TAG_DISPLAY_ACTUAL ), select_tags ) ]
                
            
        
        hashes = { m.GetHash() for m in filtered_media }
        
        return hashes
        
    
    # unknown filter type: nothing matches
    return set()
    
|
|
|
def GetFilteredMedia( self, file_filter ):
    
    # return the media objects (singletons and collections) matching the given file filter
    
    if file_filter.filter_type == FILE_FILTER_ALL:
        
        return set( self._sorted_media )
        
    elif file_filter.filter_type == FILE_FILTER_SELECTED:
        
        return self._selected_media
        
    elif file_filter.filter_type == FILE_FILTER_NOT_SELECTED:
        
        return { m for m in self._sorted_media if m not in self._selected_media }
        
    elif file_filter.filter_type == FILE_FILTER_NONE:
        
        return set()
        
    else:
        
        if file_filter.filter_type == FILE_FILTER_INBOX:
            
            filtered_media = { m for m in self._sorted_media if m.HasInbox() }
            
        elif file_filter.filter_type == FILE_FILTER_ARCHIVE:
            
            filtered_media = { m for m in self._sorted_media if not m.HasInbox() }
            
        elif file_filter.filter_type == FILE_FILTER_FILE_SERVICE:
            
            file_service_key = file_filter.filter_data
            
            filtered_media = { m for m in self._sorted_media if file_service_key in m.GetLocationsManager().GetCurrent() }
            
        elif file_filter.filter_type == FILE_FILTER_LOCAL:
            
            filtered_media = { m for m in self._sorted_media if m.GetLocationsManager().IsLocal() }
            
        elif file_filter.filter_type == FILE_FILTER_REMOTE:
            
            filtered_media = { m for m in self._sorted_media if m.GetLocationsManager().IsRemote() }
            
        elif file_filter.filter_type == FILE_FILTER_TAGS:
            
            ( tag_service_key, and_or_or, select_tags ) = file_filter.filter_data
            
            if and_or_or == 'AND':
                
                select_tags = set( select_tags )
                
                # media must have every selected tag
                filtered_media = { m for m in self._sorted_media if select_tags.issubset( m.GetTagsManager().GetCurrentAndPending( tag_service_key, ClientTags.TAG_DISPLAY_ACTUAL ) ) }
                
            elif and_or_or == 'OR':
                
                # media must have at least one selected tag
                filtered_media = { m for m in self._sorted_media if HydrusData.SetsIntersect( m.GetTagsManager().GetCurrentAndPending( tag_service_key, ClientTags.TAG_DISPLAY_ACTUAL ), select_tags ) }
                
            
        
        return filtered_media
        
    
    # unknown filter type: nothing matches
    return set()
    
2015-08-19 21:48:21 +00:00
|
|
|
def GenerateMediaResults( self, has_location = None, discriminant = None, selected_media = None, unrated = None, for_media_viewer = False ):
    
    # walk the sorted media and return the media results that pass every given filter
    
    media_results = []
    
    for media in self._sorted_media:
        
        if has_location is not None:
            
            locations_manager = media.GetLocationsManager()
            
            # skip anything not currently in the given file domain
            if has_location not in locations_manager.GetCurrent():
                
                continue
                
            
        
        if selected_media is not None and media not in selected_media:
            
            continue
            
        
        if media.IsCollection():
            
            # don't include selected_media here as it is not valid at the deeper collection level
            media_results.extend( media.GenerateMediaResults( has_location = has_location, discriminant = discriminant, unrated = unrated, for_media_viewer = True ) )
            
        else:
            
            if discriminant is not None:
                
                locations_manager = media.GetLocationsManager()
                
                # p records whether the media passes the discriminant test
                if discriminant == CC.DISCRIMINANT_INBOX:
                    
                    p = media.HasInbox()
                    
                elif discriminant == CC.DISCRIMINANT_ARCHIVE:
                    
                    p = not media.HasInbox()
                    
                elif discriminant == CC.DISCRIMINANT_LOCAL:
                    
                    p = locations_manager.IsLocal()
                    
                elif discriminant == CC.DISCRIMINANT_LOCAL_BUT_NOT_IN_TRASH:
                    
                    p = locations_manager.IsLocal() and not locations_manager.IsTrashed()
                    
                elif discriminant == CC.DISCRIMINANT_NOT_LOCAL:
                    
                    p = not locations_manager.IsLocal()
                    
                elif discriminant == CC.DISCRIMINANT_DOWNLOADING:
                    
                    p = locations_manager.IsDownloading()
                    
                
                if not p:
                    
                    continue
                    
                
            
            if unrated is not None:
                
                ratings_manager = media.GetRatingsManager()
                
                # only keep files with no rating on the given service
                if ratings_manager.GetRating( unrated ) is not None:
                    
                    continue
                    
                
            
            if for_media_viewer:
                
                new_options = HG.client_controller.new_options
                
                ( media_show_action, media_start_paused, media_start_with_embed ) = new_options.GetMediaShowAction( media.GetMime() )
                
                # skip filetypes the media viewer is configured not to display
                if media_show_action in ( CC.MEDIA_VIEWER_ACTION_DO_NOT_SHOW_ON_ACTIVATION_OPEN_EXTERNALLY, CC.MEDIA_VIEWER_ACTION_DO_NOT_SHOW ):
                    
                    continue
                    
                
            
            media_results.append( media.GetMediaResult() )
            
        
    
    return media_results
    
2019-08-21 21:34:01 +00:00
|
|
|
def GetAPIInfoDict( self, simple ):
    
    # build the API description of this media list
    info = {}
    
    info[ 'num_files' ] = self.GetNumFiles()
    
    flat_media = self.GetFlatMedia()
    
    info[ 'hash_ids' ] = [ media.GetMediaResult().GetHashId() for media in flat_media ]
    
    if not simple:
        
        # hex-encoded hashes, in current sort order
        ordered_hashes = self.GetHashes( ordered = True )
        
        info[ 'hashes' ] = [ hash.hex() for hash in ordered_hashes ]
        
    
    return info
    
2017-03-22 22:38:15 +00:00
|
|
|
def GetFirst( self ):
    
    # public accessor for the first media in sort order, or None if the list is empty
    return self._GetFirst()
    
2015-03-18 21:46:29 +00:00
|
|
|
def GetFlatMedia( self ):
    
    # expand collections so the result holds singleton media only, preserving sort order
    flat_media = []
    
    for media in self._sorted_media:
        
        if media.IsCollection():
            
            # recurse into the collection for its own flattened list
            flat_media.extend( media.GetFlatMedia() )
            
            continue
            
        
        flat_media.append( media )
        
    
    return flat_media
    
2018-05-30 20:13:21 +00:00
|
|
|
def GetHashes( self, has_location = None, discriminant = None, not_uploaded_to = None, ordered = False ):
    
    # fast path: no filters requested, serve straight from the caches
    no_filters = has_location is None and discriminant is None and not_uploaded_to is None
    
    if no_filters:
        
        return self._hashes_ordered if ordered else self._hashes
        
    
    # slow path: ask each media to filter its own hashes
    if ordered:
        
        result = []
        
        for media in self._sorted_media:
            
            result.extend( media.GetHashes( has_location, discriminant, not_uploaded_to, ordered ) )
            
        
    else:
        
        result = set()
        
        for media in self._sorted_media:
            
            result.update( media.GetHashes( has_location, discriminant, not_uploaded_to, ordered ) )
            
        
    
    return result
    
2017-03-22 22:38:15 +00:00
|
|
|
def GetLast( self ):
    
    # public accessor for the last media in sort order, or None if the list is empty
    return self._GetLast()
    
2017-08-02 21:32:54 +00:00
|
|
|
def GetMediaIndex( self, media ):
    
    # position of the given media in the current sort order
    position = self._sorted_media.index( media )
    
    return position
    
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-03-22 22:38:15 +00:00
|
|
|
def GetNext( self, media ):
    
    # public accessor for the media after the given one, wrapping to the first at the end
    return self._GetNext( media )
    
2019-09-05 00:05:32 +00:00
|
|
|
def GetNumArchive( self ):
    
    # archived singletons plus whatever each collection reports
    singleton_archive = sum( 1 for m in self._singleton_media if not m.HasInbox() )
    collected_archive = sum( m.GetNumArchive() for m in self._collected_media )
    
    return singleton_archive + collected_archive
    
2017-08-02 21:32:54 +00:00
|
|
|
def GetNumFiles( self ):
    
    # every file appears exactly once in the hash cache
    num_hashes = len( self._hashes )
    
    return num_hashes
    
2019-09-05 00:05:32 +00:00
|
|
|
def GetNumInbox( self ):
    
    # inboxed singletons plus whatever each collection reports
    singleton_inbox = sum( 1 for m in self._singleton_media if m.HasInbox() )
    collected_inbox = sum( m.GetNumInbox() for m in self._collected_media )
    
    return singleton_inbox + collected_inbox
    
2017-03-22 22:38:15 +00:00
|
|
|
def GetPrevious( self, media ):
    
    # public accessor for the media before the given one, wrapping to the last at the front
    return self._GetPrevious( media )
    
2017-08-02 21:32:54 +00:00
|
|
|
def GetSortedMedia( self ):
    
    # direct reference to the underlying SortedList of media
    return self._sorted_media
    
2015-03-18 21:46:29 +00:00
|
|
|
|
2021-07-14 20:42:19 +00:00
|
|
|
def HasAnyOfTheseHashes( self, hashes: set ):
    
    # True if the given set shares at least one hash with this list
    shares_a_hash = not hashes.isdisjoint( self._hashes )
    
    return shares_a_hash
    
2015-03-18 21:46:29 +00:00
|
|
|
def HasMedia( self, media ):
    
    """Return True if the given media is in this list, either directly or inside a collection."""
    
    if media is None:
        
        return False
        
    
    # direct membership first: either a singleton or one of our collection objects
    if media in self._singleton_media or media in self._collected_media:
        
        return True
        
    
    # otherwise look inside each collection
    for media_collection in self._collected_media:
        
        if media_collection.HasMedia( media ):
            
            return True
            
        
    
    return False
    
2019-05-22 22:35:06 +00:00
|
|
|
def HasNoMedia( self ):
    
    # empty when nothing remains in the sorted list
    media_count = len( self._sorted_media )
    
    return media_count == 0
    
2015-03-18 21:46:29 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
def ProcessContentUpdates( self, full_service_keys_to_content_updates ):
    
    # apply incoming content updates to this list, removing media from the view where appropriate
    
    # first drop any updates that do not touch our hashes
    service_keys_to_content_updates = FilterServiceKeysToContentUpdates( full_service_keys_to_content_updates, self._hashes )
    
    if len( service_keys_to_content_updates ) == 0:
        
        return
        
    
    # let the collections process the updates first
    for m in self._collected_media:
        
        m.ProcessContentUpdates( service_keys_to_content_updates )
        
    
    for ( service_key, content_updates ) in service_keys_to_content_updates.items():
        
        for content_update in content_updates:
            
            ( data_type, action, row ) = content_update.ToTuple()
            
            hashes = content_update.GetHashes()
            
            if data_type == HC.CONTENT_TYPE_FILES:
                
                if action == HC.CONTENT_UPDATE_DELETE:
                    
                    local_file_domains = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
                    
                    all_local_file_services = set( list( local_file_domains ) + [ CC.COMBINED_LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY ] )
                    
                    #
                    
                    # work out whether this delete should remove the files from our current view
                    physically_deleted = service_key == CC.COMBINED_LOCAL_FILE_SERVICE_KEY
                    
                    trashed = service_key in local_file_domains
                    
                    deleted_from_our_domain = self._location_context.IsOneDomain() and service_key in self._location_context.current_service_keys
                    
                    our_view_is_all_local = self._location_context.IncludesCurrent() and not self._location_context.IncludesDeleted() and self._location_context.current_service_keys.issubset( all_local_file_services )
                    
                    physically_deleted_and_local_view = physically_deleted and our_view_is_all_local
                    
                    # user option: remove files from the view when they are trashed, unless we are looking at the trash itself
                    user_says_remove_and_trashed_from_non_trash_local_view = HC.options[ 'remove_trashed_files' ] and trashed and our_view_is_all_local and CC.TRASH_SERVICE_KEY not in self._location_context.current_service_keys
                    
                    deleted_from_repo_and_repo_view = service_key not in all_local_file_services and deleted_from_our_domain
                    
                    if physically_deleted_and_local_view or user_says_remove_and_trashed_from_non_trash_local_view or deleted_from_repo_and_repo_view:
                        
                        self._RemoveMediaByHashes( hashes )
                        
                    
                
            
        
    
    # let subclasses refresh any cached state
    self._RecalcAfterContentUpdates( service_keys_to_content_updates )
    
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
def ProcessServiceUpdates( self, service_keys_to_service_updates ):
|
|
|
|
|
2022-01-19 21:28:59 +00:00
|
|
|
for ( service_key, service_updates ) in service_keys_to_service_updates.items():
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
for service_update in service_updates:
|
|
|
|
|
|
|
|
( action, row ) = service_update.ToTuple()
|
|
|
|
|
2017-03-02 02:14:56 +00:00
|
|
|
if action == HC.SERVICE_UPDATE_DELETE_PENDING:
|
|
|
|
|
|
|
|
self.DeletePending( service_key )
|
|
|
|
|
|
|
|
elif action == HC.SERVICE_UPDATE_RESET:
|
|
|
|
|
|
|
|
self.ResetService( service_key )
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def ResetService( self, service_key ):
|
|
|
|
|
2022-01-19 21:28:59 +00:00
|
|
|
if self._location_context.IsOneDomain() and service_key in self._location_context.current_service_keys:
|
2016-01-06 21:17:20 +00:00
|
|
|
|
2017-05-17 21:53:02 +00:00
|
|
|
self._RemoveMediaDirectly( self._singleton_media, self._collected_media )
|
2016-01-06 21:17:20 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
else:
|
|
|
|
|
|
|
|
for media in self._collected_media: media.ResetService( service_key )
|
|
|
|
|
|
|
|
|
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
def Sort( self, media_sort = None ):
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2016-07-27 21:53:34 +00:00
|
|
|
for media in self._collected_media:
|
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
media.Sort( media_sort )
|
2016-07-27 21:53:34 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
if media_sort is None:
|
2016-07-27 21:53:34 +00:00
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
media_sort = self._media_sort
|
2016-07-27 21:53:34 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
self._media_sort = media_sort
|
2016-08-03 22:15:54 +00:00
|
|
|
|
2017-12-06 22:06:56 +00:00
|
|
|
media_sort_fallback = HG.client_controller.new_options.GetFallbackSort()
|
2016-08-03 22:15:54 +00:00
|
|
|
|
2022-01-19 21:28:59 +00:00
|
|
|
media_sort_fallback.Sort( self._location_context, self._sorted_media )
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2016-07-27 21:53:34 +00:00
|
|
|
# this is a stable sort, so the fallback order above will remain for equal items
|
|
|
|
|
2022-01-19 21:28:59 +00:00
|
|
|
self._media_sort.Sort( self._location_context, self._sorted_media )
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2021-08-18 21:10:01 +00:00
|
|
|
self._RecalcHashes()
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2020-01-02 03:05:35 +00:00
|
|
|
# file filter types: what subset of a media list a FileFilter selects
FILE_FILTER_ALL = 0
FILE_FILTER_NOT_SELECTED = 1
FILE_FILTER_NONE = 2
FILE_FILTER_INBOX = 3
FILE_FILTER_ARCHIVE = 4
FILE_FILTER_FILE_SERVICE = 5
FILE_FILTER_LOCAL = 6
FILE_FILTER_REMOTE = 7
FILE_FILTER_TAGS = 8
FILE_FILTER_SELECTED = 9
FILE_FILTER_MIME = 10

# human-readable labels for each filter type
file_filter_str_lookup = {
    FILE_FILTER_ALL : 'all',
    FILE_FILTER_NOT_SELECTED : 'not selected',
    FILE_FILTER_SELECTED : 'selected',
    FILE_FILTER_NONE : 'none',
    FILE_FILTER_INBOX : 'inbox',
    FILE_FILTER_ARCHIVE : 'archive',
    FILE_FILTER_FILE_SERVICE : 'file service',
    FILE_FILTER_LOCAL : 'local',
    FILE_FILTER_REMOTE : 'not local',
    FILE_FILTER_TAGS : 'tags',
    FILE_FILTER_MIME : 'filetype'
}
|
2020-01-02 03:05:35 +00:00
|
|
|
|
|
|
|
class FileFilter( object ):
    """A selector over a media list: 'all', 'inbox', a file service, tags, a mime, etc.
    
    Instances are value objects: hashable and comparable on
    ( filter_type, filter_data ), so they can serve as keys in the
    filter_counts caches passed around below.
    """
    
    def __init__( self, filter_type, filter_data = None ):
        
        # one of the FILE_FILTER_* constants
        self.filter_type = filter_type
        
        # type-specific payload, e.g. a service key, a mime, or a tag tuple
        self.filter_data = filter_data
        
    
    def __eq__( self, other ):
        
        if isinstance( other, FileFilter ):
            
            # compare the identifying data directly--equality via hash values
            # would make distinct filters equal on a hash collision
            return self.filter_type == other.filter_type and self.filter_data == other.filter_data
            
        
        return NotImplemented
        
    
    def __hash__( self ):
        
        if self.filter_data is None:
            
            return self.filter_type.__hash__()
            
        else:
            
            return ( self.filter_type, self.filter_data ).__hash__()
            
        
    
    def PopulateFilterCounts( self, media_list: "MediaList", filter_counts: dict ):
        """Ensure filter_counts has an entry for this filter, computing it if needed.
        
        Cheap shortcuts: NONE is always 0, and a filter with a known inverse
        can be derived as all - inverse when both counts are already cached;
        otherwise the media list is asked to count directly.
        """
        
        if self not in filter_counts:
            
            if self.filter_type == FILE_FILTER_NONE:
                
                filter_counts[ self ] = 0
                
                return
                
            
            quick_inverse_lookups = {}
            
            quick_inverse_lookups[ FileFilter( FILE_FILTER_INBOX ) ] = FileFilter( FILE_FILTER_ARCHIVE )
            quick_inverse_lookups[ FileFilter( FILE_FILTER_ARCHIVE ) ] = FileFilter( FILE_FILTER_INBOX )
            quick_inverse_lookups[ FileFilter( FILE_FILTER_SELECTED ) ] = FileFilter( FILE_FILTER_NOT_SELECTED )
            quick_inverse_lookups[ FileFilter( FILE_FILTER_NOT_SELECTED ) ] = FileFilter( FILE_FILTER_SELECTED )
            quick_inverse_lookups[ FileFilter( FILE_FILTER_LOCAL ) ] = FileFilter( FILE_FILTER_REMOTE )
            quick_inverse_lookups[ FileFilter( FILE_FILTER_REMOTE ) ] = FileFilter( FILE_FILTER_LOCAL )
            
            if self in quick_inverse_lookups:
                
                inverse = quick_inverse_lookups[ self ]
                
                all_filter = FileFilter( FILE_FILTER_ALL )
                
                if all_filter in filter_counts and inverse in filter_counts:
                    
                    filter_counts[ self ] = filter_counts[ all_filter ] - filter_counts[ inverse ]
                    
                    return
                    
                
            
            count = media_list.GetFilteredFileCount( self )
            
            filter_counts[ self ] = count
            
        
    
    def GetCount( self, media_list: "MediaList", filter_counts: dict ):
        """Return the number of files matching this filter, using/updating the cache."""
        
        self.PopulateFilterCounts( media_list, filter_counts )
        
        return filter_counts[ self ]
        
    
    def ToString( self, media_list: "MediaList", filter_counts: dict ):
        """Return a human-readable label with a file count, e.g. 'inbox (5)'.
        
        The 'all' filter also notes when every file is in the inbox/archive.
        """
        
        if self.filter_type == FILE_FILTER_FILE_SERVICE:
            
            file_service_key = self.filter_data
            
            s = HG.client_controller.services_manager.GetName( file_service_key )
            
        elif self.filter_type == FILE_FILTER_TAGS:
            
            ( tag_service_key, and_or_or, select_tags ) = self.filter_data
            
            s = and_or_or.join( select_tags )
            
            if tag_service_key != CC.COMBINED_TAG_SERVICE_KEY:
                
                s = '{} on {}'.format( s, HG.client_controller.services_manager.GetName( tag_service_key ) )
                
            
            s = HydrusText.ElideText( s, 64 )
            
        elif self.filter_type == FILE_FILTER_MIME:
            
            mime = self.filter_data
            
            s = HC.mime_string_lookup[ mime ]
            
        else:
            
            s = file_filter_str_lookup[ self.filter_type ]
            
        
        self.PopulateFilterCounts( media_list, filter_counts )
        
        my_count = filter_counts[ self ]
        
        s += ' ({})'.format( HydrusData.ToHumanInt( my_count ) )
        
        if self.filter_type == FILE_FILTER_ALL:
            
            inbox_filter = FileFilter( FILE_FILTER_INBOX )
            archive_filter = FileFilter( FILE_FILTER_ARCHIVE )
            
            inbox_filter.PopulateFilterCounts( media_list, filter_counts )
            archive_filter.PopulateFilterCounts( media_list, filter_counts )
            
            inbox_count = filter_counts[ inbox_filter ]
            
            if inbox_count > 0 and inbox_count == my_count:
                
                s += ' (all in inbox)'
                
            else:
                
                archive_count = filter_counts[ archive_filter ]
                
                if archive_count > 0 and archive_count == my_count:
                    
                    s += ' (all in archive)'
                    
                
            
        
        return s
        
    
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
class ListeningMediaList( MediaList ):
    """A MediaList that subscribes to controller pubsubs so it stays in sync
    with content and service updates pushed to the gui."""
    
    def __init__( self, location_context: ClientLocation.LocationContext, media_results ):
        
        MediaList.__init__( self, location_context, media_results )
        
        # keep this list updated as the rest of the program changes files/services
        HG.client_controller.sub( self, 'ProcessContentUpdates', 'content_updates_gui' )
        HG.client_controller.sub( self, 'ProcessServiceUpdates', 'service_updates_gui' )
        
    
    def AddMediaResults( self, media_results ):
        """Wrap any media results we do not already hold and add them to the list.
        
        Returns the list of newly created media singletons.
        """
        
        new_media = [
            self._GenerateMediaSingleton( media_result )
            for media_result in media_results
            if media_result.GetHash() not in self._hashes
        ]
        
        self.AddMedia( new_media )
        
        return new_media
        
    
|
|
|
|
|
|
|
|
class MediaCollection( MediaList, Media ):
    """A group of media presented as a single 'file' in the gui.
    
    Aggregate properties (size, tags, locations, ratings, viewing stats, and
    so on) are cached on the instance and recalculated when the underlying
    media change.
    """
    
    def __init__( self, location_context: ClientLocation.LocationContext, media_results ):
        
        # note for later: ideal here is to stop this multiple inheritance mess and instead have this be a media that *has* a list, not *is* a list
        
        Media.__init__( self )
        MediaList.__init__( self, location_context, media_results )
        
        # aggregate caches, filled in by _RecalcInternals below
        self._archive = True
        self._inbox = False
        
        self._size = 0
        self._size_definite = True
        
        self._width = None
        self._height = None
        self._duration = None
        self._num_frames = None
        self._num_words = None
        
        self._has_audio = None
        
        self._tags_manager = None
        self._locations_manager = None
        self._file_viewing_stats_manager = None
        
        self._internals_dirty = False
        
        self._RecalcInternals()
        
    
    def _RecalcAfterContentUpdates( self, service_keys_to_content_updates ):
        """Recalculate only the aggregates the given content updates could touch."""
        
        archive_or_inbox = False
        
        data_types = set()
        
        for ( service_key, content_updates ) in service_keys_to_content_updates.items():
            
            for content_update in content_updates:
                
                data_type = content_update.GetDataType()
                
                if data_type in ( HC.CONTENT_TYPE_URLS, HC.CONTENT_TYPE_NOTES ):
                    
                    # not aggregated here, nothing to recalc
                    continue
                    
                elif data_type == HC.CONTENT_TYPE_FILES:
                    
                    action = content_update.GetAction()
                    
                    if action in ( HC.CONTENT_UPDATE_ARCHIVE, HC.CONTENT_UPDATE_INBOX ):
                        
                        archive_or_inbox = True
                        
                        continue
                        
                    
                
                data_types.add( data_type )
                
            
        
        if archive_or_inbox and data_types.issubset( {
            HC.CONTENT_TYPE_RATINGS,
            HC.CONTENT_TYPE_FILE_VIEWING_STATS,
            HC.CONTENT_TYPE_MAPPINGS
        }):
            
            # cheap targeted recalcs are sufficient
            # (the redundant inner 'if archive_or_inbox' check is removed--the
            # outer condition already guarantees it)
            self._RecalcArchiveInbox()
            
            for data_type in data_types:
                
                if data_type == HC.CONTENT_TYPE_RATINGS:
                    
                    self._RecalcRatings()
                    
                elif data_type == HC.CONTENT_TYPE_FILE_VIEWING_STATS:
                    
                    self._RecalcFileViewingStats()
                    
                elif data_type == HC.CONTENT_TYPE_MAPPINGS:
                    
                    self._RecalcTags()
                    
                
            
        elif len( data_types ) > 0:
            
            # something heavier changed; do the full recalc
            self._RecalcInternals()
            
        
    
    def _RecalcAfterMediaRemove( self ):
        
        MediaList._RecalcAfterMediaRemove( self )
        
        self._RecalcArchiveInbox()
        
    
    def _RecalcArchiveInbox( self ):
        
        # the collection counts as archived/inboxed if any member is
        self._archive = True in ( media.HasArchive() for media in self._sorted_media )
        self._inbox = True in ( media.HasInbox() for media in self._sorted_media )
        
    
    def _RecalcFileViewingStats( self ):
        
        self._file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateCombinedManager( [ m.GetFileViewingStatsManager() for m in self._sorted_media ] )
        
    
    def _RecalcHashes( self ):
        """Rebuild the combined locations manager from all members."""
        
        MediaList._RecalcHashes( self )
        
        all_locations_managers = [ media.GetLocationsManager() for media in self._sorted_media ]
        
        current_to_timestamps = {}
        deleted_to_timestamps = {}
        
        for service_key in HG.client_controller.services_manager.GetServiceKeys( HC.FILE_SERVICES ):
            
            current_timestamps = [ timestamp for timestamp in ( locations_manager.GetCurrentTimestamp( service_key ) for locations_manager in all_locations_managers ) if timestamp is not None ]
            
            if len( current_timestamps ) > 0:
                
                # the collection 'arrived' when its latest member did
                current_to_timestamps[ service_key ] = max( current_timestamps )
                
            
            deleted_timestamps = [ timestamps for timestamps in ( locations_manager.GetDeletedTimestamps( service_key ) for locations_manager in all_locations_managers ) if timestamps is not None and timestamps[0] is not None ]
            
            if len( deleted_timestamps ) > 0:
                
                # pick the pair with the latest deletion time
                deleted_to_timestamps[ service_key ] = max( deleted_timestamps, key = lambda ts: ts[0] )
                
            
        
        pending = HydrusData.MassUnion( [ locations_manager.GetPending() for locations_manager in all_locations_managers ] )
        petitioned = HydrusData.MassUnion( [ locations_manager.GetPetitioned() for locations_manager in all_locations_managers ] )
        
        self._locations_manager = ClientMediaManagers.LocationsManager( current_to_timestamps, deleted_to_timestamps, pending, petitioned )
        
    
    def _RecalcInternals( self ):
        """Full rebuild of every cached aggregate."""
        
        self._RecalcHashes()
        
        self._RecalcTags()
        
        self._RecalcArchiveInbox()
        
        self._size = sum( [ media.GetSize() for media in self._sorted_media ] )
        self._size_definite = not False in ( media.IsSizeDefinite() for media in self._sorted_media )
        
        duration_sum = sum( [ media.GetDuration() for media in self._sorted_media if media.HasDuration() ] )
        
        if duration_sum > 0: self._duration = duration_sum
        else: self._duration = None
        
        self._has_audio = True in ( media.HasAudio() for media in self._sorted_media )
        
        self._has_notes = True in ( media.HasNotes() for media in self._sorted_media )
        
        self._RecalcRatings()
        self._RecalcFileViewingStats()
        
    
    def _RecalcRatings( self ):
        
        # horrible compromise: just borrow the first media's ratings manager
        if len( self._sorted_media ) > 0:
            
            self._ratings_manager = self._sorted_media[0].GetRatingsManager()
            
        else:
            
            self._ratings_manager = ClientMediaManagers.RatingsManager( {} )
            
        
    
    def _RecalcTags( self ):
        
        tags_managers = [ m.GetTagsManager() for m in self._sorted_media ]
        
        self._tags_manager = ClientMediaManagers.TagsManager.MergeTagsManagers( tags_managers )
        
    
    def AddMedia( self, new_media ):
        
        MediaList.AddMedia( self, new_media )
        
        self._RecalcInternals()
        
    
    def DeletePending( self, service_key ):
        
        MediaList.DeletePending( self, service_key )
        
        self._RecalcInternals()
        
    
    def GetCurrentTimestamp( self, service_key: bytes ) -> typing.Optional[ int ]:
        
        return self._locations_manager.GetCurrentTimestamp( service_key )
        
    
    def GetDeletedTimestamps( self, service_key: bytes ) -> typing.Tuple[ typing.Optional[ int ], typing.Optional[ int ] ]:
        
        return self._locations_manager.GetDeletedTimestamps( service_key )
        
    
    def GetDisplayMedia( self ):
        """Return the representative media (first member's display media), or None."""
        
        first = self._GetFirst()
        
        if first is None:
            
            return None
            
        else:
            
            return first.GetDisplayMedia()
            
        
    
    def GetDuration( self ):
        
        return self._duration
        
    
    def GetFileViewingStatsManager( self ):
        
        return self._file_viewing_stats_manager
        
    
    def GetHash( self ):
        """Return the display media's hash, or None if the collection is empty."""
        
        display_media = self.GetDisplayMedia()
        
        if display_media is None:
            
            return None
            
        else:
            
            return display_media.GetHash()
            
        
    
    def GetLocationsManager( self ):
        
        return self._locations_manager
        
    
    def GetMime( self ):
        
        # collections present as a pseudo-mime of their own
        return HC.APPLICATION_HYDRUS_CLIENT_COLLECTION
        
    
    def GetNumInbox( self ):
        
        return sum( ( media.GetNumInbox() for media in self._sorted_media ) )
        
    
    def GetNumFrames( self ):
        
        num_frames = ( media.GetNumFrames() for media in self._sorted_media )
        
        return sum( ( nf for nf in num_frames if nf is not None ) )
        
    
    def GetNumWords( self ):
        
        num_words = ( media.GetNumWords() for media in self._sorted_media )
        
        return sum( ( nw for nw in num_words if nw is not None ) )
        
    
    def GetPrettyInfoLines( self ):
        """Return a single summary line: total size, pseudo-mime and file count."""
        
        size = HydrusData.ToHumanBytes( self._size )
        
        mime = HC.mime_string_lookup[ HC.APPLICATION_HYDRUS_CLIENT_COLLECTION ]
        
        info_string = size + ' ' + mime
        
        info_string += ' (' + HydrusData.ToHumanInt( self.GetNumFiles() ) + ' files)'
        
        return [ info_string ]
        
    
    def GetRatingsManager( self ):
        
        return self._ratings_manager
        
    
    def GetResolution( self ):
        
        if self._width is None:
            
            return ( 0, 0 )
            
        else:
            
            return ( self._width, self._height )
            
        
    
    def GetSingletonsTagsManagers( self ):
        """Return tags managers for every singleton, recursing into sub-collections."""
        
        tags_managers = [ m.GetTagsManager() for m in self._singleton_media ]
        
        for m in self._collected_media: tags_managers.extend( m.GetSingletonsTagsManagers() )
        
        return tags_managers
        
    
    def GetSize( self ):
        
        return self._size
        
    
    def GetTagsManager( self ):
        
        return self._tags_manager
        
    
    def HasArchive( self ):
        
        return self._archive
        
    
    def HasAudio( self ):
        
        return self._has_audio
        
    
    def HasDuration( self ):
        
        return self._duration is not None
        
    
    def HasImages( self ):
        
        return True in ( media.HasImages() for media in self._sorted_media )
        
    
    def HasInbox( self ):
        
        return self._inbox
        
    
    def HasNotes( self ):
        
        return self._has_notes
        
    
    def IsCollection( self ):
        
        return True
        
    
    def IsImage( self ):
        
        return False
        
    
    def IsSizeDefinite( self ):
        
        return self._size_definite
        
    
    def RecalcInternals( self ):
        
        self._RecalcInternals()
        
    
    def ResetService( self, service_key ):
        
        MediaList.ResetService( self, service_key )
        
        self._RecalcInternals()
        
    
    def UpdateFileInfo( self, hashes_to_media_results ):
        
        for media in self._sorted_media:
            
            media.UpdateFileInfo( hashes_to_media_results )
            
        
        self._RecalcInternals()
        
    
|
|
|
|
|
|
|
|
class MediaSingleton( Media ):
|
|
|
|
|
2020-05-20 21:36:02 +00:00
|
|
|
    def __init__( self, media_result: ClientMediaResult.MediaResult ):
        """A Media wrapping exactly one underlying media result."""
        
        Media.__init__( self )
        
        self._media_result = media_result
        
    
|
|
|
|
|
|
|
2016-06-15 18:59:44 +00:00
|
|
|
def Duplicate( self ):
|
|
|
|
|
|
|
|
return MediaSingleton( self._media_result.Duplicate() )
|
|
|
|
|
|
|
|
|
2020-04-01 21:51:42 +00:00
|
|
|
    def GetDisplayMedia( self ) -> 'MediaSingleton':
        """A singleton displays itself."""
        
        return self
        
    
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-09-20 19:47:31 +00:00
|
|
|
    def GetDuration( self ):
        """Return the duration, or None if not applicable."""
        
        return self._media_result.GetDuration()
        
    
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2019-03-06 23:06:22 +00:00
|
|
|
    def GetFileViewingStatsManager( self ):
        """Return the underlying file viewing stats manager."""
        
        return self._media_result.GetFileViewingStatsManager()
        
    
|
|
|
|
|
|
|
2017-09-20 19:47:31 +00:00
|
|
|
    def GetHash( self ):
        """Return the file's hash."""
        
        return self._media_result.GetHash()
        
    
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2019-12-05 05:29:32 +00:00
|
|
|
    def GetHashId( self ):
        """Return the file's database hash id."""
        
        return self._media_result.GetHashId()
        
    
|
|
|
|
|
|
|
2018-01-03 22:37:30 +00:00
|
|
|
def GetHashes( self, has_location = None, discriminant = None, not_uploaded_to = None, ordered = False ):
|
|
|
|
|
|
|
|
if self.MatchesDiscriminant( has_location = has_location, discriminant = discriminant, not_uploaded_to = not_uploaded_to ):
|
2015-09-16 18:11:00 +00:00
|
|
|
|
2018-01-03 22:37:30 +00:00
|
|
|
if ordered:
|
|
|
|
|
|
|
|
return [ self._media_result.GetHash() ]
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
|
|
|
return { self._media_result.GetHash() }
|
|
|
|
|
2015-09-16 18:11:00 +00:00
|
|
|
|
|
|
|
else:
|
|
|
|
|
2018-01-03 22:37:30 +00:00
|
|
|
if ordered:
|
|
|
|
|
|
|
|
return []
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
|
|
|
return set()
|
|
|
|
|
2015-09-16 18:11:00 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
|
2020-01-22 21:04:43 +00:00
|
|
|
    def GetLocationsManager( self ):
        """Return the underlying locations manager."""
        
        return self._media_result.GetLocationsManager()
        
    
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
def GetMediaResult( self ): return self._media_result
|
|
|
|
|
|
|
|
def GetMime( self ): return self._media_result.GetMime()
|
|
|
|
|
2020-05-06 21:31:41 +00:00
|
|
|
    def GetNotesManager( self ):
        """Return the underlying notes manager."""
        
        return self._media_result.GetNotesManager()
        
    
|
|
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
def GetNumFiles( self ): return 1
|
|
|
|
|
|
|
|
def GetNumFrames( self ): return self._media_result.GetNumFrames()
|
|
|
|
|
|
|
|
def GetNumInbox( self ):
|
|
|
|
|
|
|
|
if self.HasInbox(): return 1
|
|
|
|
else: return 0
|
2015-07-08 21:45:38 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
def GetNumWords( self ): return self._media_result.GetNumWords()
|
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
def GetCurrentTimestamp( self, service_key ) -> typing.Optional[ int ]:
|
|
|
|
|
|
|
|
return self._media_result.GetLocationsManager().GetCurrentTimestamp( service_key )
|
|
|
|
|
|
|
|
|
|
|
|
def GetDeletedTimestamps( self, service_key: bytes ) -> typing.Tuple[ typing.Optional[ int ], typing.Optional[ int ] ]:
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
return self._media_result.GetLocationsManager().GetDeletedTimestamps( service_key )
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
|
2016-04-20 20:42:21 +00:00
|
|
|
def GetPrettyInfoLines( self ):
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2017-05-31 21:50:53 +00:00
|
|
|
file_info_manager = self._media_result.GetFileInfoManager()
|
|
|
|
locations_manager = self._media_result.GetLocationsManager()
|
|
|
|
|
2019-08-07 22:59:53 +00:00
|
|
|
( hash_id, hash, size, mime, width, height, duration, num_frames, has_audio, num_words ) = file_info_manager.ToTuple()
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2019-01-09 22:59:03 +00:00
|
|
|
info_string = HydrusData.ToHumanBytes( size ) + ' ' + HC.mime_string_lookup[ mime ]
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2021-07-14 20:42:19 +00:00
|
|
|
if width is not None and height is not None:
|
|
|
|
|
|
|
|
info_string += ' ({})'.format( HydrusData.ConvertResolutionToPrettyString( ( width, height ) ) )
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2020-04-01 21:51:42 +00:00
|
|
|
if duration is not None:
|
|
|
|
|
|
|
|
info_string += ', ' + HydrusData.ConvertMillisecondsToPrettyTime( duration )
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2020-04-01 21:51:42 +00:00
|
|
|
if num_frames is not None:
|
|
|
|
|
|
|
|
if duration is None or duration == 0 or num_frames == 0:
|
|
|
|
|
|
|
|
framerate_insert = ''
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
|
|
|
framerate_insert = ', {}fps'.format( round( num_frames / ( duration / 1000 ) ) )
|
|
|
|
|
|
|
|
|
|
|
|
info_string += ' ({} frames{})'.format( HydrusData.ToHumanInt( num_frames ), framerate_insert )
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2019-08-07 22:59:53 +00:00
|
|
|
if has_audio:
|
|
|
|
|
|
|
|
info_string += ', {}'.format( HG.client_controller.new_options.GetString( 'has_audio_label' ) )
|
|
|
|
|
|
|
|
|
2018-07-04 20:48:28 +00:00
|
|
|
if num_words is not None: info_string += ' (' + HydrusData.ToHumanInt( num_words ) + ' words)'
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2016-04-20 20:42:21 +00:00
|
|
|
lines = [ info_string ]
|
|
|
|
|
|
|
|
locations_manager = self._media_result.GetLocationsManager()
|
|
|
|
|
|
|
|
current_service_keys = locations_manager.GetCurrent()
|
2019-06-26 21:27:18 +00:00
|
|
|
deleted_service_keys = locations_manager.GetDeleted()
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
local_file_services = HG.client_controller.services_manager.GetLocalMediaFileServices()
|
|
|
|
|
|
|
|
current_local_file_services = [ service for service in local_file_services if service.GetServiceKey() in current_service_keys ]
|
|
|
|
|
|
|
|
if len( current_local_file_services ) > 0:
|
|
|
|
|
|
|
|
for local_file_service in current_local_file_services:
|
|
|
|
|
|
|
|
timestamp = locations_manager.GetCurrentTimestamp( local_file_service.GetServiceKey() )
|
|
|
|
|
|
|
|
lines.append( 'added to {} {}'.format( local_file_service.GetName(), ClientData.TimestampToPrettyTimeDelta( timestamp ) ) )
|
|
|
|
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
elif CC.COMBINED_LOCAL_FILE_SERVICE_KEY in current_service_keys:
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
timestamp = locations_manager.GetCurrentTimestamp( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
lines.append( 'imported {}'.format( ClientData.TimestampToPrettyTimeDelta( timestamp ) ) )
|
|
|
|
|
|
|
|
|
|
|
|
deleted_local_file_services = [ service for service in local_file_services if service.GetServiceKey() in deleted_service_keys ]
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
if CC.COMBINED_LOCAL_FILE_SERVICE_KEY in deleted_service_keys:
|
|
|
|
|
|
|
|
( timestamp, original_timestamp ) = locations_manager.GetDeletedTimestamps( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
|
|
|
|
|
|
|
|
lines.append( 'deleted from this client {}'.format( ClientData.TimestampToPrettyTimeDelta( timestamp ) ) )
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
elif len( deleted_local_file_services ) > 0:
|
2016-04-20 20:42:21 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
for local_file_service in deleted_local_file_services:
|
|
|
|
|
|
|
|
( timestamp, original_timestamp ) = locations_manager.GetDeletedTimestamps( local_file_service.GetServiceKey() )
|
|
|
|
|
|
|
|
lines.append( 'removed from {} {}'.format( local_file_service.GetName(), ClientData.TimestampToPrettyTimeDelta( timestamp ) ) )
|
|
|
|
|
2016-04-20 20:42:21 +00:00
|
|
|
|
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
if locations_manager.IsTrashed():
|
2019-06-26 21:27:18 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
lines.append( 'in the trash' )
|
2019-06-26 21:27:18 +00:00
|
|
|
|
|
|
|
|
2019-09-25 21:34:18 +00:00
|
|
|
file_modified_timestamp = locations_manager.GetFileModifiedTimestamp()
|
|
|
|
|
|
|
|
if file_modified_timestamp is not None:
|
|
|
|
|
2020-06-24 21:25:24 +00:00
|
|
|
lines.append( 'file modified: {}'.format( ClientData.TimestampToPrettyTimeDelta( file_modified_timestamp ) ) )
|
2019-09-25 21:34:18 +00:00
|
|
|
|
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
for service_key in current_service_keys.intersection( HG.client_controller.services_manager.GetServiceKeys( HC.REMOTE_FILE_SERVICES ) ):
|
2016-06-01 20:04:15 +00:00
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
timestamp = locations_manager.GetCurrentTimestamp( service_key )
|
2016-06-01 20:04:15 +00:00
|
|
|
|
2019-10-02 23:38:59 +00:00
|
|
|
try:
|
|
|
|
|
|
|
|
service = HG.client_controller.services_manager.GetService( service_key )
|
|
|
|
|
|
|
|
except HydrusExceptions.DataMissing:
|
|
|
|
|
|
|
|
continue
|
|
|
|
|
2016-06-01 20:04:15 +00:00
|
|
|
|
|
|
|
service_type = service.GetServiceType()
|
|
|
|
|
|
|
|
if service_type == HC.IPFS:
|
|
|
|
|
|
|
|
status = 'pinned '
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
|
|
|
status = 'uploaded '
|
|
|
|
|
|
|
|
|
2020-06-24 21:25:24 +00:00
|
|
|
lines.append( status + 'to ' + service.GetName() + ' ' + ClientData.TimestampToPrettyTimeDelta( timestamp ) )
|
2016-06-01 20:04:15 +00:00
|
|
|
|
|
|
|
|
2016-04-20 20:42:21 +00:00
|
|
|
return lines
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
|
2016-06-15 18:59:44 +00:00
|
|
|
def GetRatingsManager( self ):
    """Delegate to the underlying media result's ratings manager."""
    
    return self._media_result.GetRatingsManager()
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
def GetResolution( self ):
    """Return ( width, height ), or ( 0, 0 ) when the media has no known resolution."""
    
    ( width, height ) = self._media_result.GetResolution()
    
    if width is None:
        
        return ( 0, 0 )
        
    
    return ( width, height )
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
def GetSize( self ):
    """Return the file size in bytes, or 0 when the size is unknown."""
    
    size = self._media_result.GetSize()
    
    return 0 if size is None else size
|
|
|
|
|
|
|
|
|
2021-04-28 21:43:16 +00:00
|
|
|
def GetTagsManager( self ):
    """Delegate to the underlying media result's tags manager."""
    
    tags_manager = self._media_result.GetTagsManager()
    
    return tags_manager
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2015-04-08 18:10:50 +00:00
|
|
|
def GetTitleString( self ):
    """Generate the media viewer top-bar tag summary for this media, or '' when it has no tags."""
    
    tags = self.GetTagsManager().GetCurrentAndPending( CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_SINGLE_MEDIA )
    
    if len( tags ) == 0:
        
        return ''
        
    
    new_options = HG.client_controller.new_options
    
    tag_summary_generator = new_options.GetTagSummaryGenerator( 'media_viewer_top' )
    
    return tag_summary_generator.GenerateSummary( tags )
|
2015-04-08 18:10:50 +00:00
|
|
|
|
|
|
|
|
2017-09-20 19:47:31 +00:00
|
|
|
def HasAnyOfTheseHashes( self, hashes ):
    """Return True if this media's hash is among the given hashes."""
    
    my_hash = self._media_result.GetHash()
    
    return my_hash in hashes
|
|
|
|
|
|
|
|
|
2019-08-07 22:59:53 +00:00
|
|
|
def HasArchive( self ):
    """Return True if this media is archived, i.e. not in the inbox."""
    
    in_inbox = self._media_result.GetInbox()
    
    return not in_inbox
|
|
|
|
|
|
|
|
|
|
|
|
def HasAudio( self ):
    """Delegate the audio-track question to the underlying media result."""
    
    has_audio = self._media_result.HasAudio()
    
    return has_audio
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
2019-03-06 23:06:22 +00:00
|
|
|
def HasDuration( self ):
    """Return True if this media has a positive (non-zero, non-None) duration."""
    
    duration = self._media_result.GetDuration()
    
    if duration is None:
        
        return False
        
    
    return duration > 0
|
2019-03-06 23:06:22 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
def HasImages( self ):
    """For a single media, 'has images' simply means it is itself an image."""
    
    return self.IsImage()
|
|
|
|
|
|
|
|
def HasInbox( self ):
    """Return True if this media is in the inbox."""
    
    return self._media_result.GetInbox()
|
|
|
|
|
2020-05-06 21:31:41 +00:00
|
|
|
def HasNotes( self ):
    """Delegate the notes question to the underlying media result."""
    
    has_notes = self._media_result.HasNotes()
    
    return has_notes
|
|
|
|
|
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
def IsCollection( self ):
    """A single media is never a collection."""
    
    return False
|
|
|
|
|
2018-05-02 20:45:20 +00:00
|
|
|
def IsImage( self ):
    """Return True if this media's mime is one of the recognised image mimes."""
    
    mime = self._media_result.GetMime()
    
    return mime in HC.IMAGES
|
2018-05-02 20:45:20 +00:00
|
|
|
|
2015-03-18 21:46:29 +00:00
|
|
|
|
|
|
|
def IsSizeDefinite( self ):
    """Return True if the file's byte size is known."""
    
    return self._media_result.GetSize() is not None
|
2017-12-13 22:33:07 +00:00
|
|
|
|
2019-07-24 21:39:02 +00:00
|
|
|
def IsStaticImage( self ):
    """Delegate the static-image question to the underlying media result."""
    
    is_static = self._media_result.IsStaticImage()
    
    return is_static
|
|
|
|
|
|
|
|
|
2018-04-05 01:22:26 +00:00
|
|
|
def MatchesDiscriminant( self, has_location = None, discriminant = None, not_uploaded_to = None ):
    """Test this media against up to three optional filters, returning True only if every
    supplied filter passes.
    
    has_location: a service key that must be in the media's current file services.
    discriminant: one of the CC.DISCRIMINANT_* constants (inbox/archive/local/etc.).
    not_uploaded_to: a service key that must NOT be in the media's current file services.
    """
    
    if discriminant is not None:
        
        inbox = self._media_result.GetInbox()
        
        locations_manager = self._media_result.GetLocationsManager()
        
        if discriminant == CC.DISCRIMINANT_INBOX:
            
            p = inbox
            
        elif discriminant == CC.DISCRIMINANT_ARCHIVE:
            
            p = not inbox
            
        elif discriminant == CC.DISCRIMINANT_LOCAL:
            
            p = locations_manager.IsLocal()
            
        elif discriminant == CC.DISCRIMINANT_LOCAL_BUT_NOT_IN_TRASH:
            
            p = locations_manager.IsLocal() and not locations_manager.IsTrashed()
            
        elif discriminant == CC.DISCRIMINANT_NOT_LOCAL:
            
            p = not locations_manager.IsLocal()
            
        elif discriminant == CC.DISCRIMINANT_DOWNLOADING:
            
            p = locations_manager.IsDownloading()
            
        
        # NOTE(review): a discriminant value outside the branches above would leave 'p'
        # unbound and raise NameError here -- presumably callers only pass the constants
        # handled above; confirm before relying on other values.
        if not p:
            
            return False
            
        
    
    if has_location is not None:
        
        locations_manager = self._media_result.GetLocationsManager()
        
        # must currently be in the given file service
        if has_location not in locations_manager.GetCurrent():
            
            return False
            
        
    
    if not_uploaded_to is not None:
        
        locations_manager = self._media_result.GetLocationsManager()
        
        # must NOT currently be in the given file service
        if not_uploaded_to in locations_manager.GetCurrent():
            
            return False
            
        
    
    return True
|
|
|
|
|
|
|
|
|
2019-12-11 23:18:37 +00:00
|
|
|
def UpdateFileInfo( self, hashes_to_media_results ):
    """Swap in a fresh media result for this media, if one is present in the given mapping."""
    
    try:
        
        self._media_result = hashes_to_media_results[ self.GetHash() ]
        
    except KeyError:
        
        # no update for us
        pass
        
    
|
|
|
|
|
2017-12-13 22:33:07 +00:00
|
|
|
|
|
|
|
|
2017-08-09 21:33:51 +00:00
|
|
|
class MediaSort( HydrusSerialisable.SerialisableBase ):
    """A serialisable description of how to sort media.
    
    sort_type is a ( sort_metatype, sort_data ) pair where sort_metatype is one of
    'system' (sort_data: a CC.SORT_FILES_BY_* constant), 'namespaces' (sort_data:
    ( namespaces_tuple, tag_display_type )), or 'rating' (sort_data: a rating service
    key). sort_order is CC.SORT_ASC or CC.SORT_DESC.
    """
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_MEDIA_SORT
    SERIALISABLE_NAME = 'Media Sort'
    SERIALISABLE_VERSION = 2
    
    def __init__( self, sort_type = None, sort_order = None ):
        """Default is system filesize, ascending."""
        
        if sort_type is None:
            
            sort_type = ( 'system', CC.SORT_FILES_BY_FILESIZE )
            
        
        if sort_order is None:
            
            sort_order = CC.SORT_ASC
            
        
        ( sort_metatype, sort_data ) = sort_type
        
        if sort_metatype == 'namespaces':
            
            # normalise the namespace sequence to a tuple so sort_type compares/serialises consistently
            ( namespaces, tag_display_type ) = sort_data
            
            sort_data = ( tuple( namespaces ), tag_display_type )
            
            sort_type = ( sort_metatype, sort_data )
            
        
        self.sort_type = sort_type
        
        self.sort_order = sort_order
        
    
    def _GetSerialisableInfo( self ):
        # rating sort_data is a binary service key, stored as hex; the others serialise as-is
        
        ( sort_metatype, sort_data ) = self.sort_type
        
        if sort_metatype == 'system':
            
            serialisable_sort_data = sort_data
            
        elif sort_metatype == 'namespaces':
            
            serialisable_sort_data = sort_data
            
        elif sort_metatype == 'rating':
            
            service_key = sort_data
            
            serialisable_sort_data = service_key.hex()
            
        
        return ( sort_metatype, serialisable_sort_data, self.sort_order )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        # inverse of _GetSerialisableInfo
        
        ( sort_metatype, serialisable_sort_data, self.sort_order ) = serialisable_info
        
        if sort_metatype == 'system':
            
            sort_data = serialisable_sort_data
            
        elif sort_metatype == 'namespaces':
            
            ( namespaces, tag_display_type ) = serialisable_sort_data
            
            sort_data = ( tuple( namespaces ), tag_display_type )
            
        elif sort_metatype == 'rating':
            
            sort_data = bytes.fromhex( serialisable_sort_data )
            
        
        self.sort_type = ( sort_metatype, sort_data )
        
    
    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
        
        if version == 1:
            
            ( sort_metatype, serialisable_sort_data, sort_order ) = old_serialisable_info
            
            if sort_metatype == 'namespaces':
                
                # v1 stored a bare namespace list; v2 pairs it with a tag display type
                namespaces = serialisable_sort_data
                
                serialisable_sort_data = ( namespaces, ClientTags.TAG_DISPLAY_ACTUAL )
                
            
            new_serialisable_info = ( sort_metatype, serialisable_sort_data, sort_order )
            
            return ( 2, new_serialisable_info )
            
        
    
    def CanAsc( self ):
        """Return False for sorts where asc/desc has no meaning (filetype, random)."""
        
        ( sort_metatype, sort_data ) = self.sort_type
        
        if sort_metatype == 'system':
            
            if sort_data in ( CC.SORT_FILES_BY_MIME, CC.SORT_FILES_BY_RANDOM ):
                
                return False
                
            
        
        return True
        
    
    def GetNamespaces( self ):
        """Return the namespace list for a namespace sort, else []."""
        
        ( sort_metadata, sort_data ) = self.sort_type
        
        if sort_metadata == 'namespaces':
            
            ( namespaces, tag_display_type ) = sort_data
            
            return list( namespaces )
            
        else:
            
            return []
            
        
    
    def GetSortKeyAndReverse( self, location_context: ClientLocation.LocationContext ):
        """Build and return ( sort_key, reverse ) suitable for list.sort over media objects.
        
        location_context is only consulted by the import-time sort.
        """
        
        ( sort_metadata, sort_data ) = self.sort_type
        
        def deal_with_none( x ):
            # missing values sort before every real value
            
            if x is None: return -1
            else: return x
            
        
        if sort_metadata == 'system':
            
            if sort_data == CC.SORT_FILES_BY_RANDOM:
                
                def sort_key( x ):
                    
                    return random.random()
                    
                
            elif sort_data == CC.SORT_FILES_BY_APPROX_BITRATE:
                
                def sort_key( x ):
                    
                    # videos > images > pdfs
                    # heavy vids first, heavy images first
                    # key is ( per-second bitrate, per-pixel/per-frame bitrate )
                    
                    duration = x.GetDuration()
                    num_frames = x.GetNumFrames()
                    size = x.GetSize()
                    resolution = x.GetResolution()
                    
                    if duration is None or duration == 0:
                        
                        if size is None or size == 0:
                            
                            duration_bitrate = -1
                            frame_bitrate = -1
                            
                        else:
                            
                            # not a video: primary key 0, secondary is bytes per pixel
                            duration_bitrate = 0
                            
                            if resolution is None:
                                
                                frame_bitrate = 0
                                
                            else:
                                
                                ( width, height ) = x.GetResolution()
                                
                                num_pixels = width * height
                                
                                if size is None or size == 0 or num_pixels == 0:
                                    
                                    frame_bitrate = -1
                                    
                                else:
                                    
                                    frame_bitrate = size / num_pixels
                                    
                                
                            
                        
                    else:
                        
                        if size is None or size == 0:
                            
                            duration_bitrate = -1
                            frame_bitrate = -1
                            
                        else:
                            
                            duration_bitrate = size / duration
                            
                            if num_frames is None or num_frames == 0:
                                
                                frame_bitrate = 0
                                
                            else:
                                
                                frame_bitrate = duration_bitrate / num_frames
                                
                            
                        
                    
                    return ( duration_bitrate, frame_bitrate )
                    
                
            elif sort_data == CC.SORT_FILES_BY_FILESIZE:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetSize() )
                    
                
            elif sort_data == CC.SORT_FILES_BY_DURATION:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetDuration() )
                    
                
            elif sort_data == CC.SORT_FILES_BY_FRAMERATE:
                
                def sort_key( x ):
                    
                    num_frames = x.GetNumFrames()
                    
                    if num_frames is None or num_frames == 0:
                        
                        return -1
                        
                    
                    duration = x.GetDuration()
                    
                    if duration is None or duration == 0:
                        
                        return -1
                        
                    
                    return num_frames / duration
                    
                
            elif sort_data == CC.SORT_FILES_BY_NUM_COLLECTION_FILES:
                
                def sort_key( x ):
                    
                    # collections sort after singletons at equal file count
                    return ( x.GetNumFiles(), isinstance( x, MediaCollection ) )
                    
                
            elif sort_data == CC.SORT_FILES_BY_NUM_FRAMES:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetNumFrames() )
                    
                
            elif sort_data == CC.SORT_FILES_BY_HAS_AUDIO:
                
                def sort_key( x ):
                    
                    # negated so audio (True == 1) sorts before silent in ascending order
                    return - deal_with_none( x.HasAudio() )
                    
                
            elif sort_data == CC.SORT_FILES_BY_IMPORT_TIME:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetLocationsManager().GetBestCurrentTimestamp( location_context ) )
                    
                
            elif sort_data == CC.SORT_FILES_BY_FILE_MODIFIED_TIMESTAMP:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetLocationsManager().GetFileModifiedTimestamp() )
                    
                
            elif sort_data == CC.SORT_FILES_BY_LAST_VIEWED_TIME:
                
                def sort_key( x ):
                    
                    fvsm = x.GetFileViewingStatsManager()
                    
                    # do not do viewtime as a secondary sort here, to allow for user secondary sort to help out
                    
                    return deal_with_none( fvsm.GetLastViewedTime( CC.CANVAS_MEDIA_VIEWER ) )
                    
                
            elif sort_data == CC.SORT_FILES_BY_HEIGHT:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetResolution()[1] )
                    
                
            elif sort_data == CC.SORT_FILES_BY_WIDTH:
                
                def sort_key( x ):
                    
                    return deal_with_none( x.GetResolution()[0] )
                    
                
            elif sort_data == CC.SORT_FILES_BY_RATIO:
                
                def sort_key( x ):
                    
                    ( width, height ) = x.GetResolution()
                    
                    if width is None or height is None or width == 0 or height == 0:
                        
                        return -1
                        
                    else:
                        
                        return width / height
                        
                    
                
            elif sort_data == CC.SORT_FILES_BY_NUM_PIXELS:
                
                def sort_key( x ):
                    
                    ( width, height ) = x.GetResolution()
                    
                    if width is None or height is None:
                        
                        return -1
                        
                    else:
                        
                        return width * height
                        
                    
                
            elif sort_data == CC.SORT_FILES_BY_NUM_TAGS:
                
                def sort_key( x ):
                    
                    tags_manager = x.GetTagsManager()
                    
                    return len( tags_manager.GetCurrentAndPending( CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_ACTUAL ) )
                    
                
            elif sort_data == CC.SORT_FILES_BY_MIME:
                
                def sort_key( x ):
                    
                    return x.GetMime()
                    
                
            elif sort_data == CC.SORT_FILES_BY_MEDIA_VIEWS:
                
                def sort_key( x ):
                    
                    fvsm = x.GetFileViewingStatsManager()
                    
                    # do not do viewtime as a secondary sort here, to allow for user secondary sort to help out
                    
                    return fvsm.GetViews( CC.CANVAS_MEDIA_VIEWER )
                    
                
            elif sort_data == CC.SORT_FILES_BY_MEDIA_VIEWTIME:
                
                def sort_key( x ):
                    
                    fvsm = x.GetFileViewingStatsManager()
                    
                    # do not do views as a secondary sort here, to allow for user secondary sort to help out
                    
                    return fvsm.GetViewtime( CC.CANVAS_MEDIA_VIEWER )
                    
                
            
        elif sort_metadata == 'namespaces':
            
            ( namespaces, tag_display_type ) = sort_data
            
            def sort_key( x ):
                
                x_tags_manager = x.GetTagsManager()
                
                # one comparable slice per namespace, in the configured order
                return [ x_tags_manager.GetComparableNamespaceSlice( ( namespace, ), tag_display_type ) for namespace in namespaces ]
                
            
        elif sort_metadata == 'rating':
            
            service_key = sort_data
            
            def sort_key( x ):
                
                x_ratings_manager = x.GetRatingsManager()
                
                rating = deal_with_none( x_ratings_manager.GetRating( service_key ) )
                
                return rating
                
            
        
        reverse = self.sort_order == CC.SORT_DESC
        
        return ( sort_key, reverse )
        
    
    def GetSortOrderStrings( self ):
        """Return ( asc_label, desc_label, default_sort_order ) for the current sort type."""
        
        ( sort_metatype, sort_data ) = self.sort_type
        
        if sort_metatype == 'system':
            
            sort_string_lookup = {}
            
            sort_string_lookup[ CC.SORT_FILES_BY_APPROX_BITRATE ] = ( 'smallest first', 'largest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_FILESIZE ] = ( 'smallest first', 'largest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_DURATION ] = ( 'shortest first', 'longest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_FRAMERATE ] = ( 'slowest first', 'fastest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_NUM_COLLECTION_FILES ] = ( 'fewest first', 'most first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_NUM_FRAMES ] = ( 'smallest first', 'largest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_HAS_AUDIO ] = ( 'audio first', 'silent first', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_IMPORT_TIME ] = ( 'oldest first', 'newest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_FILE_MODIFIED_TIMESTAMP ] = ( 'oldest first', 'newest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_LAST_VIEWED_TIME ] = ( 'oldest first', 'newest first', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_MIME ] = ( 'filetype', 'filetype', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_RANDOM ] = ( 'random', 'random', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_WIDTH ] = ( 'slimmest first', 'widest first', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_HEIGHT ] = ( 'shortest first', 'tallest first', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_RATIO ] = ( 'tallest first', 'widest first', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_NUM_PIXELS ] = ( 'ascending', 'descending', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_NUM_TAGS ] = ( 'ascending', 'descending', CC.SORT_ASC )
            sort_string_lookup[ CC.SORT_FILES_BY_MEDIA_VIEWS ] = ( 'ascending', 'descending', CC.SORT_DESC )
            sort_string_lookup[ CC.SORT_FILES_BY_MEDIA_VIEWTIME ] = ( 'ascending', 'descending', CC.SORT_DESC )
            
            return sort_string_lookup[ sort_data ]
            
        elif sort_metatype == 'namespaces':
            
            return ( 'a-z', 'z-a', CC.SORT_ASC )
            
        else:
            
            # rating (and anything else) just gets generic labels
            return ( 'ascending', 'descending', CC.SORT_DESC )
            
        
    
    def GetSortTypeString( self ):
        """Return a human-readable 'sort by ...' description of the sort type."""
        
        ( sort_metatype, sort_data ) = self.sort_type
        
        sort_string = 'sort by '
        
        if sort_metatype == 'system':
            
            sort_string += CC.sort_type_string_lookup[ sort_data ]
            
        elif sort_metatype == 'namespaces':
            
            ( namespaces, tag_display_type ) = sort_data
            
            sort_string += 'tags: ' + '-'.join( namespaces )
            
        elif sort_metatype == 'rating':
            
            service_key = sort_data
            
            try:
                
                service = HG.client_controller.services_manager.GetService( service_key )
                
                name = service.GetName()
                
            except HydrusExceptions.DataMissing:
                
                # the rating service has since been deleted
                name = 'unknown service'
                
            
            sort_string += 'rating: {}'.format( name )
            
        
        return sort_string
        
    
    def Sort( self, location_context: ClientLocation.LocationContext, media_results_list: "SortedList" ):
        """Sort the given SortedList in place according to this sort."""
        
        ( sort_metadata, sort_data ) = self.sort_type
        
        if sort_data == CC.SORT_FILES_BY_RANDOM:
            
            # shuffle is cheaper than sorting on a random key
            media_results_list.random_sort()
            
        else:
            
            ( sort_key, reverse ) = self.GetSortKeyAndReverse( location_context )
            
            media_results_list.sort( sort_key, reverse = reverse )
            
        
    
    def ToString( self ):
        """Return the full human-readable description, e.g. 'sort by filesize, largest first'."""
        
        sort_type_string = self.GetSortTypeString()
        
        ( asc_string, desc_string, sort_gumpf ) = self.GetSortOrderStrings()
        
        sort_order_string = asc_string if self.sort_order == CC.SORT_ASC else desc_string
        
        return '{}, {}'.format( sort_type_string, sort_order_string )
        
    
|
2019-01-23 22:19:16 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
# register MediaSort with the serialisation system so saved sorts can be loaded back by type id
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_MEDIA_SORT ] = MediaSort
|
|
|
|
|
|
|
|
class SortedList( object ):
|
2017-08-09 21:33:51 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def __init__( self, initial_items = None ):
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
if initial_items is None:
|
|
|
|
|
|
|
|
initial_items = []
|
|
|
|
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._sort_key = None
|
|
|
|
self._sort_reverse = False
|
2016-07-27 21:53:34 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._sorted_list = list( initial_items )
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._items_to_indices = {}
|
|
|
|
self._indices_dirty = True
|
2015-08-05 18:42:35 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def __contains__( self, item ):
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
if self._indices_dirty:
|
|
|
|
|
|
|
|
self._RecalcIndices()
|
|
|
|
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
return self._items_to_indices.__contains__( item )
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
|
|
|
|
def __getitem__( self, value ):
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
return self._sorted_list.__getitem__( value )
|
2015-08-05 18:42:35 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def __iter__( self ):
|
2016-06-15 18:59:44 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
return iter( self._sorted_list )
|
2016-06-15 18:59:44 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
|
|
|
|
def __len__( self ):
|
2016-06-15 18:59:44 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
return len( self._sorted_list )
|
2016-06-15 18:59:44 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def _DirtyIndices( self ):
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._indices_dirty = True
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._items_to_indices = {}
|
2015-08-05 18:42:35 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def _RecalcIndices( self ):
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._items_to_indices = { item : index for ( index, item ) in enumerate( self._sorted_list ) }
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._indices_dirty = False
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def append_items( self, items ):
|
|
|
|
|
|
|
|
if self._indices_dirty is None:
|
|
|
|
|
|
|
|
self._RecalcIndices()
|
|
|
|
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
for ( i, item ) in enumerate( items, start = len( self._sorted_list ) ):
|
|
|
|
|
|
|
|
self._items_to_indices[ item ] = i
|
|
|
|
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._sorted_list.extend( items )
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def index( self, item ):
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
if self._indices_dirty:
|
|
|
|
|
|
|
|
self._RecalcIndices()
|
|
|
|
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
try:
|
|
|
|
|
|
|
|
result = self._items_to_indices[ item ]
|
|
|
|
|
|
|
|
except KeyError:
|
|
|
|
|
|
|
|
raise HydrusExceptions.DataMissing()
|
|
|
|
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
return result
|
2015-08-05 18:42:35 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def insert_items( self, items ):
|
2016-09-14 18:03:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self.append_items( items )
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self.sort()
|
2019-10-02 23:38:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
|
|
|
|
def remove_items( self, items ):
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
deletee_indices = [ self.index( item ) for item in items ]
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
deletee_indices.sort( reverse = True )
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
for index in deletee_indices:
|
2020-02-12 22:50:37 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
del self._sorted_list[ index ]
|
2020-02-12 22:50:37 +00:00
|
|
|
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._DirtyIndices()
|
2015-08-05 18:42:35 +00:00
|
|
|
|
|
|
|
|
2022-01-05 22:15:56 +00:00
|
|
|
def random_sort( self ):
|
|
|
|
|
|
|
|
def sort_key( x ):
|
|
|
|
|
|
|
|
return random.random()
|
|
|
|
|
|
|
|
|
|
|
|
self._sort_key = sort_key
|
|
|
|
|
|
|
|
random.shuffle( self._sorted_list )
|
|
|
|
|
|
|
|
self._DirtyIndices()
|
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
def sort( self, sort_key = None, reverse = False ):
|
2019-10-02 23:38:59 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
if sort_key is None:
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
sort_key = self._sort_key
|
|
|
|
reverse = self._sort_reverse
|
2015-08-05 18:42:35 +00:00
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
else:
|
|
|
|
|
|
|
|
self._sort_key = sort_key
|
|
|
|
self._sort_reverse = reverse
|
2015-08-05 18:42:35 +00:00
|
|
|
|
|
|
|
|
2020-04-29 21:44:12 +00:00
|
|
|
self._sorted_list.sort( key = sort_key, reverse = reverse )
|
|
|
|
|
|
|
|
self._DirtyIndices()
|
|
|
|
|
2016-12-21 22:30:54 +00:00
|
|
|
|