# hydrus/client/ClientDuplicates.py
import collections

from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusSerialisable

from hydrus.client import ClientConstants as CC
from hydrus.client import ClientTags
class DuplicateActionOptions( HydrusSerialisable.SerialisableBase ):
    """
    Serialisable options describing how content is merged between the two
    files of a duplicate pair: which tag and rating services to sync, whether
    to sync archive state and known URLs, and how to delete the loser(s).
    """
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS
    SERIALISABLE_NAME = 'Duplicate Action Options'
    SERIALISABLE_VERSION = 4
    
    def __init__( self, tag_service_actions = None, rating_service_actions = None, sync_archive = False, sync_urls_action = None ):
        
        # tag_service_actions: list of ( service_key, action, tag_filter ) tuples
        # rating_service_actions: list of ( service_key, action ) tuples
        # sync_archive: if True, archive the inboxed file when its partner is archived
        # sync_urls_action: an HC.CONTENT_MERGE_ACTION_* constant, or None for no URL sync
        
        if tag_service_actions is None:
            
            tag_service_actions = []
            
        
        if rating_service_actions is None:
            
            rating_service_actions = []
            
        
        HydrusSerialisable.SerialisableBase.__init__( self )
        
        self._tag_service_actions = tag_service_actions
        self._rating_service_actions = rating_service_actions
        self._sync_archive = sync_archive
        self._sync_urls_action = sync_urls_action
        
    
    def _GetSerialisableInfo( self ):
        
        if HG.client_controller.IsBooted():
            
            # Prune any actions that point at services that no longer exist or
            # are of the wrong type. This also purges the tag/rating dupes that
            # the version 1 update deliberately created (see _UpdateSerialisableInfo).
            
            services_manager = HG.client_controller.services_manager
            
            self._tag_service_actions = [ ( service_key, action, tag_filter ) for ( service_key, action, tag_filter ) in self._tag_service_actions if services_manager.ServiceExists( service_key ) and services_manager.GetServiceType( service_key ) in HC.REAL_TAG_SERVICES ]
            self._rating_service_actions = [ ( service_key, action ) for ( service_key, action ) in self._rating_service_actions if services_manager.ServiceExists( service_key ) and services_manager.GetServiceType( service_key ) in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ]
            
        
        serialisable_tag_service_actions = [ ( service_key.hex(), action, tag_filter.GetSerialisableTuple() ) for ( service_key, action, tag_filter ) in self._tag_service_actions ]
        serialisable_rating_service_actions = [ ( service_key.hex(), action ) for ( service_key, action ) in self._rating_service_actions ]
        
        return ( serialisable_tag_service_actions, serialisable_rating_service_actions, self._sync_archive, self._sync_urls_action )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        
        ( serialisable_tag_service_actions, serialisable_rating_service_actions, self._sync_archive, self._sync_urls_action ) = serialisable_info
        
        # service keys are stored as hex strings; tag filters as serialisable tuples
        self._tag_service_actions = [ ( bytes.fromhex( serialisable_service_key ), action, HydrusSerialisable.CreateFromSerialisableTuple( serialisable_tag_filter ) ) for ( serialisable_service_key, action, serialisable_tag_filter ) in serialisable_tag_service_actions ]
        self._rating_service_actions = [ ( bytes.fromhex( serialisable_service_key ), action ) for ( serialisable_service_key, action ) in serialisable_rating_service_actions ]
        
    
    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
        
        if version == 1:
            
            ( serialisable_service_actions, delete_second_file ) = old_serialisable_info
            
            tag_service_actions = []
            rating_service_actions = []
            
            # As the client isn't booted when this is loaded in options, there isn't a good way to figure out tag from rating
            # So, let's just dupe and purge later on, in serialisation
            for ( service_key_encoded, action ) in serialisable_service_actions:
                
                service_key = bytes.fromhex( service_key_encoded )
                
                tag_filter = ClientTags.TagFilter()
                
                tag_service_actions.append( ( service_key, action, tag_filter ) )
                rating_service_actions.append( ( service_key, action ) )
                
            
            serialisable_tag_service_actions = [ ( service_key.hex(), action, tag_filter.GetSerialisableTuple() ) for ( service_key, action, tag_filter ) in tag_service_actions ]
            serialisable_rating_service_actions = [ ( service_key.hex(), action ) for ( service_key, action ) in rating_service_actions ]
            
            sync_archive = delete_second_file
            delete_both_files = False
            
            new_serialisable_info = ( serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files )
            
            return ( 2, new_serialisable_info )
            
        
        if version == 2:
            
            ( serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files ) = old_serialisable_info
            
            sync_urls_action = None
            
            new_serialisable_info = ( serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files, sync_urls_action )
            
            return ( 3, new_serialisable_info )
            
        
        if version == 3:
            
            # the delete flags moved out of the options and into per-call arguments
            ( serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files, sync_urls_action ) = old_serialisable_info
            
            new_serialisable_info = ( serialisable_tag_service_actions, serialisable_rating_service_actions, sync_archive, sync_urls_action )
            
            return ( 4, new_serialisable_info )
            
        
    
    def SetTuple( self, tag_service_actions, rating_service_actions, sync_archive, sync_urls_action ):
        
        self._tag_service_actions = tag_service_actions
        self._rating_service_actions = rating_service_actions
        self._sync_archive = sync_archive
        self._sync_urls_action = sync_urls_action
        
    
    def ToTuple( self ):
        
        return ( self._tag_service_actions, self._rating_service_actions, self._sync_archive, self._sync_urls_action )
        
    
    def ProcessPairIntoContentUpdates( self, first_media, second_media, delete_first = False, delete_second = False, delete_both = False, file_deletion_reason = None ):
        """
        Apply these options to a duplicate pair and return the resulting
        { service_key : [ ContentUpdate ] } dict. first_media is the better
        file of the pair. The delete_* flags queue file deletions with the
        given reason.
        """
        
        if file_deletion_reason is None:
            
            file_deletion_reason = 'unknown reason'
            
        
        service_keys_to_content_updates = collections.defaultdict( list )
        
        first_hashes = first_media.GetHashes()
        second_hashes = second_media.GetHashes()
        
        #
        
        services_manager = HG.client_controller.services_manager
        
        for ( service_key, action, tag_filter ) in self._tag_service_actions:
            
            content_updates = []
            
            try:
                
                service = services_manager.GetService( service_key )
                
            except HydrusExceptions.DataMissing:
                
                continue
                
            
            service_type = service.GetServiceType()
            
            if service_type == HC.LOCAL_TAG:
                
                add_content_action = HC.CONTENT_UPDATE_ADD
                
            elif service_type == HC.TAG_REPOSITORY:
                
                add_content_action = HC.CONTENT_UPDATE_PEND
                
            else:
                
                # not a tag service--skip rather than hit an unbound
                # add_content_action (NameError) below
                continue
                
            
            first_tags = first_media.GetTagsManager().GetCurrentAndPending( service_key, ClientTags.TAG_DISPLAY_STORAGE )
            second_tags = second_media.GetTagsManager().GetCurrentAndPending( service_key, ClientTags.TAG_DISPLAY_STORAGE )
            
            first_tags = tag_filter.Filter( first_tags )
            second_tags = tag_filter.Filter( second_tags )
            
            if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
                
                first_needs = second_tags.difference( first_tags )
                second_needs = first_tags.difference( second_tags )
                
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, second_hashes ) ) for tag in second_needs ) )
                
            elif action == HC.CONTENT_MERGE_ACTION_COPY:
                
                first_needs = second_tags.difference( first_tags )
                
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
                
            elif service_type == HC.LOCAL_TAG and action == HC.CONTENT_MERGE_ACTION_MOVE:
                
                # move is only supported on a local tag service: copy to the
                # better file, then delete from the worse
                first_needs = second_tags.difference( first_tags )
                
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
                content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( tag, second_hashes ) ) for tag in second_tags ) )
                
            
            if len( content_updates ) > 0:
                
                service_keys_to_content_updates[ service_key ].extend( content_updates )
                
            
        
        def worth_updating_rating( source_rating, dest_rating ):
            
            # a rating is worth copying over if the destination is unset or lower
            if source_rating is not None:
                
                if dest_rating is None or source_rating > dest_rating:
                    
                    return True
                    
                
            
            return False
            
        
        for ( service_key, action ) in self._rating_service_actions:
            
            content_updates = []
            
            try:
                
                service = services_manager.GetService( service_key )
                
            except HydrusExceptions.DataMissing:
                
                continue
                
            
            first_current_value = first_media.GetRatingsManager().GetRating( service_key )
            second_current_value = second_media.GetRatingsManager().GetRating( service_key )
            
            if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
                
                if worth_updating_rating( first_current_value, second_current_value ):
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( first_current_value, second_hashes ) ) )
                    
                elif worth_updating_rating( second_current_value, first_current_value ):
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( second_current_value, first_hashes ) ) )
                    
                
            elif action == HC.CONTENT_MERGE_ACTION_COPY:
                
                if worth_updating_rating( second_current_value, first_current_value ):
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( second_current_value, first_hashes ) ) )
                    
                
            elif action == HC.CONTENT_MERGE_ACTION_MOVE:
                
                if second_current_value is not None:
                    
                    if worth_updating_rating( second_current_value, first_current_value ):
                        
                        content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( second_current_value, first_hashes ) ) )
                        
                    
                    # clear the rating from the worse file
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, second_hashes ) ) )
                    
                
            
            if len( content_updates ) > 0:
                
                service_keys_to_content_updates[ service_key ].extend( content_updates )
                
            
        
        #
        
        if self._sync_archive:
            
            if first_media.HasInbox() and second_media.HasArchive():
                
                content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, first_hashes )
                
                service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ].append( content_update )
                
            elif first_media.HasArchive() and second_media.HasInbox():
                
                content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, second_hashes )
                
                service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ].append( content_update )
                
            
        
        #
        
        if self._sync_urls_action is not None:
            
            first_urls = set( first_media.GetLocationsManager().GetURLs() )
            second_urls = set( second_media.GetLocationsManager().GetURLs() )
            
            content_updates = []
            
            if self._sync_urls_action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
                
                first_needs = second_urls.difference( first_urls )
                second_needs = first_urls.difference( second_urls )
                
                if len( first_needs ) > 0:
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( first_needs, first_hashes ) ) )
                    
                
                if len( second_needs ) > 0:
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( second_needs, second_hashes ) ) )
                    
                
            elif self._sync_urls_action == HC.CONTENT_MERGE_ACTION_COPY:
                
                first_needs = second_urls.difference( first_urls )
                
                if len( first_needs ) > 0:
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( first_needs, first_hashes ) ) )
                    
                
            
            if len( content_updates ) > 0:
                
                service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ].extend( content_updates )
                
            
        
        #
        
        deletee_media = []
        
        if delete_first or delete_second or delete_both:
            
            if delete_first or delete_both:
                
                deletee_media.append( first_media )
                
            
            if delete_second or delete_both:
                
                deletee_media.append( second_media )
                
            
        
        for media in deletee_media:
            
            current_locations = media.GetLocationsManager().GetCurrent()
            
            # delete from the local file service if still there, else from trash
            if CC.LOCAL_FILE_SERVICE_KEY in current_locations:
                
                deletee_service_key = CC.LOCAL_FILE_SERVICE_KEY
                
            elif CC.TRASH_SERVICE_KEY in current_locations:
                
                deletee_service_key = CC.TRASH_SERVICE_KEY
                
            else:
                
                deletee_service_key = None
                
            
            if deletee_service_key is not None:
                
                content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, media.GetHashes(), reason = file_deletion_reason )
                
                service_keys_to_content_updates[ deletee_service_key ].append( content_update )
                
            
        
        #
        
        return service_keys_to_content_updates
        
    
# Register this class with the serialisation system so dumped objects of this
# serialisable type id can be reinstantiated as DuplicateActionOptions.
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS ] = DuplicateActionOptions