# hydrus/include/TestHydrusSerialisable.py

import ClientCaches
import ClientConstants as CC
import ClientData
import ClientDefaults
import ClientDownloading
import ClientImporting
import ClientMedia
import ClientRatings
import ClientSearch
import HydrusConstants as HC
import HydrusData
import HydrusNetwork
import HydrusSerialisable
import TestConstants as TC
import os
import unittest
import wx

class TestSerialisables( unittest.TestCase ):
    
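    # Shared round-trip helper. Each test serialises an object three ways: the raw
    # serialisable tuple, a JSON string via DumpToString, and the network string via
    # DumpToNetworkString (which appears to be a compressed form of the same JSON;
    # an assumption, not something asserted here). It then rebuilds the object and
    # hands the ( original, duplicate ) pair to a test-specific comparison callback.
    # A rough sketch of the same pattern outside the test harness:
    #
    #     payload = obj.DumpToString()
    #     dupe = HydrusSerialisable.CreateFromString( payload )
    #
    # The duplicate must be a distinct object ( assertIsNot ) that the callback then
    # judges equal in whatever sense matters for that type.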
    def _dump_and_load_and_test( self, obj, test_func ):
        
        serialisable_tuple = obj.GetSerialisableTuple()
        
        self.assertIsInstance( serialisable_tuple, tuple )
        
        if isinstance( obj, HydrusSerialisable.SerialisableBaseNamed ):
            
            ( serialisable_type, name, version, serialisable_info ) = serialisable_tuple
            
        elif isinstance( obj, HydrusSerialisable.SerialisableBase ):
            
            ( serialisable_type, version, serialisable_info ) = serialisable_tuple
            
        
        self.assertEqual( serialisable_type, obj.SERIALISABLE_TYPE )
        self.assertEqual( version, obj.SERIALISABLE_VERSION )
        
        dupe_obj = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_tuple )
        
        self.assertIsNot( obj, dupe_obj )
        test_func( obj, dupe_obj )
        
        #
        
        json_string = obj.DumpToString()
        
        self.assertIsInstance( json_string, str )
        
        dupe_obj = HydrusSerialisable.CreateFromString( json_string )
        
        self.assertIsNot( obj, dupe_obj )
        test_func( obj, dupe_obj )
        
        #
        
        network_string = obj.DumpToNetworkString()
        
        self.assertIsInstance( network_string, str )
        
        dupe_obj = HydrusSerialisable.CreateFromNetworkString( network_string )
        
        self.assertIsNot( obj, dupe_obj )
        test_func( obj, dupe_obj )
        
    
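    # test_basics covers the plain serialisable containers. The dictionary mixes int,
    # str and Predicate keys plus nested serialisable values, so the round trip has to
    # preserve key types as well as values; SerialisableBytesDictionary additionally
    # has to keep raw byte-string keys and values intact through the JSON encoding.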
    def test_basics( self ):
        
        def test( obj, dupe_obj ):
            
            self.assertEqual( len( obj.items() ), len( dupe_obj.items() ) )
            
            for ( key, value ) in obj.items():
                
                self.assertEqual( value, dupe_obj[ key ] )
                
            
        
        #
        
        d = HydrusSerialisable.SerialisableDictionary()
        
        d[ 1 ] = 2
        d[ 3 ] = 'test1'
        d[ 'test2' ] = 4
        d[ 'test3' ] = 5
        d[ 6 ] = HydrusSerialisable.SerialisableDictionary( { i : 'test' + str( i ) for i in range( 20 ) } )
        d[ ClientSearch.Predicate( HC.PREDICATE_TYPE_TAG, 'test pred 1' ) ] = 56
        d[ ClientSearch.Predicate( HC.PREDICATE_TYPE_TAG, 'test pred 2' ) ] = HydrusSerialisable.SerialisableList( [ ClientSearch.Predicate( HC.PREDICATE_TYPE_TAG, 'test' + str( i ) ) for i in range( 10 ) ] )
        
        self.assertEqual( len( d.keys() ), 7 )
        
        for ( key, value ) in d.items():
            
            self.assertEqual( d[ key ], value )
            
        
        self._dump_and_load_and_test( d, test )
        
        #
        
        db = HydrusSerialisable.SerialisableBytesDictionary()
        
        db[ HydrusData.GenerateKey() ] = HydrusData.GenerateKey()
        db[ HydrusData.GenerateKey() ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]
        db[ 1 ] = HydrusData.GenerateKey()
        db[ 2 ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]
        
        self.assertEqual( len( db.keys() ), 4 )
        
        for ( key, value ) in db.items():
            
            self.assertEqual( db[ key ], value )
            
        
        self._dump_and_load_and_test( db, test )
        
    
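    # Each entry below pairs an ApplicationCommand with the label ToString() is
    # expected to produce; the round trip must preserve both the command type and its
    # data payload, and the 'unknown service!' strings cover service keys that do not
    # resolve to a real service.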
    def test_SERIALISABLE_TYPE_APPLICATION_COMMAND( self ):
        
        def test( obj, dupe_obj ):
            
            self.assertEqual( obj.GetCommandType(), dupe_obj.GetCommandType() )
            self.assertEqual( obj.GetData(), dupe_obj.GetData() )
            
        
        acs = []
        
        acs.append( ( ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_SIMPLE, 'archive_file' ), 'archive_file' ) )
        acs.append( ( ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for unknown service!' ) )
        acs.append( ( ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for local tags' ) )
        acs.append( ( ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_SET, 0.4 ) ), 'set ratings "0.4" for unknown service!' ) )
        
        for ( ac, s ) in acs:
            
            self._dump_and_load_and_test( ac, test )
            
            self.assertEqual( ac.ToString(), s )
            
        
    
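    # DuplicateActionOptions describes, per service, how tags and ratings are merged
    # across a duplicate pair (move, copy or two-way merge), plus a final flag that,
    # going by the assertions further down, controls whether the other file in the
    # pair gets a delete update. The fixtures below build media in various tag,
    # rating and location states to exercise those branches.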
    def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS( self ):
        
        def test( obj, dupe_obj ):
            
            self.assertEqual( obj.ToTuple(), dupe_obj.ToTuple() )
            
        
        duplicate_action_options_delete_and_move = ClientData.DuplicateActionOptions( [ ( CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientData.TagCensor() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ) ], True )
        duplicate_action_options_copy = ClientData.DuplicateActionOptions( [ ( CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientData.TagCensor() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ) ], False )
        duplicate_action_options_merge = ClientData.DuplicateActionOptions( [ ( CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientData.TagCensor() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ) ], False )
        
        inbox = True
        size = 40960
        mime = HC.IMAGE_JPEG
        width = 640
        height = 480
        duration = None
        num_frames = None
        num_words = None
        
        local_locations_manager = ClientMedia.LocationsManager( { CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
        trash_locations_manager = ClientMedia.LocationsManager( { CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
        deleted_locations_manager = ClientMedia.LocationsManager( set(), { CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), inbox )
        
        # duplicate to generate proper dicts
        
        one_tags_manager = ClientMedia.TagsManager( { CC.LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } } ).Duplicate()
        two_tags_manager = ClientMedia.TagsManager( { CC.LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } } ).Duplicate()
        substantial_tags_manager = ClientMedia.TagsManager( { CC.LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } } ).Duplicate()
        empty_tags_manager = ClientMedia.TagsManager( {} ).Duplicate()
        
        one_ratings_manager = ClientRatings.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
        two_ratings_manager = ClientRatings.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 0.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.6 } )
        substantial_ratings_manager = ClientRatings.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
        empty_ratings_manager = ClientRatings.RatingsManager( {} )
        
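        # Each block below wraps a FileInfoManager and one of the tag, rating and
        # location combinations above into a MediaResult and then a MediaSingleton,
        # giving media that differ only in their tags, their ratings, or where the
        # file currently lives (local, trash, or already deleted).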
        #
        
        local_hash_has_values = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( local_hash_has_values, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager )
        
        local_media_has_values = ClientMedia.MediaSingleton( media_result )
        
        #
        
        other_local_hash_has_values = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( other_local_hash_has_values, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager )
        
        other_local_media_has_values = ClientMedia.MediaSingleton( media_result )
        
        #
        
        local_hash_empty = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( local_hash_empty, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, empty_tags_manager, local_locations_manager, empty_ratings_manager )
        
        local_media_empty = ClientMedia.MediaSingleton( media_result )
        
        #
        
        trashed_hash_empty = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( trashed_hash_empty, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, empty_tags_manager, trash_locations_manager, empty_ratings_manager )
        
        trashed_media_empty = ClientMedia.MediaSingleton( media_result )
        
        #
        
        deleted_hash_empty = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( deleted_hash_empty, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, empty_tags_manager, deleted_locations_manager, empty_ratings_manager )
        
        deleted_media_empty = ClientMedia.MediaSingleton( media_result )
        
        #
        
        one_hash = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( one_hash, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, one_tags_manager, local_locations_manager, one_ratings_manager )
        
        one_media = ClientMedia.MediaSingleton( media_result )
        
        #
        
        two_hash = HydrusData.GenerateKey()
        
        file_info_manager = ClientMedia.FileInfoManager( two_hash, size, mime, width, height, duration, num_frames, num_words )
        media_result = ClientMedia.MediaResult( file_info_manager, two_tags_manager, local_locations_manager, two_ratings_manager )
        
        two_media = ClientMedia.MediaSingleton( media_result )
        
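        # In the ProcessPairIntoContentUpdates calls below, the first media object is
        # treated as the better, kept file and the second as the duplicate acted on;
        # that reading comes from the delete_and_move cases, whose file-delete updates
        # always target the second hash. Expected dicts are compared through the
        # ConvertServiceKeysToContentUpdatesToComparable helper from TestConstants.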
        #
        
        self._dump_and_load_and_test( duplicate_action_options_delete_and_move, test )
        self._dump_and_load_and_test( duplicate_action_options_copy, test )
        self._dump_and_load_and_test( duplicate_action_options_merge, test )
        
        #
        
        def assertSCUEqual( one, two ):
            
            self.assertEqual( TC.ConvertServiceKeysToContentUpdatesToComparable( one ), TC.ConvertServiceKeysToContentUpdatesToComparable( two ) )
            
        
        #
        
        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty )
        
        scu = {}
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { local_hash_empty } ) ]
        
        assertSCUEqual( result[0], scu )
        
        #
        
        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, trashed_media_empty )
        
        scu = {}
        scu[ CC.TRASH_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { trashed_hash_empty } ) ]
        
        assertSCUEqual( result[0], scu )
        
        #
        
        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, deleted_media_empty )
        
        self.assertEqual( result, [] )
        
        #
        
        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, other_local_media_has_values )
        
        scu = {}
        scu[ CC.LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        
        assertSCUEqual( result[0], scu )
        
        scu = {}
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values } ) ]
        
        assertSCUEqual( result[1], scu )
        
        #
        
        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values )
        
        scu = {}
        scu[ CC.LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        
        assertSCUEqual( result[0], scu )
        
        scu = {}
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values } ) ]
        
        assertSCUEqual( result[1], scu )
        
        #
        #
        
        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty )
        
        self.assertEqual( result, [] )
        
        #
        
        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values )
        
        scu = {}
        scu[ CC.LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
        
        assertSCUEqual( result[0], scu )
        
        #
        #
        
        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty )
        
        scu = {}
        scu[ CC.LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
        
        assertSCUEqual( result[0], scu )
        
        #
        
        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values )
        
        scu = {}
        scu[ CC.LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
        
        assertSCUEqual( result[0], scu )
        
        #
        
        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( one_media, two_media )
        
        scu = {}
        scu[ CC.LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'one', { two_hash } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'two', { one_hash } ) ) ]
        
        assertSCUEqual( result[0], scu )
        
    
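    # Shortcut hashes on ( shortcut_type, shortcut_key, modifiers ), which is what the
    # callback checks alongside plain equality. ToString() renders the human-readable
    # label, and note that ord( 'a' ) and ord( 'A' ) both come out as 'ctrl+a'.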
    def test_SERIALISABLE_TYPE_SHORTCUT( self ):
        
        def test( obj, dupe_obj ):
            
            self.assertEqual( dupe_obj.__hash__(), ( dupe_obj._shortcut_type, dupe_obj._shortcut_key, tuple( dupe_obj._modifiers ) ).__hash__() )
            self.assertEqual( obj, dupe_obj )
            
        
        shortcuts = []
        
        shortcuts.append( ( ClientData.Shortcut(), 'f7' ) )
        
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, wx.WXK_SPACE, [] ), 'space' ) )
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, ord( 'a' ), [ CC.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, ord( 'A' ), [ CC.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, wx.WXK_HOME, [ CC.SHORTCUT_MODIFIER_ALT, CC.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+alt+home' ) )
        
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_MOUSE, CC.SHORTCUT_MOUSE_LEFT, [] ), 'left-click' ) )
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_MOUSE, CC.SHORTCUT_MOUSE_MIDDLE, [ CC.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+middle-click' ) )
        shortcuts.append( ( ClientData.Shortcut( CC.SHORTCUT_TYPE_MOUSE, CC.SHORTCUT_MOUSE_SCROLL_DOWN, [ CC.SHORTCUT_MODIFIER_ALT, CC.SHORTCUT_MODIFIER_SHIFT ] ), 'alt+shift+scroll down' ) )
        
        for ( shortcut, s ) in shortcuts:
            
            self._dump_and_load_and_test( shortcut, test )
            
            self.assertEqual( shortcut.ToString(), s )
            
        
    
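    # Shortcuts is a named mapping from Shortcut to ApplicationCommand. The default
    # shortcut sets from ClientDefaults are round-tripped first, then a custom 'test'
    # set checks that GetCommand returns the bound command and that SetCommand
    # overwrites an existing binding.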
    def test_SERIALISABLE_TYPE_SHORTCUTS( self ):
        
        def test( obj, dupe_obj ):
            
            for ( shortcut, command ) in obj:
                
                self.assertEqual( dupe_obj.GetCommand( shortcut ).GetData(), command.GetData() )
                
            
        
        default_shortcuts = ClientDefaults.GetDefaultShortcuts()
        
        for shortcuts in default_shortcuts:
            
            self._dump_and_load_and_test( shortcuts, test )
            
        
        command_1 = ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_SIMPLE, 'archive_file' )
        command_2 = ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )
        command_3 = ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )
        
        k_shortcut_1 = ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, wx.WXK_SPACE, [] )
        k_shortcut_2 = ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, ord( 'a' ), [ CC.SHORTCUT_MODIFIER_CTRL ] )
        k_shortcut_3 = ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, ord( 'A' ), [ CC.SHORTCUT_MODIFIER_CTRL ] )
        k_shortcut_4 = ClientData.Shortcut( CC.SHORTCUT_TYPE_KEYBOARD, wx.WXK_HOME, [ CC.SHORTCUT_MODIFIER_ALT, CC.SHORTCUT_MODIFIER_CTRL ] )
        
        m_shortcut_1 = ClientData.Shortcut( CC.SHORTCUT_TYPE_MOUSE, CC.SHORTCUT_MOUSE_LEFT, [] )
        m_shortcut_2 = ClientData.Shortcut( CC.SHORTCUT_TYPE_MOUSE, CC.SHORTCUT_MOUSE_MIDDLE, [ CC.SHORTCUT_MODIFIER_CTRL ] )
        m_shortcut_3 = ClientData.Shortcut( CC.SHORTCUT_TYPE_MOUSE, CC.SHORTCUT_MOUSE_SCROLL_DOWN, [ CC.SHORTCUT_MODIFIER_ALT, CC.SHORTCUT_MODIFIER_SHIFT ] )
        
        shortcuts = ClientData.Shortcuts( 'test' )
        
        shortcuts.SetCommand( k_shortcut_1, command_1 )
        shortcuts.SetCommand( k_shortcut_2, command_2 )
        shortcuts.SetCommand( k_shortcut_3, command_2 )
        shortcuts.SetCommand( k_shortcut_4, command_3 )
        
        shortcuts.SetCommand( m_shortcut_1, command_1 )
        shortcuts.SetCommand( m_shortcut_2, command_2 )
        shortcuts.SetCommand( m_shortcut_3, command_3 )
        
        self._dump_and_load_and_test( shortcuts, test )
        
        self.assertEqual( shortcuts.GetCommand( k_shortcut_1 ).GetData(), command_1.GetData() )
        
        shortcuts.SetCommand( k_shortcut_1, command_3 )
        
        self.assertEqual( shortcuts.GetCommand( k_shortcut_1 ).GetData(), command_3.GetData() )
        
    
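    # Subscription bundles a gallery identifier, query, check period, file and tag
    # import options and a seed cache. The callback reaches into private attributes,
    # and the nested serialisables are compared by their serialisable tuples rather
    # than by object equality.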
    def test_SERIALISABLE_TYPE_SUBSCRIPTION( self ):
        
        def test( obj, dupe_obj ):
            
            self.assertEqual( obj.GetName(), dupe_obj.GetName() )
            
            self.assertEqual( obj._gallery_identifier, dupe_obj._gallery_identifier )
            self.assertEqual( obj._gallery_stream_identifiers, dupe_obj._gallery_stream_identifiers )
            self.assertEqual( obj._query, dupe_obj._query )
            self.assertEqual( obj._period, dupe_obj._period )
            self.assertEqual( obj._get_tags_if_url_known_and_file_redundant, dupe_obj._get_tags_if_url_known_and_file_redundant )
            self.assertEqual( obj._initial_file_limit, dupe_obj._initial_file_limit )
            self.assertEqual( obj._periodic_file_limit, dupe_obj._periodic_file_limit )
            self.assertEqual( obj._paused, dupe_obj._paused )
            
            self.assertEqual( obj._file_import_options.GetSerialisableTuple(), dupe_obj._file_import_options.GetSerialisableTuple() )
            self.assertEqual( obj._import_tag_options.GetSerialisableTuple(), dupe_obj._import_tag_options.GetSerialisableTuple() )
            
            self.assertEqual( obj._last_checked, dupe_obj._last_checked )
            self.assertEqual( obj._last_error, dupe_obj._last_error )
            self.assertEqual( obj._check_now, dupe_obj._check_now )
            
            self.assertEqual( obj._seed_cache.GetSerialisableTuple(), dupe_obj._seed_cache.GetSerialisableTuple() )
            
        
        sub = ClientImporting.Subscription( 'test sub' )
        
        self._dump_and_load_and_test( sub, test )
        
        gallery_identifier = ClientDownloading.GalleryIdentifier( HC.SITE_TYPE_BOORU, 'gelbooru' )
        gallery_stream_identifiers = ClientDownloading.GetGalleryStreamIdentifiers( gallery_identifier )
        query = 'test query'
        period = 86400 * 7
        get_tags_if_url_known_and_file_redundant = True
        initial_file_limit = 100
        periodic_file_limit = 50
        paused = False
        
        file_import_options = ClientImporting.FileImportOptions( automatic_archive = False, exclude_deleted = True, min_size = 8 * 1024, min_resolution = [ 25, 25 ] )
        import_tag_options = ClientData.ImportTagOptions( service_keys_to_namespaces = { HydrusData.GenerateKey() : { 'series', '' } }, service_keys_to_explicit_tags = { HydrusData.GenerateKey() : { 'test explicit tag', 'and another' } } )
        
        last_checked = HydrusData.GetNow() - 3600
        last_error = HydrusData.GetNow() - 86400 * 20
        check_now = False
        
        seed_cache = ClientImporting.SeedCache()
        
        seed_cache.AddSeeds( [ 'http://exampleurl.com/image/123456' ] )
        
        sub.SetTuple( gallery_identifier, gallery_stream_identifiers, query, period, get_tags_if_url_known_and_file_redundant, initial_file_limit, periodic_file_limit, paused, file_import_options, import_tag_options, last_checked, last_error, check_now, seed_cache )
        
        self.assertEqual( sub.GetGalleryIdentifier(), gallery_identifier )
        self.assertEqual( sub.GetImportTagOptions(), import_tag_options )
        self.assertEqual( sub.GetQuery(), query )
        self.assertEqual( sub.GetSeedCache(), seed_cache )
        
        self.assertEqual( sub._paused, False )
        
        sub.PauseResume()
        
        self.assertEqual( sub._paused, True )
        
        sub.PauseResume()
        
        self.assertEqual( sub._paused, False )
        
        self._dump_and_load_and_test( sub, test )
        
    
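    # TagCensor rules are keyed by tag slice, where '' means every unnamespaced tag,
    # ':' every namespaced tag, 'namespace:' a whole namespace and a full tag just
    # that tag; the cases below suggest that more specific whitelist slices override
    # broader blacklist slices. A compressed view of the slices exercised (my
    # summary, not asserted verbatim anywhere):
    #
    #     ''                       all unnamespaced tags
    #     ':'                      all namespaced tags
    #     'series:'                everything in the series namespace
    #     'series:kill la kill'    exactly that tag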
    def test_SERIALISABLE_TYPE_TAG_CENSOR( self ):
        
        def test( obj, dupe_obj ):
            
            self.assertEqual( obj._tag_slices_to_rules, dupe_obj._tag_slices_to_rules )
            
        
        tags = set()
        
        tags.add( 'title:test title' )
        tags.add( 'series:neon genesis evangelion' )
        tags.add( 'series:kill la kill' )
        tags.add( 'smile' )
        tags.add( 'blue eyes' )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( '', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), set() )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( '', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'series:', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'series:neon genesis evangelion', 'series:kill la kill' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( '', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'series:kill la kill', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'series:kill la kill' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( '', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'smile', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile', 'blue eyes' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'series:', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile', 'blue eyes', 'series:neon genesis evangelion', 'series:kill la kill' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( ':', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'series:kill la kill', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile', 'blue eyes', 'series:kill la kill' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( 'series:', CC.CENSOR_BLACKLIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile', 'blue eyes', 'title:test title' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( 'series:', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'series:neon genesis evangelion', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( '', CC.CENSOR_BLACKLIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )
        
        #
        
        tag_censor = ClientData.TagCensor()
        
        tag_censor.SetRule( '', CC.CENSOR_BLACKLIST )
        tag_censor.SetRule( 'blue eyes', CC.CENSOR_WHITELIST )
        
        self._dump_and_load_and_test( tag_censor, test )
        
        self.assertEqual( tag_censor.Censor( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill', 'blue eyes' } )