Version 367

This commit is contained in:
Hydrus Network Developer 2019-09-11 16:51:09 -05:00
parent e8c789f4c5
commit 611e7bbb0d
27 changed files with 1429 additions and 235 deletions

View File

@ -45,10 +45,12 @@ try:
argparser = argparse.ArgumentParser( description = 'hydrus network client (console)' )
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
argparser.add_argument( '--no_wal', action='store_true', help = 'run without WAL db journalling' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run the db entirely in memory' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_memory_journalling', action='store_true', help = 'run db journalling entirely in memory (DANGEROUS)' )
argparser.add_argument( '--db_synchronous_override', help = 'override SQLite Synchronous PRAGMA (range 0-3, default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
result = argparser.parse_args()
@ -89,6 +91,25 @@ try:
HG.no_daemons = result.no_daemons
HG.no_wal = result.no_wal
HG.db_memory_journalling = result.db_memory_journalling
if result.db_synchronous_override is not None:
try:
db_synchronous_override = int( result.db_synchronous_override )
except ValueError:
raise Exception( 'db_synchronous_override must be an integer in the range 0-3' )
if db_synchronous_override not in range( 4 ):
raise Exception( 'db_synchronous_override must be in the range 0-3' )
HG.no_db_temp_files = result.no_db_temp_files
if result.temp_dir is not None:

View File

@ -45,10 +45,12 @@ try:
argparser = argparse.ArgumentParser( description = 'hydrus network client (windowed)' )
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
argparser.add_argument( '--no_wal', action='store_true', help = 'run without WAL db journalling' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run the db entirely in memory' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_memory_journalling', action='store_true', help = 'run db journalling entirely in memory (DANGEROUS)' )
argparser.add_argument( '--db_synchronous_override', help = 'override SQLite Synchronous PRAGMA (range 0-3, default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
result = argparser.parse_args()
@ -89,6 +91,25 @@ try:
HG.no_daemons = result.no_daemons
HG.no_wal = result.no_wal
HG.db_memory_journalling = result.db_memory_journalling
if result.db_synchronous_override is not None:
try:
db_synchronous_override = int( result.db_synchronous_override )
except ValueError:
raise Exception( 'db_synchronous_override must be an integer in the range 0-3' )
if db_synchronous_override not in range( 4 ):
raise Exception( 'db_synchronous_override must be in the range 0-3' )
HG.no_db_temp_files = result.no_db_temp_files
if result.temp_dir is not None:

View File

@ -8,6 +8,34 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 367</h3></li>
<ul>
<li>tag migration:</li>
<li>added htpa and tag service sources for parents/siblings migration that support filtering for the left and right tag of each pair</li>
<li>added htpa and tag service destinations for parents/siblings migration</li>
<li>added unit tests for all parent/siblings migration scenarios</li>
<li>misc improvements to mappings migration code</li>
<li>reworded some of the tooltip/tag filter message text to more clearly explain how the filter applies to migrations</li>
<li>the tag filter edit panel now has a 'test' area where you can put in an example tag to see if it passes or is blocked by the current filter</li>
<li>.</li>
<li>the rest:</li>
<li>fixed an issue with auto-no-ing yes/no dialogs throwing errors on exit. I am sorry for the inconvenience!</li>
<li>files with audio now show the 'has audio' string on their thumbnails</li>
<li>'sort by file: has audio' added!</li>
<li>icons drawn on thumbnails are now adjusted to sit inside the border</li>
<li>added increment/decrement numerical ratings actions for media shortcuts! if a file hit by this action has no rating, it will initialise with 0/1 stars or max stars. please forgive the ugly expanding ui in the shortcuts panel here--I'll rewrite this to lay out more dynamically in future</li>
<li>client repository services now track whether they are 'caught up' to their repos, which for now means processed up until at least two weeks ago, and will prohibit uploading new content until the client is caught up</li>
<li>repository review services panels will now display the 'caught up' status below the 'processed' progress gauge</li>
<li>repository review services panels will no longer duplicate 'account' status problems in the 'this client's network use' status line--both lines now refer to service/account functionality separately</li>
<li>repositories will now put in 'unknown error' when an empty error reason slips through the 'no requests until x time' reporting process</li>
<li>the new thumbnail and media viewer right-click menus now collapse the selection info lines at the top to just the top line and places all the rest (and in complicated file domain situations, this can be a long list) in a submenu off that line</li>
<li>the new thumbnail 'remove' submenu has separators after 'selected' and 'all' to reduce misclicks</li>
<li>reworded a couple of things in the manage shortcuts panel to be more clear</li>
<li>added wildcard support ('eva*lion') and namespace wildcards (like 'character:*') to the advanced OR text input parsing</li>
<li>fixed a rare issue with the duplicate filter being unable to go back or retreat from an interstitial confirm/forget/cancel dialog when every pair in the current batch cannot be displayed (such as if at least one of the pair has been physically deleted). the filter now catches this situation, informs the user, and closes itself gracefully</li>
<li>added two extremely advanced and dangerous launch parameters for database access testing</li>
<li>couple of misc fixes and cleanup</li>
</ul>
<li><h3>version 366</h3></li>
<ul>
<li>tag migration:</li>

View File

@ -373,6 +373,7 @@ SORT_FILES_BY_NUM_TAGS = 9
SORT_FILES_BY_MEDIA_VIEWS = 10
SORT_FILES_BY_MEDIA_VIEWTIME = 11
SORT_FILES_BY_APPROX_BITRATE = 12
SORT_FILES_BY_HAS_AUDIO = 13
SORT_ASC = 0
SORT_DESC = 1
@ -392,6 +393,7 @@ SORT_CHOICES.append( ( 'system', SORT_FILES_BY_NUM_TAGS ) )
SORT_CHOICES.append( ( 'system', SORT_FILES_BY_MEDIA_VIEWS ) )
SORT_CHOICES.append( ( 'system', SORT_FILES_BY_MEDIA_VIEWTIME ) )
SORT_CHOICES.append( ( 'system', SORT_FILES_BY_APPROX_BITRATE ) )
SORT_CHOICES.append( ( 'system', SORT_FILES_BY_HAS_AUDIO ) )
STATUS_UNKNOWN = 0
STATUS_SUCCESSFUL_AND_NEW = 1

View File

@ -7188,6 +7188,19 @@ class DB( HydrusDB.HydrusDB ):
return needed_hashes
def _GetRepositoryUpdateHashesUnprocessed( self, service_key ):
    """Return the hashes of this repository's update files that have not yet been processed.
    
    service_key identifies the repository service; its per-service updates table
    is queried for rows whose 'processed' flag is still False, and the matching
    hash_ids are converted back to hashes.
    """
    
    service_id = self._GetServiceId( service_key )
    
    # each repository service has its own updates table, named from its service_id
    repository_updates_table_name = GenerateRepositoryRepositoryUpdatesTableName( service_id )
    
    # 'processed' is bound as a bool param; False selects the still-unprocessed updates
    unprocessed_hash_ids = self._STL( self._c.execute( 'SELECT hash_id FROM {} WHERE processed = ?;'.format( repository_updates_table_name ), ( False, ) ) )
    
    hashes = self._GetHashes( unprocessed_hash_ids )
    
    return hashes
def _GetService( self, service_id ):
if service_id in self._service_cache:
@ -8433,6 +8446,49 @@ class DB( HydrusDB.HydrusDB ):
return data
def _MigrationGetPairs( self, database_temp_job_name, left_tag_filter, right_tag_filter ):
    """Pop and return the next batch of ( left_tag, right_tag ) pairs for a tag-pair migration job.
    
    Each pair is read from (and then deleted from) the temp job table named
    database_temp_job_name, so repeated calls drain the job. Pairs whose left
    or right tag fails the corresponding tag filter are discarded silently.
    An empty return means the job table is exhausted.
    """
    
    time_started_precise = HydrusData.GetNowPrecise()
    
    data = []
    
    we_should_stop = False
    
    while not we_should_stop:
        
        # no ORDER BY: any row of the temp table will do; None means the job is done
        result = self._c.execute( 'SELECT left_tag_id, right_tag_id FROM {};'.format( database_temp_job_name ) ).fetchone()
        
        if result is None:
            
            break
            
        
        ( left_tag_id, right_tag_id ) = result
        
        # remove the pair immediately so it is not returned again on the next call
        self._c.execute( 'DELETE FROM {} WHERE left_tag_id = ? AND right_tag_id = ?;'.format( database_temp_job_name ), ( left_tag_id, right_tag_id ) )
        
        left_tag = self._GetTag( left_tag_id )
        
        if not left_tag_filter.TagOK( left_tag ):
            
            continue
            
        
        right_tag = self._GetTag( right_tag_id )
        
        if not right_tag_filter.TagOK( right_tag ):
            
            continue
            
        
        data.append( ( left_tag, right_tag ) )
        
        # batch cap of 256 pairs, or a ~1s time budget once at least one pair is gathered;
        # note the filter 'continue's above skip this check, so only appended pairs count
        we_should_stop = len( data ) >= 256 or ( len( data ) > 0 and HydrusData.TimeHasPassedPrecise( time_started_precise + 1.0 ) )
        
    
    return data
def _MigrationStartMappingsJob( self, database_temp_job_name, file_service_key, tag_service_key, hashes, content_statuses ):
self._c.execute( 'CREATE TABLE durable_temp.{} ( hash_id INTEGER PRIMARY KEY );'.format( database_temp_job_name ) )
@ -8489,11 +8545,36 @@ class DB( HydrusDB.HydrusDB ):
for select_subquery in select_subqueries:
self._c.execute( 'INSERT INTO {} ( hash_id ) {};'.format( database_temp_job_name, select_subquery ) )
self._c.execute( 'INSERT OR IGNORE INTO {} ( hash_id ) {};'.format( database_temp_job_name, select_subquery ) )
def _MigrationStartPairsJob( self, database_temp_job_name, tag_service_key, content_type, content_statuses ):
    """Initialise a tag-pair migration job table and fill it from the given tag service.
    
    Creates durable_temp.<database_temp_job_name> keyed on ( left_tag_id, right_tag_id )
    and populates it with parent or sibling pairs (per content_type) whose status is in
    content_statuses. _MigrationGetPairs later drains this table in batches.
    """
    
    self._c.execute( 'CREATE TABLE durable_temp.{} ( left_tag_id INTEGER, right_tag_id INTEGER, PRIMARY KEY ( left_tag_id, right_tag_id ) );'.format( database_temp_job_name ) )
    
    tag_service_id = self._GetServiceId( tag_service_key )
    
    # map the abstract left/right pair onto the concrete source tables and columns
    if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
        
        source_table_names = [ 'tag_parents', 'tag_parent_petitions' ]
        
        left_column_name = 'child_tag_id'
        right_column_name = 'parent_tag_id'
        
    elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
        
        source_table_names = [ 'tag_siblings', 'tag_sibling_petitions' ]
        
        left_column_name = 'bad_tag_id'
        right_column_name = 'good_tag_id'
        
    
    # NOTE(review): any other content_type raises NameError on source_table_names below — confirm callers only pass parents/siblings
    for source_table_name in source_table_names:
        
        # INSERT OR IGNORE dedupes pairs that appear in both the main and petition tables;
        # content_statuses is splayed directly into the IN ( ... ) clause, service_id is bound
        self._c.execute( 'INSERT OR IGNORE INTO {} ( left_tag_id, right_tag_id ) SELECT {}, {} FROM {} WHERE service_id = ? AND status IN {};'.format( database_temp_job_name, left_column_name, right_column_name, source_table_name, HydrusData.SplayListForDB( content_statuses ) ), ( tag_service_id, ) )
def _NamespaceExists( self, namespace ):
if namespace == '':
@ -9428,7 +9509,7 @@ class DB( HydrusDB.HydrusDB ):
notify_new_siblings = False
notify_new_force_refresh_tags = False
for ( service_key, content_updates ) in list(service_keys_to_content_updates.items()):
for ( service_key, content_updates ) in service_keys_to_content_updates.items():
try:
@ -9923,8 +10004,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', ( ratings_added, service_id, HC.SERVICE_INFO_NUM_FILES ) )
# and then do a thing here where it looks up remote services links and then pends/rescinds pends appropriately
elif action == HC.CONTENT_UPDATE_ADVANCED:
@ -10412,6 +10491,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'media_results': result = self._GetMediaResultsFromHashes( *args, **kwargs )
elif action == 'media_results_from_ids': result = self._GetMediaResults( *args, **kwargs )
elif action == 'migration_get_mappings': result = self._MigrationGetMappings( *args, **kwargs )
elif action == 'migration_get_pairs': result = self._MigrationGetPairs( *args, **kwargs )
elif action == 'missing_repository_update_hashes': result = self._GetRepositoryUpdateHashesIDoNotHave( *args, **kwargs )
elif action == 'missing_thumbnail_hashes': result = self._GetRepositoryThumbnailHashesIDoNotHave( *args, **kwargs )
elif action == 'nums_pending': result = self._GetNumsPending( *args, **kwargs )
@ -10421,6 +10501,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'random_potential_duplicate_hashes': result = self._DuplicatesGetRandomPotentialDuplicateHashes( *args, **kwargs )
elif action == 'recent_tags': result = self._GetRecentTags( *args, **kwargs )
elif action == 'repository_progress': result = self._GetRepositoryProgress( *args, **kwargs )
elif action == 'repository_unprocessed_hashes': result = self._GetRepositoryUpdateHashesUnprocessed( *args, **kwargs )
elif action == 'repository_update_hashes_to_process': result = self._GetRepositoryUpdateHashesICanProcess( *args, **kwargs )
elif action == 'serialisable': result = self._GetJSONDump( *args, **kwargs )
elif action == 'serialisable_simple': result = self._GetJSONSimple( *args, **kwargs )
@ -13547,6 +13628,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'maintain_similar_files_tree': self._PHashesMaintainTree( *args, **kwargs )
elif action == 'migration_clear_job': self._MigrationClearJob( *args, **kwargs )
elif action == 'migration_start_mappings_job': self._MigrationStartMappingsJob( *args, **kwargs )
elif action == 'migration_start_pairs_job': self._MigrationStartPairsJob( *args, **kwargs )
elif action == 'process_repository_content': result = self._ProcessRepositoryContent( *args, **kwargs )
elif action == 'process_repository_definitions': result = self._ProcessRepositoryDefinitions( *args, **kwargs )
elif action == 'push_recent_tags': self._PushRecentTags( *args, **kwargs )

View File

@ -4091,6 +4091,14 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
service.CheckFunctional()
if isinstance( service, ClientServices.ServiceRepository ):
if not service.IsMostlyCaughtUp():
raise Exception( 'Repository processing is not caught up--please process more before you upload new content.' )
except Exception as e:
wx.MessageBox( 'Unfortunately, there is a problem with starting the upload: ' + str( e ) )

View File

@ -2526,12 +2526,22 @@ class CanvasPanel( Canvas ):
menu = wx.Menu()
for line in self._current_media.GetPrettyInfoLines():
#
info_lines = self._current_media.GetPrettyInfoLines()
top_line = info_lines.pop(0)
info_menu = wx.Menu()
for line in info_lines:
ClientGUIMenus.AppendMenuLabel( menu, line, line )
ClientGUIMenus.AppendMenuLabel( info_menu, line, line )
ClientGUIMedia.AddFileViewingStatsMenu( menu, self._current_media )
ClientGUIMedia.AddFileViewingStatsMenu( info_menu, self._current_media )
ClientGUIMenus.AppendMenu( menu, info_menu, top_line )
#
@ -3420,12 +3430,12 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
def _GetNumCommittableDecisions( self ):
return len( [ 1 for ( hash_pair, duplicate_type, first_media, second_media, service_keys_to_content_updates, was_auto_skipped ) in self._processed_pairs if duplicate_type is not None ] )
return len( [ 1 for ( hash_pair, duplicate_type, first_media, second_media, service_keys_to_content_updates, was_auto_skipped ) in self._processed_pairs if duplicate_type is not None and not was_auto_skipped ] )
def _GoBack( self ):
if len( self._processed_pairs ) > 0:
if len( self._processed_pairs ) > 0 and self._GetNumCommittableDecisions() > 0:
self._unprocessed_pairs.append( self._current_pair )
@ -3435,6 +3445,15 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
while was_auto_skipped:
if len( self._processed_pairs ) == 0:
wx.MessageBox( 'Due to an unexpected series of events (likely a series of file deletes), the duplicate filter has no valid pair to back up to. It will now close.' )
self._Close()
return
( hash_pair, duplicate_type, first_media, second_media, service_keys_to_content_updates, was_auto_skipped ) = self._processed_pairs.pop()
self._unprocessed_pairs.append( hash_pair )
@ -3559,6 +3578,15 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
while was_auto_skipped:
if len( self._processed_pairs ) == 0:
wx.MessageBox( 'Due to an unexpected series of events (likely a series of file deletes), the duplicate filter has no valid pair to back up to. It will now close.' )
self._Close()
return
( hash_pair, duplicate_type, first_media, second_media, service_keys_to_content_updates, was_auto_skipped ) = self._processed_pairs.pop()
self._unprocessed_pairs.append( hash_pair )
@ -3621,7 +3649,7 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
if len( self._processed_pairs ) == 0:
wx.MessageBox( 'It seems an entire batch of pairs were unable to be displayed. The duplicate filter will now close. Please inform hydrus dev of this.' )
wx.MessageBox( 'It seems an entire batch of pairs were unable to be displayed. The duplicate filter will now close.' )
self._Close()
@ -4909,12 +4937,24 @@ class CanvasMediaListBrowser( CanvasMediaListNavigable ):
menu = wx.Menu()
for line in self._current_media.GetPrettyInfoLines():
#
info_lines = self._current_media.GetPrettyInfoLines()
top_line = info_lines.pop(0)
info_menu = wx.Menu()
for line in info_lines:
ClientGUIMenus.AppendMenuLabel( menu, line, line )
ClientGUIMenus.AppendMenuLabel( info_menu, line, line )
ClientGUIMedia.AddFileViewingStatsMenu( menu, self._current_media )
ClientGUIMedia.AddFileViewingStatsMenu( info_menu, self._current_media )
ClientGUIMenus.AppendMenu( menu, info_menu, top_line )
#
ClientGUIMenus.AppendSeparator( menu )

View File

@ -511,8 +511,6 @@ class DialogManageUPnP( ClientGUIDialogs.Dialog ):
external_ip_text = 'Error finding external IP: ' + str( e )
return
wx.CallAfter( wx_code, external_ip_text )

View File

@ -74,7 +74,7 @@ def GetYesNo( win, message, title = 'Are you sure?', yes_label = 'yes', no_label
job = HG.client_controller.CallLaterWXSafe( dlg, auto_yes_time, dlg.EndModal, wx.ID_YES )
elif auto_no_time is None:
elif auto_no_time is not None:
job = HG.client_controller.CallLaterWXSafe( dlg, auto_no_time, dlg.EndModal, wx.ID_NO )

View File

@ -1,3 +1,4 @@
import collections
from . import HydrusConstants as HC
from . import HydrusData
from . import HydrusExceptions
@ -27,7 +28,7 @@ def ApplyContentApplicationCommandToMedia( parent, command, media ):
for m in media:
hashes.update( m.GetHashes() )
hashes.add( m.GetHash() )
if service_type in HC.TAG_SERVICES:
@ -167,48 +168,110 @@ def ApplyContentApplicationCommandToMedia( parent, command, media ):
elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ):
rating = value
can_set = False
can_unset = False
for m in media:
if action in ( HC.CONTENT_UPDATE_SET, HC.CONTENT_UPDATE_FLIP ):
ratings_manager = m.GetRatingsManager()
rating = value
current_rating = ratings_manager.GetRating( service_key )
can_set = False
can_unset = False
if current_rating == rating and action == HC.CONTENT_UPDATE_FLIP:
for m in media:
can_unset = True
ratings_manager = m.GetRatingsManager()
current_rating = ratings_manager.GetRating( service_key )
if current_rating == rating and action == HC.CONTENT_UPDATE_FLIP:
can_unset = True
else:
can_set = True
if can_set:
row = ( rating, hashes )
elif can_unset:
row = ( None, hashes )
else:
can_set = True
return True
if can_set:
content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, row ) ]
row = ( rating, hashes )
elif action in ( HC.CONTENT_UPDATE_INCREMENT, HC.CONTENT_UPDATE_DECREMENT ):
elif can_unset:
if service_type == HC.LOCAL_RATING_NUMERICAL:
if action == HC.CONTENT_UPDATE_INCREMENT:
direction = 1
initialisation_rating = 0.0
elif action == HC.CONTENT_UPDATE_DECREMENT:
direction = -1
initialisation_rating = 1.0
num_stars = service.GetNumStars()
if service.AllowZero():
num_stars += 1
one_star_value = 1.0 / ( num_stars - 1 )
ratings_to_hashes = collections.defaultdict( set )
for m in media:
ratings_manager = m.GetRatingsManager()
current_rating = ratings_manager.GetRating( service_key )
if current_rating is None:
new_rating = initialisation_rating
else:
new_rating = current_rating + ( one_star_value * direction )
new_rating = max( min( new_rating, 1.0 ), 0.0 )
if current_rating != new_rating:
ratings_to_hashes[ new_rating ].add( m.GetHash() )
content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( rating, hashes ) ) for ( rating, hashes ) in ratings_to_hashes.items() ]
else:
return True
row = ( None, hashes )
else:
return True
content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, row ) ]
else:
return False
HG.client_controller.Write( 'content_updates', { service_key : content_updates } )
if len( content_updates ) > 0:
HG.client_controller.Write( 'content_updates', { service_key : content_updates } )
return True

View File

@ -493,9 +493,16 @@ def AddRemoveMenu( win, menu, num_files, num_selected, num_inbox, num_archive ):
if do_files:
ClientGUIMenus.AppendSeparator( remove_menu )
ClientGUIMenus.AppendMenuItem( win, remove_menu, 'all ({})'.format( HydrusData.ToHumanInt( num_files ) ), 'Remove all the files from the current view.', win._Remove, 'all' )
if do_archive_and_inbox or do_not_selected:
ClientGUIMenus.AppendSeparator( remove_menu )
if do_archive_and_inbox:
ClientGUIMenus.AppendMenuItem( win, remove_menu, 'inbox ({})'.format( HydrusData.ToHumanInt( num_inbox ) ), 'Remove all the inbox files from the current view.', win._Remove, 'inbox' )
@ -3878,91 +3885,111 @@ class MediaPanelThumbnails( MediaPanel ):
# do the actual menu
selection_info_menu = wx.Menu()
if multiple_selected:
ClientGUIMenus.AppendMenuLabel( menu, HydrusData.ToHumanInt( num_selected ) + ' files, ' + self._GetPrettyTotalSize( only_selected = True ) )
selection_info_menu_label = '{} files, {}'.format( HydrusData.ToHumanInt( num_selected ), self._GetPrettyTotalSize( only_selected = True ) )
else:
for line in self._focused_media.GetPrettyInfoLines():
pretty_info_lines = self._focused_media.GetPrettyInfoLines()
top_line = pretty_info_lines.pop( 0 )
selection_info_menu_label = top_line
for line in pretty_info_lines:
ClientGUIMenus.AppendMenuLabel( menu, line, line )
ClientGUIMenus.AppendMenuLabel( selection_info_menu, line, line )
if len( self._selected_media ) == 1:
AddFileViewingStatsMenu( menu, self._focused_media )
AddFileViewingStatsMenu( selection_info_menu, self._focused_media )
if len( disparate_current_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_current_file_service_keys, 'some uploaded to' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_current_file_service_keys, 'some uploaded to' )
if multiple_selected and len( common_current_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_current_file_service_keys, 'selected uploaded to' )
AddServiceKeyLabelsToMenu( selection_info_menu, common_current_file_service_keys, 'selected uploaded to' )
if len( disparate_pending_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_pending_file_service_keys, 'some pending to' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_pending_file_service_keys, 'some pending to' )
if len( common_pending_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_pending_file_service_keys, 'pending to' )
AddServiceKeyLabelsToMenu( selection_info_menu, common_pending_file_service_keys, 'pending to' )
if len( disparate_petitioned_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_petitioned_file_service_keys, 'some petitioned from' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_petitioned_file_service_keys, 'some petitioned from' )
if len( common_petitioned_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_petitioned_file_service_keys, 'petitioned from' )
AddServiceKeyLabelsToMenu( selection_info_menu, common_petitioned_file_service_keys, 'petitioned from' )
if len( disparate_deleted_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_deleted_file_service_keys, 'some deleted from' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_deleted_file_service_keys, 'some deleted from' )
if len( common_deleted_file_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_deleted_file_service_keys, 'deleted from' )
AddServiceKeyLabelsToMenu( selection_info_menu, common_deleted_file_service_keys, 'deleted from' )
if len( disparate_current_ipfs_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_current_ipfs_service_keys, 'some pinned to' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_current_ipfs_service_keys, 'some pinned to' )
if multiple_selected and len( common_current_ipfs_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_current_ipfs_service_keys, 'selected pinned to' )
AddServiceKeyLabelsToMenu( selection_info_menu, common_current_ipfs_service_keys, 'selected pinned to' )
if len( disparate_pending_ipfs_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_pending_ipfs_service_keys, 'some to be pinned to' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_pending_ipfs_service_keys, 'some to be pinned to' )
if len( common_pending_ipfs_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_pending_ipfs_service_keys, 'to be pinned to' )
AddServiceKeyLabelsToMenu( selection_info_menu, common_pending_ipfs_service_keys, 'to be pinned to' )
if len( disparate_petitioned_ipfs_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, disparate_petitioned_ipfs_service_keys, 'some to be unpinned from' )
AddServiceKeyLabelsToMenu( selection_info_menu, disparate_petitioned_ipfs_service_keys, 'some to be unpinned from' )
if len( common_petitioned_ipfs_service_keys ) > 0:
AddServiceKeyLabelsToMenu( menu, common_petitioned_ipfs_service_keys, unpin_phrase )
AddServiceKeyLabelsToMenu( selection_info_menu, common_petitioned_ipfs_service_keys, unpin_phrase )
if selection_info_menu.GetMenuItemCount() == 0:
selection_info_menu.Destroy()
ClientGUIMenus.AppendMenuLabel( menu, selection_info_menu_label )
else:
ClientGUIMenus.AppendMenu( menu, selection_info_menu, selection_info_menu_label )
@ -5007,12 +5034,12 @@ class Thumbnail( Selectable ):
gc.SetPen( wx.TRANSPARENT_PEN )
( text_x, text_y ) = gc.GetTextExtent( upper_summary )
( text_width, text_height ) = gc.GetTextExtent( upper_summary )
top_left_x = int( ( width - text_x ) // 2 )
top_left_x = int( ( width - text_width ) // 2 )
top_left_y = thumbnail_border
gc.DrawRectangle( thumbnail_border, top_left_y, width - ( thumbnail_border * 2 ), text_y + 1 )
gc.DrawRectangle( thumbnail_border, top_left_y, width - ( thumbnail_border * 2 ), text_height + 1 )
gc.DrawText( upper_summary, top_left_x, top_left_y )
@ -5029,12 +5056,12 @@ class Thumbnail( Selectable ):
gc.SetPen( wx.TRANSPARENT_PEN )
( text_x, text_y ) = gc.GetTextExtent( lower_summary )
( text_width, text_height ) = gc.GetTextExtent( lower_summary )
top_left_x = width - text_x - thumbnail_border
top_left_y = height - text_y - thumbnail_border
top_left_x = width - text_width - thumbnail_border
top_left_y = height - text_height - thumbnail_border
gc.DrawRectangle( top_left_x - 1, top_left_y - 1, text_x + 1, text_y + 1 )
gc.DrawRectangle( top_left_x - 1, top_left_y - 1, text_width + 1, text_height + 1 )
gc.DrawText( lower_summary, top_left_x, top_left_y )
@ -5113,20 +5140,16 @@ class Thumbnail( Selectable ):
if len( icons_to_draw ) > 0:
icon_x = 0
icon_x = -thumbnail_border
for icon in icons_to_draw:
dc.DrawBitmap( icon, width + icon_x - 18, 0 )
dc.DrawBitmap( icon, width + icon_x - 18, thumbnail_border )
icon_x -= 18
if self._dump_status == CC.DUMPER_DUMPED_OK: dc.DrawBitmap( CC.GlobalBMPs.dump_ok, width - 18, 18 )
elif self._dump_status == CC.DUMPER_RECOVERABLE_ERROR: dc.DrawBitmap( CC.GlobalBMPs.dump_recoverable, width - 18, 18 )
elif self._dump_status == CC.DUMPER_UNRECOVERABLE_ERROR: dc.DrawBitmap( CC.GlobalBMPs.dump_fail, width - 18, 18 )
if self.IsCollection():
dc.DrawBitmap( CC.GlobalBMPs.collection, 1, height - 17 )
@ -5135,7 +5158,7 @@ class Thumbnail( Selectable ):
dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) )
( text_x, text_y ) = dc.GetTextExtent( num_files_str )
( text_width, text_height ) = dc.GetTextExtent( num_files_str )
dc.SetBrush( wx.Brush( CC.COLOUR_UNSELECTED ) )
@ -5143,17 +5166,41 @@ class Thumbnail( Selectable ):
dc.SetPen( wx.TRANSPARENT_PEN )
dc.DrawRectangle( 17, height - text_y - 3, text_x + 2, text_y + 2 )
dc.DrawRectangle( 17, height - text_height - 3, text_width + 2, text_height + 2 )
dc.DrawText( num_files_str, 18, height - text_y - 2 )
dc.DrawText( num_files_str, 18, height - text_height - 2 )
# repo icons
# top left icons
top_left_x = 0
if self.HasAudio():
has_audio_string = new_options.GetString( 'has_audio_label' )
dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) )
( text_width, text_height ) = dc.GetTextExtent( has_audio_string )
dc.SetBrush( wx.Brush( CC.COLOUR_UNSELECTED ) )
dc.SetTextForeground( CC.COLOUR_SELECTED_DARK )
dc.SetPen( wx.TRANSPARENT_PEN )
box_x = thumbnail_border
box_y = thumbnail_border
dc.DrawRectangle( box_x, box_y, text_width + 2, text_height + 2 )
dc.DrawText( has_audio_string, box_x + 1, box_y + 1 )
top_left_x += text_height + 2
services_manager = HG.client_controller.services_manager
repo_icon_x = 0
current = locations_manager.GetCurrentRemote()
pending = locations_manager.GetPendingRemote()
petitioned = locations_manager.GetPetitionedRemote()
@ -5166,16 +5213,16 @@ class Thumbnail( Selectable ):
if HC.FILE_REPOSITORY in service_types:
dc.DrawBitmap( CC.GlobalBMPs.file_repository, repo_icon_x, 0 )
dc.DrawBitmap( CC.GlobalBMPs.file_repository, top_left_x, thumbnail_border )
repo_icon_x += 20
top_left_x += 20
if HC.IPFS in service_types:
dc.DrawBitmap( CC.GlobalBMPs.ipfs, repo_icon_x, 0 )
dc.DrawBitmap( CC.GlobalBMPs.ipfs, top_left_x, thumbnail_border )
repo_icon_x += 20
top_left_x += 20
#
@ -5184,16 +5231,16 @@ class Thumbnail( Selectable ):
if HC.FILE_REPOSITORY in service_types:
dc.DrawBitmap( CC.GlobalBMPs.file_repository_pending, repo_icon_x, 0 )
dc.DrawBitmap( CC.GlobalBMPs.file_repository_pending, top_left_x, thumbnail_border )
repo_icon_x += 20
top_left_x += 20
if HC.IPFS in service_types:
dc.DrawBitmap( CC.GlobalBMPs.ipfs_pending, repo_icon_x, 0 )
dc.DrawBitmap( CC.GlobalBMPs.ipfs_pending, top_left_x, thumbnail_border )
repo_icon_x += 20
top_left_x += 20
#
@ -5202,16 +5249,16 @@ class Thumbnail( Selectable ):
if HC.FILE_REPOSITORY in service_types:
dc.DrawBitmap( CC.GlobalBMPs.file_repository_petitioned, repo_icon_x, 0 )
dc.DrawBitmap( CC.GlobalBMPs.file_repository_petitioned, top_left_x, thumbnail_border )
repo_icon_x += 20
top_left_x += 20
if HC.IPFS in service_types:
dc.DrawBitmap( CC.GlobalBMPs.ipfs_petitioned, repo_icon_x, 0 )
dc.DrawBitmap( CC.GlobalBMPs.ipfs_petitioned, top_left_x, thumbnail_border )
repo_icon_x += 20
top_left_x += 20
return bmp

View File

@ -1106,6 +1106,7 @@ class ReviewServicePanel( wx.Panel ):
self._download_progress = ClientGUICommon.TextAndGauge( self )
self._processing_progress = ClientGUICommon.TextAndGauge( self )
self._is_mostly_caught_up_st = ClientGUICommon.BetterStaticText( self )
self._sync_now_button = ClientGUICommon.BetterButton( self, 'process now', self._SyncNow )
self._pause_play_button = ClientGUICommon.BetterButton( self, 'pause', self._PausePlay )
@ -1136,6 +1137,7 @@ class ReviewServicePanel( wx.Panel ):
self.Add( self._metadata_st, CC.FLAGS_EXPAND_PERPENDICULAR )
self.Add( self._download_progress, CC.FLAGS_EXPAND_PERPENDICULAR )
self.Add( self._processing_progress, CC.FLAGS_EXPAND_PERPENDICULAR )
self.Add( self._is_mostly_caught_up_st, CC.FLAGS_EXPAND_PERPENDICULAR )
self.Add( hbox, CC.FLAGS_BUTTON_SIZER )
HG.client_controller.sub( self, 'ServiceUpdated', 'service_updated' )
@ -1325,7 +1327,7 @@ class ReviewServicePanel( wx.Panel ):
def THREADFetchInfo( self, service ):
def wx_code( download_text, download_value, processing_text, processing_value, range ):
def wx_code( download_text, download_value, processing_text, processing_value, range, is_mostly_caught_up ):
if not self:
@ -1335,6 +1337,17 @@ class ReviewServicePanel( wx.Panel ):
self._download_progress.SetValue( download_text, download_value, range )
self._processing_progress.SetValue( processing_text, processing_value, range )
if is_mostly_caught_up:
caught_up_text = 'Client is caught up to service and can upload content.'
else:
caught_up_text = 'Still some processing to do until the client is caught up.'
self._is_mostly_caught_up_st.SetLabel( caught_up_text )
if processing_value == download_value:
self._sync_now_button.Disable()
@ -1376,11 +1389,13 @@ class ReviewServicePanel( wx.Panel ):
( download_value, processing_value, range ) = HG.client_controller.Read( 'repository_progress', service.GetServiceKey() )
is_mostly_caught_up = service.IsMostlyCaughtUp()
download_text = 'downloaded ' + HydrusData.ConvertValueRangeToPrettyString( download_value, range )
processing_text = 'processed ' + HydrusData.ConvertValueRangeToPrettyString( processing_value, range )
wx.CallAfter( wx_code, download_text, download_value, processing_text, processing_value, range )
wx.CallAfter( wx_code, download_text, download_value, processing_text, processing_value, range, is_mostly_caught_up )

View File

@ -221,9 +221,10 @@ class EditAdvancedORPredicates( ClientGUIScrolledPanels.EditPanel ):
self._current_predicates = []
output = ''
colour = ( 0, 0, 0 )
output = ''
if len( text ) > 0:
try:
@ -233,9 +234,6 @@ class EditAdvancedORPredicates( ClientGUIScrolledPanels.EditPanel ):
for s in result:
output += ' OR '.join( s )
output += os.linesep
row_preds = []
for tag_string in s:
@ -251,7 +249,23 @@ class EditAdvancedORPredicates( ClientGUIScrolledPanels.EditPanel ):
inclusive = True
row_pred = ClientSearch.Predicate( HC.PREDICATE_TYPE_TAG, tag_string, inclusive )
if '*' in tag_string:
( namespace, subtag ) = HydrusTags.SplitTag( tag_string )
if '*' not in namespace and subtag == '*':
row_pred = ClientSearch.Predicate( HC.PREDICATE_TYPE_NAMESPACE, namespace, inclusive )
else:
row_pred = ClientSearch.Predicate( HC.PREDICATE_TYPE_WILDCARD, tag_string, inclusive )
else:
row_pred = ClientSearch.Predicate( HC.PREDICATE_TYPE_TAG, tag_string, inclusive )
row_preds.append( row_pred )
@ -266,6 +280,7 @@ class EditAdvancedORPredicates( ClientGUIScrolledPanels.EditPanel ):
output = os.linesep.join( ( pred.ToString() for pred in self._current_predicates ) )
colour = ( 0, 128, 0 )
except ValueError:

View File

@ -4719,9 +4719,9 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
help_button = ClientGUICommon.BetterBitmapButton( self, CC.GlobalBMPs.help, self._ShowHelp )
help_button.SetToolTip( 'Show help regarding editing shortcuts.' )
reserved_panel = ClientGUICommon.StaticBox( self, 'reserved' )
reserved_panel = ClientGUICommon.StaticBox( self, 'built-in hydrus shortcut sets' )
self._reserved_shortcuts = ClientGUIListCtrl.SaneListCtrlForSingleObject( reserved_panel, 180, [ ( 'name', -1 ), ( 'size', 100 ) ], activation_callback = self._EditReserved )
self._reserved_shortcuts = ClientGUIListCtrl.SaneListCtrlForSingleObject( reserved_panel, 180, [ ( 'name', -1 ), ( 'number of shortcuts', 150 ) ], activation_callback = self._EditReserved )
self._reserved_shortcuts.SetMinSize( ( 320, 200 ) )
@ -4729,9 +4729,9 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
#
custom_panel = ClientGUICommon.StaticBox( self, 'custom' )
custom_panel = ClientGUICommon.StaticBox( self, 'custom user sets' )
self._custom_shortcuts = ClientGUIListCtrl.SaneListCtrlForSingleObject( custom_panel, 120, [ ( 'name', -1 ), ( 'size', 100 ) ], delete_key_callback = self._Delete, activation_callback = self._EditCustom )
self._custom_shortcuts = ClientGUIListCtrl.SaneListCtrlForSingleObject( custom_panel, 120, [ ( 'name', -1 ), ( 'number of shortcuts', 150 ) ], delete_key_callback = self._Delete, activation_callback = self._EditCustom )
self._add_button = ClientGUICommon.BetterButton( custom_panel, 'add', self._Add )
self._edit_custom_button = ClientGUICommon.BetterButton( custom_panel, 'edit', self._EditCustom )
@ -4907,13 +4907,13 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
message += os.linesep * 2
message += 'In hydrus, shortcuts are split into different sets that are active in different contexts. Depending on where the program focus is, multiple sets can be active at the same time. On a keyboard or mouse event, the active sets will be consulted one after another (typically from the smallest and most precise focus to the largest and broadest parent) until an action match is found.'
message += os.linesep * 2
message += 'There are two kinds--\'reserved\' and \'custom\':'
message += 'There are two kinds--ones built-in to hydrus, and custom sets that you turn on and off:'
message += os.linesep * 2
message += 'Reserved shortcuts are always active in their contexts--the \'main_gui\' one is always consulted when you hit a key on the main gui window, for instance. They have limited actions to choose from, appropriate to their context. If you would prefer to, say, open the manage tags dialog with Ctrl+F3, edit or add that entry in the \'media\' set and that new shortcut will apply anywhere you are focused on some particular media.'
message += 'The built-in shortcut sets are always active in their contexts--the \'main_gui\' one is always consulted when you hit a key on the main gui window, for instance. They have limited actions to choose from, appropriate to their context. If you would prefer to, say, open the manage tags dialog with Ctrl+F3, edit or add that entry in the \'media\' set and that new shortcut will apply anywhere you are focused on some particular media.'
message += os.linesep * 2
message += 'Custom shortcuts sets are those you can create and rename at will. They are only ever active in the media viewer window, and only when you set them so from the top hover-window\'s keyboard icon. They are primarily meant for setting tags and ratings with shortcuts, and are intended to be turned on and off as you perform different \'filtering\' jobs--for instance, you might like to set the 1-5 keys to the different values of a five-star rating system, or assign a few simple keystrokes to a number of common tags.'
message += os.linesep * 2
message += 'The reserved \'media\' set also supports tag and rating actions, if you would like some of those to always be active.'
message += 'The built-in \'media\' set also supports tag and rating actions, if you would like some of those to always be active.'
wx.MessageBox( message )
@ -5116,6 +5116,9 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent, shortcut, command, shortcuts_name ):
# rewrite this to dynamic layout etc... after the switchover to qt
# get rid of the 'set command' button nonsense
ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
self._final_command = 'simple'
@ -5131,7 +5134,7 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
#
self._none_panel = ClientGUICommon.StaticBox( self, 'simple actions' )
self._simple_action_panel = ClientGUICommon.StaticBox( self, 'simple actions' )
if shortcuts_name in CC.SHORTCUTS_RESERVED_NAMES:
@ -5146,9 +5149,9 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
choices.sort()
self._simple_actions = wx.Choice( self._none_panel, choices = choices )
self._simple_actions = wx.Choice( self._simple_action_panel, choices = choices )
self._set_simple = ClientGUICommon.BetterButton( self._none_panel, 'set command', self._SetSimple )
self._set_simple = ClientGUICommon.BetterButton( self._simple_action_panel, 'set command', self._SetSimple )
#
@ -5197,17 +5200,41 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
#
self._ratings_numerical_incdec_panel = ClientGUICommon.StaticBox( self, 'numerical ratings increment/decrement service actions' )
self._ratings_numerical_incdec_service_keys = wx.Choice( self._ratings_numerical_incdec_panel )
self._ratings_numerical_incdec = ClientGUICommon.BetterChoice( self._ratings_numerical_incdec_panel )
self._ratings_numerical_incdec.Append( HC.content_update_string_lookup[ HC.CONTENT_UPDATE_INCREMENT ], HC.CONTENT_UPDATE_INCREMENT )
self._ratings_numerical_incdec.Append( HC.content_update_string_lookup[ HC.CONTENT_UPDATE_DECREMENT ], HC.CONTENT_UPDATE_DECREMENT )
self._set_ratings_numerical_incdec = ClientGUICommon.BetterButton( self._ratings_numerical_incdec_panel, 'set command', self._SetRatingsNumericalIncDec )
#
services = HG.client_controller.services_manager.GetServices( ( HC.LOCAL_TAG, HC.TAG_REPOSITORY, HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) )
for service in services:
service_name = service.GetName()
service_key = service.GetServiceKey()
service_type = service.GetServiceType()
if service_type in HC.TAG_SERVICES: choice = self._tag_service_keys
elif service_type == HC.LOCAL_RATING_LIKE: choice = self._ratings_like_service_keys
elif service_type == HC.LOCAL_RATING_NUMERICAL: choice = self._ratings_numerical_service_keys
choice.Append( service.GetName(), service.GetServiceKey() )
if service_type in HC.TAG_SERVICES:
self._tag_service_keys.Append( service_name, service_key )
elif service_type == HC.LOCAL_RATING_LIKE:
self._ratings_like_service_keys.Append( service_name, service_key )
elif service_type == HC.LOCAL_RATING_NUMERICAL:
self._ratings_numerical_service_keys.Append( service_name, service_key )
self._ratings_numerical_incdec_service_keys.Append( service_name, service_key )
self._SetActions()
@ -5269,24 +5296,35 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
elif service_type == HC.LOCAL_RATING_NUMERICAL:
self._ratings_numerical_service_keys.SetStringSelection( service_name )
self._SetActions()
if value is None:
if action in ( HC.CONTENT_UPDATE_SET, HC.CONTENT_UPDATE_FLIP ):
self._ratings_numerical_remove.SetValue( True )
self._ratings_numerical_service_keys.SetStringSelection( service_name )
else:
self._SetActions()
num_stars = self._current_ratings_numerical_service.GetNumStars()
if value is None:
self._ratings_numerical_remove.SetValue( True )
else:
num_stars = self._current_ratings_numerical_service.GetNumStars()
slider_value = int( round( value * num_stars ) )
self._ratings_numerical_slider.SetValue( slider_value )
slider_value = int( round( value * num_stars ) )
self._SetRatingsNumerical()
self._ratings_numerical_slider.SetValue( slider_value )
elif action in ( HC.CONTENT_UPDATE_INCREMENT, HC.CONTENT_UPDATE_DECREMENT ):
self._ratings_numerical_incdec_service_keys.SetStringSelection( service_name )
self._ratings_numerical_incdec.SetValue( action )
self._SetRatingsNumericalIncDec()
self._SetRatingsNumerical()
if self._final_command is None:
@ -5304,7 +5342,7 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
none_hbox.Add( self._simple_actions, CC.FLAGS_EXPAND_DEPTH_ONLY )
none_hbox.Add( self._set_simple, CC.FLAGS_VCENTER )
self._none_panel.Add( none_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
self._simple_action_panel.Add( none_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
tag_sub_vbox = wx.BoxSizer( wx.VERTICAL )
@ -5338,6 +5376,14 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._ratings_numerical_panel.Add( ratings_numerical_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
hbox = wx.BoxSizer( wx.HORIZONTAL )
hbox.Add( self._ratings_numerical_incdec_service_keys, CC.FLAGS_EXPAND_DEPTH_ONLY )
hbox.Add( self._ratings_numerical_incdec, CC.FLAGS_VCENTER )
hbox.Add( self._set_ratings_numerical_incdec, CC.FLAGS_VCENTER )
self._ratings_numerical_incdec_panel.Add( hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
self._content_panel.Add( self._flip_or_set_action, CC.FLAGS_EXPAND_PERPENDICULAR )
self._content_panel.Add( self._tag_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
self._content_panel.Add( self._ratings_like_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
@ -5345,8 +5391,9 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
vbox = wx.BoxSizer( wx.VERTICAL )
vbox.Add( self._none_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.Add( self._simple_action_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.Add( self._content_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.Add( self._ratings_numerical_incdec_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
is_custom_or_media = shortcuts_name not in CC.SHORTCUTS_RESERVED_NAMES or shortcuts_name == 'media'
@ -5384,11 +5431,15 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
return self._GetRatingsLike()
if self._final_command == 'ratings_numerical':
elif self._final_command == 'ratings_numerical':
return self._GetRatingsNumerical()
if self._final_command == 'tag':
elif self._final_command == 'ratings_numerical_incdec':
return self._GetRatingsNumericalIncDec()
elif self._final_command == 'tag':
return self._GetTag()
@ -5478,6 +5529,26 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
def _GetRatingsNumericalIncDec( self ):
selection = self._ratings_numerical_incdec_service_keys.GetSelection()
if selection != wx.NOT_FOUND:
service_key = self._ratings_numerical_incdec_service_keys.GetClientData( selection )
action = self._ratings_numerical_incdec.GetValue()
value = 1
return ClientData.ApplicationCommand( CC.APPLICATION_COMMAND_TYPE_CONTENT, ( service_key, HC.CONTENT_TYPE_RATINGS, action, value ) )
else:
raise HydrusExceptions.VetoException( 'Please select a rating service!' )
def _GetTag( self ):
selection = self._tag_service_keys.GetSelection()
@ -5576,6 +5647,15 @@ class ManageShortcutsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._final_command = 'ratings_numerical'
def _SetRatingsNumericalIncDec( self ):
self._EnableButtons()
self._set_ratings_numerical_incdec.Disable()
self._final_command = 'ratings_numerical_incdec'
def _SetTag( self ):
self._EnableButtons()

View File

@ -967,9 +967,11 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
self._migration_source_file_filter.SetValue( self.HASHES_SERVICE_KEY )
self._migration_source_file_filter.SetToolTip( 'This filters the files for which tags can be migrated.' )
self._migration_source_file_filter.SetToolTip( 'Tags that pass this filter will be applied to the destination with the chosen action.' )
message = 'The tags that pass this filter will be included in the migration.'
message = 'Tags that pass this filter will be applied to the destination with the chosen action.'
message += os.linesep * 2
message += 'For instance, if you whitelist the \'series\' namespace, only series: tags from the source will be added to/deleted from the destination.'
tag_filter = ClientTags.TagFilter()
@ -978,6 +980,8 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
self._migration_source_tag_filter.SetToolTip( 'This filters the tags that can be migrated.' )
message = 'The left side of a tag sibling/parent pair must pass this filter for the pair to be included in the migration.'
message += os.linesep * 2
message += 'For instance, if you whitelist the \'character\' namespace, only pairs from the source with character: tags on the left will be added to/deleted from the destination.'
tag_filter = ClientTags.TagFilter()
@ -986,6 +990,8 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
self._migration_source_left_tag_pair_filter.SetToolTip( 'This filters the tags on the left side of the pair that can be migrated.' )
message = 'The right side of a tag sibling/parent pair must pass this filter for the pair to be included in the migration.'
message += os.linesep * 2
message += 'For instance, if you whitelist the \'series\' namespace, only pairs from the source with series: tags on the right will be added to/deleted from the destination.'
tag_filter = ClientTags.TagFilter()
@ -1129,11 +1135,13 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
destination_action_strings[ HC.CONTENT_UPDATE_PETITION ] = 'petitioning them from'
content_type = self._migration_content_type.GetValue()
content_statuses = self._migration_source_content_status_filter.GetValue()
destination_service_key = self._migration_destination.GetValue()
source_service_key = self._migration_source.GetValue()
if content_type == HC.CONTENT_TYPE_MAPPINGS:
destination_service_key = self._migration_destination.GetValue()
if destination_service_key == self.HTA_SERVICE_KEY:
if self._dest_archive_path is None:
@ -1158,8 +1166,6 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
destination = ClientMigration.MigrationDestinationTagServiceMappings( HG.client_controller, destination_service_key, content_action )
content_statuses = self._migration_source_content_status_filter.GetValue()
file_service_key = self._migration_source_file_filter.GetValue()
if file_service_key == self.HASHES_SERVICE_KEY:
@ -1187,8 +1193,6 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
tag_filter = self._migration_source_tag_filter.GetValue()
source_service_key = self._migration_source.GetValue()
if source_service_key == self.HTA_SERVICE_KEY:
if self._source_archive_path is None:
@ -1205,6 +1209,47 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
source = ClientMigration.MigrationSourceTagServiceMappings( HG.client_controller, source_service_key, file_service_key, desired_hash_type, hashes, tag_filter, content_statuses )
else:
if destination_service_key == self.HTPA_SERVICE_KEY:
if self._dest_archive_path is None:
wx.MessageBox( 'Please set a path for the destination Hydrus Tag Pair Archive.' )
return
content_action = HC.CONTENT_UPDATE_ADD
destination = ClientMigration.MigrationDestinationHTPA( HG.client_controller, self._dest_archive_path, content_type )
else:
content_action = self._migration_action.GetValue()
destination = ClientMigration.MigrationDestinationTagServicePairs( HG.client_controller, destination_service_key, content_action, content_type )
left_tag_pair_filter = self._migration_source_left_tag_pair_filter.GetValue()
right_tag_pair_filter = self._migration_source_right_tag_pair_filter.GetValue()
if source_service_key == self.HTPA_SERVICE_KEY:
if self._source_archive_path is None:
wx.MessageBox( 'Please set a path for the source Hydrus Tag Archive.' )
return
source = ClientMigration.MigrationSourceHTPA( HG.client_controller, self._source_archive_path, left_tag_pair_filter, right_tag_pair_filter )
else:
source = ClientMigration.MigrationSourceTagServicePairs( HG.client_controller, source_service_key, content_type, left_tag_pair_filter, right_tag_pair_filter, content_statuses )
title = 'taking {} {} from "{}"{} and {} "{}"'.format( source_content_statuses_strings[ content_statuses ], HC.content_type_string_lookup[ content_type ], source.GetName(), extra_info, destination_action_strings[ content_action ], destination.GetName() )
@ -1397,6 +1442,8 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
def _UpdateMigrationControlsActions( self ):
content_type = self._migration_content_type.GetValue()
self._migration_action.Clear()
source = self._migration_source.GetValue()
@ -1433,7 +1480,7 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
actions.append( HC.CONTENT_UPDATE_DELETE )
if not pulling_and_pushing_existing:
if not pulling_and_pushing_existing and content_type == HC.CONTENT_TYPE_MAPPINGS:
actions.append( HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD )
@ -1554,15 +1601,6 @@ class MigrateTagsPanel( ClientGUIScrolledPanels.ReviewPanel ):
content_type = self._migration_content_type.GetValue()
if content_type != HC.CONTENT_TYPE_MAPPINGS:
wx.MessageBox( 'Tag parents and siblings are coming soon, but not yet supported!' )
self._migration_content_type.SetValue( HC.CONTENT_TYPE_MAPPINGS )
return
self._migration_source.Clear()
self._migration_destination.Clear()

View File

@ -2,6 +2,8 @@ from . import ClientConstants as CC
from . import ClientData
from . import ClientGUICommon
from . import ClientGUIFunctions
from . import ClientGUIScrolledPanels
from . import ClientGUITopLevelWindows
from . import HydrusConstants as HC
from . import HydrusData
from . import HydrusGlobals as HG
@ -694,19 +696,19 @@ class ShortcutsHandler( object ):
event.Skip()
def AddShortcuts( self, shortcuts_name ):
def AddShortcuts( self, shortcut_set_name ):
if shortcuts_name not in self._shortcuts_names:
if shortcut_set_name not in self._shortcuts_names:
self._shortcuts_names.append( shortcuts_name )
self._shortcuts_names.append( shortcut_set_name )
def RemoveShortcuts( self, shortcuts_name ):
def RemoveShortcuts( self, shortcut_set_name ):
if shortcuts_name in self._shortcuts_names:
if shortcut_set_name in self._shortcuts_names:
self._shortcuts_names.remove( shortcuts_name )
self._shortcuts_names.remove( shortcut_set_name )

View File

@ -33,6 +33,8 @@ import wx
class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
TEST_RESULT_DEFAULT = 'Enter a tag here to test if it passes the current filter:'
def __init__( self, parent, tag_filter, prefer_blacklist = False, namespaces = None, message = None ):
ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
@ -110,6 +112,10 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
self._current_filter_st = ClientGUICommon.BetterStaticText( self, 'currently keeping: ', style = wx.ST_ELLIPSIZE_END )
self._test_result_st = ClientGUICommon.BetterStaticText( self, self.TEST_RESULT_DEFAULT, style = wx.TEXT_ALIGNMENT_RIGHT )
self._test_input = wx.TextCtrl( self )
#
vbox = wx.BoxSizer( wx.VERTICAL )
@ -125,6 +131,13 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
vbox.Add( self._redundant_st, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.Add( self._current_filter_st, CC.FLAGS_EXPAND_PERPENDICULAR )
hbox = ClientGUICommon.BetterBoxSizer( wx.HORIZONTAL )
hbox.Add( self._test_result_st, CC.FLAGS_VCENTER_EXPAND_DEPTH_ONLY )
hbox.Add( self._test_input, CC.FLAGS_VCENTER_EXPAND_DEPTH_ONLY )
vbox.Add( hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
self.SetSizer( vbox )
#
@ -135,6 +148,8 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
self._simple_blacklist_global_checkboxes.Bind( wx.EVT_CHECKLISTBOX, self.EventSimpleBlacklistGlobalCheck )
self._simple_blacklist_namespace_checkboxes.Bind( wx.EVT_CHECKLISTBOX, self.EventSimpleBlacklistNamespaceCheck )
self._test_input.Bind( wx.EVT_TEXT, self.EventTestText )
self._UpdateStatus()
@ -706,6 +721,40 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
self._current_filter_st.SetLabelText( 'currently keeping: ' + pretty_tag_filter )
self._UpdateTest()
def _UpdateTest( self ):
test_input = self._test_input.GetValue()
if test_input == '':
text = self.TEST_RESULT_DEFAULT
colour = ( 0, 0, 0 )
else:
tag_filter = self.GetValue()
if tag_filter.TagOK( test_input ):
text = 'tag passes!'
colour = ( 0, 128, 0 )
else:
text = 'tag blocked!'
colour = ( 128, 0, 0 )
self._test_result_st.SetForegroundColour( colour )
self._test_result_st.SetLabel( text )
def EventListBoxChanged( self, event ):
@ -767,6 +816,11 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
def EventTestText( self, event ):
self._UpdateTest()
def GetValue( self ):
tag_filter = ClientTags.TagFilter()
@ -2145,25 +2199,6 @@ class ManageTagParents( ClientGUIScrolledPanels.ManagePanel ):
def EventMenu( self, event ):
action = ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() )
if action is not None:
( command, data ) = action
if command == 'set_search_focus':
self._SetSearchFocus()
else:
event.Skip()
class _Panel( wx.Panel ):
def __init__( self, parent, service_key, tags = None ):
@ -2988,25 +3023,6 @@ class ManageTagSiblings( ClientGUIScrolledPanels.ManagePanel ):
def EventMenu( self, event ):
action = ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() )
if action is not None:
( command, data ) = action
if command == 'set_search_focus':
self._SetSearchFocus()
else:
event.Skip()
def EventServiceChanged( self, event ):
page = self._tag_repositories.GetCurrentPage()

View File

@ -1632,6 +1632,7 @@ class MediaCollection( MediaList, Media ):
self._duration = None
self._num_frames = None
self._num_words = None
self._has_audio = None
self._tags_manager = None
self._locations_manager = None
self._file_viewing_stats_manager = None
@ -1654,6 +1655,8 @@ class MediaCollection( MediaList, Media ):
if duration_sum > 0: self._duration = duration_sum
else: self._duration = None
self._has_audio = True in ( media.HasAudio() for media in self._sorted_media )
tags_managers = [ m.GetTagsManager() for m in self._sorted_media ]
self._tags_manager = MergeTagsManagers( tags_managers )
@ -1795,6 +1798,11 @@ class MediaCollection( MediaList, Media ):
def HasArchive( self ): return self._archive
def HasAudio( self ):
return self._has_audio
def HasDuration( self ): return self._duration is not None
def HasImages( self ): return True in ( media.HasImages() for media in self._sorted_media )
@ -2524,6 +2532,13 @@ class MediaSort( HydrusSerialisable.SerialisableBase ):
return deal_with_none( x.GetDuration() )
elif sort_data == CC.SORT_FILES_BY_HAS_AUDIO:
def sort_key( x ):
return - deal_with_none( x.HasAudio() )
elif sort_data == CC.SORT_FILES_BY_IMPORT_TIME:
file_service = HG.client_controller.services_manager.GetService( file_service_key )
@ -2667,6 +2682,7 @@ class MediaSort( HydrusSerialisable.SerialisableBase ):
sort_string_lookup[ CC.SORT_FILES_BY_APPROX_BITRATE ] = 'file: approximate bitrate'
sort_string_lookup[ CC.SORT_FILES_BY_FILESIZE ] = 'file: filesize'
sort_string_lookup[ CC.SORT_FILES_BY_MIME ] = 'file: filetype'
sort_string_lookup[ CC.SORT_FILES_BY_HAS_AUDIO ] = 'file: has audio'
sort_string_lookup[ CC.SORT_FILES_BY_IMPORT_TIME ] = 'file: time imported'
sort_string_lookup[ CC.SORT_FILES_BY_RANDOM ] = 'random'
sort_string_lookup[ CC.SORT_FILES_BY_NUM_TAGS ] = 'tags: number of tags'
@ -2704,6 +2720,7 @@ class MediaSort( HydrusSerialisable.SerialisableBase ):
sort_string_lookup[ CC.SORT_FILES_BY_APPROX_BITRATE ] = ( 'smallest first', 'largest first', CC.SORT_DESC )
sort_string_lookup[ CC.SORT_FILES_BY_FILESIZE ] = ( 'smallest first', 'largest first', CC.SORT_DESC )
sort_string_lookup[ CC.SORT_FILES_BY_DURATION ] = ( 'shortest first', 'longest first', CC.SORT_DESC )
sort_string_lookup[ CC.SORT_FILES_BY_HAS_AUDIO ] = ( 'audio first', 'silent first', CC.SORT_ASC )
sort_string_lookup[ CC.SORT_FILES_BY_IMPORT_TIME ] = ( 'oldest first', 'newest first', CC.SORT_DESC )
sort_string_lookup[ CC.SORT_FILES_BY_MIME ] = ( 'mime', 'mime', CC.SORT_ASC )
sort_string_lookup[ CC.SORT_FILES_BY_RANDOM ] = ( 'random', 'random', CC.SORT_ASC )

View File

@ -6,6 +6,16 @@ from . import HydrusGlobals as HG
from . import HydrusTagArchive
import os
pair_types_to_content_types = {}
pair_types_to_content_types[ HydrusTagArchive.TAG_PAIR_TYPE_PARENTS ] = HC.CONTENT_TYPE_TAG_PARENTS
pair_types_to_content_types[ HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS ] = HC.CONTENT_TYPE_TAG_SIBLINGS
content_types_to_pair_types = {}
content_types_to_pair_types[ HC.CONTENT_TYPE_TAG_PARENTS ] = HydrusTagArchive.TAG_PAIR_TYPE_PARENTS
content_types_to_pair_types[ HC.CONTENT_TYPE_TAG_SIBLINGS ] = HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS
def GetBasicSpeedStatement( num_done, time_started_precise ):
if num_done == 0:
@ -110,6 +120,62 @@ class MigrationDestinationHTA( MigrationDestination ):
self._hta.BeginBigJob()
class MigrationDestinationHTPA( MigrationDestination ):
def __init__( self, controller, path, content_type ):
name = os.path.basename( path )
MigrationDestination.__init__( self, controller, name )
self._path = path
self._content_type = content_type
self._time_started = 0
self._htpa = None
def CleanUp( self ):
self._htpa.CommitBigJob()
if HydrusData.TimeHasPassed( self._time_started + 120 ):
self._htpa.Optimise()
self._htpa.Close()
self._htpa = None
def DoSomeWork( self, source ):
time_started_precise = HydrusData.GetNowPrecise()
data = source.GetSomeData()
self._htpa.AddPairs( data )
num_done = len( data )
return GetBasicSpeedStatement( num_done, time_started_precise )
def Prepare( self ):
self._time_started = HydrusData.GetNow()
self._htpa = HydrusTagArchive.HydrusTagPairArchive( self._path )
pair_type = content_types_to_pair_types[ self._content_type ]
self._htpa.SetPairType( pair_type )
self._htpa.BeginBigJob()
class MigrationDestinationList( MigrationDestination ):
def __init__( self, controller ):
@ -148,6 +214,21 @@ class MigrationDestinationListMappings( MigrationDestinationList ):
return GetBasicSpeedStatement( num_done, time_started_precise )
class MigrationDestinationListPairs( MigrationDestinationList ):
def DoSomeWork( self, source ):
time_started_precise = HydrusData.GetNowPrecise()
data = source.GetSomeData()
self._data_received.extend( data )
num_done = len( data )
return GetBasicSpeedStatement( num_done, time_started_precise )
class MigrationDestinationTagService( MigrationDestination ):
def __init__( self, controller, tag_service_key, content_action ):
@ -211,6 +292,43 @@ class MigrationDestinationTagServiceMappings( MigrationDestinationTagService ):
return GetBasicSpeedStatement( num_done, time_started_precise )
class MigrationDestinationTagServicePairs( MigrationDestinationTagService ):
def __init__( self, controller, tag_service_key, content_action, content_type ):
MigrationDestinationTagService.__init__( self, controller, tag_service_key, content_action )
self._content_type = content_type
def DoSomeWork( self, source ):
time_started_precise = HydrusData.GetNowPrecise()
data = source.GetSomeData()
content_updates = []
if self._content_action in ( HC.CONTENT_UPDATE_PETITION, HC.CONTENT_UPDATE_PEND ):
reason = 'Mass Migration Job'
else:
reason = None
content_updates = [ HydrusData.ContentUpdate( self._content_type, self._content_action, tag_pair, reason = reason ) for tag_pair in data ]
service_keys_to_content_updates = { self._tag_service_key : content_updates }
self._controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
num_done = len( data )
return GetBasicSpeedStatement( num_done, time_started_precise )
class MigrationJob( object ):
def __init__( self, controller, title, source, destination ):
@ -474,6 +592,60 @@ class MigrationSourceHTA( MigrationSource ):
self._iterator = self._hta.IterateMappings()
class MigrationSourceHTPA( MigrationSource ):
def __init__( self, controller, path, left_tag_filter, right_tag_filter ):
name = os.path.basename( path )
MigrationSource.__init__( self, controller, name )
self._path = path
self._left_tag_filter = left_tag_filter
self._right_tag_filter = right_tag_filter
self._htpa = None
self._iterator = None
def CleanUp( self ):
self._htpa.CommitBigJob()
self._htpa.Close()
self._htpa = None
self._iterator = None
def GetSomeData( self ):
data = HydrusData.PullNFromIterator( self._iterator, 256 )
if len( data ) == 0:
self._work_to_do = False
return data
if not ( self._left_tag_filter.AllowsEverything() and self._right_tag_filter.AllowsEverything() ):
data = [ ( left_tag, right_tag ) for ( left_tag, right_tag ) in data if self._left_tag_filter.TagOK( left_tag ) and self._right_tag_filter.TagOK( right_tag ) ]
return data
def Prepare( self ):
self._htpa = HydrusTagArchive.HydrusTagPairArchive( self._path )
self._htpa.BeginBigJob()
self._iterator = self._htpa.IteratePairs()
class MigrationSourceList( MigrationSource ):
def __init__( self, controller, data ):
@ -545,3 +717,42 @@ class MigrationSourceTagServiceMappings( MigrationSource ):
self._controller.WriteSynchronous( 'migration_start_mappings_job', self._database_temp_job_name, self._file_service_key, self._tag_service_key, self._hashes, self._content_statuses )
class MigrationSourceTagServicePairs( MigrationSource ):
def __init__( self, controller, tag_service_key, content_type, left_tag_filter, right_tag_filter, content_statuses ):
name = controller.services_manager.GetName( tag_service_key )
MigrationSource.__init__( self, controller, name )
self._tag_service_key = tag_service_key
self._content_type = content_type
self._left_tag_filter = left_tag_filter
self._right_tag_filter = right_tag_filter
self._content_statuses = content_statuses
self._database_temp_job_name = 'migrate_{}'.format( os.urandom( 16 ).hex() )
def CleanUp( self ):
self._controller.WriteSynchronous( 'migration_clear_job', self._database_temp_job_name )
def GetSomeData( self ):
data = self._controller.Read( 'migration_get_pairs', self._database_temp_job_name, self._left_tag_filter, self._right_tag_filter )
if len( data ) == 0:
self._work_to_do = False
return data
def Prepare( self ):
self._controller.WriteSynchronous( 'migration_start_pairs_job', self._database_temp_job_name, self._tag_service_key, self._content_type, self._content_statuses )

View File

@ -574,6 +574,11 @@ class ServiceRemote( Service ):
def _DelayFutureRequests( self, reason, duration = None ):
if reason == '':
reason = 'unknown error'
if duration is None:
duration = self._GetErrorWaitPeriod()
@ -813,6 +818,23 @@ class ServiceRestricted( ServiceRemote ):
return 'next account sync ' + s
def GetStatusString( self ):
    """Return 'service is functional', or the failure reason as a string.
    
    Never raises: any exception from the functionality check is converted
    into its string form and returned.
    """
    
    with self._lock:
        
        try:
            
            # account state is deliberately excluded from this check
            self._CheckFunctional( including_account = False )
            
        except Exception as e:
            
            return str( e )
            
        
        return 'service is functional'
def HasPermission( self, content_type, action ):
with self._lock:
@ -1111,6 +1133,8 @@ class ServiceRepository( ServiceRestricted ):
self._sync_lock = threading.Lock()
self._is_mostly_caught_up = None
def _CanSyncDownload( self ):
@ -1248,6 +1272,8 @@ class ServiceRepository( ServiceRestricted ):
self._metadata.UpdateFromSlice( metadata_slice )
self._is_mostly_caught_up = None
self._SetDirty()
@ -1753,6 +1779,8 @@ class ServiceRepository( ServiceRestricted ):
if work_done:
self._is_mostly_caught_up = None
HG.client_controller.pub( 'notify_new_force_refresh_tags_data' )
self._SetDirty()
@ -1810,6 +1838,45 @@ class ServiceRepository( ServiceRestricted ):
def IsMostlyCaughtUp( self ):
"""Cached check of whether this repository's processing is within ~two weeks of current.

Returns the cached bool when available; otherwise recomputes it from the
metadata and the db's list of unprocessed update hashes.
"""
# if a user is more than two weeks behind, let's assume they aren't 'caught up'
CAUGHT_UP_BUFFER = 14 * 86400
two_weeks_ago = HydrusData.GetNow() - CAUGHT_UP_BUFFER
with self._lock:
# None means the cached answer was invalidated (e.g. by a metadata slice or processing work)
if self._is_mostly_caught_up is None:
next_begin = self._metadata.GetNextUpdateBegin()
# haven't synced new metadata, so def not caught up
if next_begin < two_weeks_ago:
self._is_mostly_caught_up = False
return self._is_mostly_caught_up
unprocessed_update_hashes = HG.client_controller.Read( 'repository_unprocessed_hashes', self._service_key )
if len( unprocessed_update_hashes ) == 0:
self._is_mostly_caught_up = True # done them all, even if there aren't any yet to do
else:
# the oldest unprocessed update decides: processed to within the buffer counts as caught up
earliest_unsorted_update_timestamp = self._metadata.GetEarliestTimestampForTheseHashes( unprocessed_update_hashes )
self._is_mostly_caught_up = earliest_unsorted_update_timestamp > two_weeks_ago
return self._is_mostly_caught_up
def IsPaused( self ):
with self._lock:
@ -1847,6 +1914,8 @@ class ServiceRepository( ServiceRestricted ):
self._metadata = HydrusNetwork.Metadata()
self._is_mostly_caught_up = None
self._SetDirty()
HG.client_controller.pub( 'important_dirt_to_clean' )

View File

@ -335,6 +335,14 @@ class TagFilter( HydrusSerialisable.SerialisableBase ):
def TagOK( self, tag ):
    """Thread-safe public check of whether this filter passes the given tag."""
    
    # equivalent to 'with self._lock:' spelled out explicitly
    self._lock.acquire()
    
    try:
        
        return self._TagOK( tag )
        
    finally:
        
        self._lock.release()
def ToBlacklistString( self ):
with self._lock:

View File

@ -67,7 +67,7 @@ options = {}
# Misc
NETWORK_VERSION = 18
SOFTWARE_VERSION = 366
SOFTWARE_VERSION = 367
CLIENT_API_VERSION = 11
SERVER_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -174,6 +174,8 @@ CONTENT_UPDATE_UNDELETE = 14
CONTENT_UPDATE_SET = 15
CONTENT_UPDATE_FLIP = 16
CONTENT_UPDATE_CLEAR_DELETE_RECORD = 17
CONTENT_UPDATE_INCREMENT = 18
CONTENT_UPDATE_DECREMENT = 19
content_update_string_lookup = {}
@ -193,6 +195,8 @@ content_update_string_lookup[ CONTENT_UPDATE_UNDELETE ] = 'undelete'
content_update_string_lookup[ CONTENT_UPDATE_SET ] = 'set'
content_update_string_lookup[ CONTENT_UPDATE_FLIP ] = 'flip on/off'
content_update_string_lookup[ CONTENT_UPDATE_CLEAR_DELETE_RECORD ] = 'clear deletion record'
content_update_string_lookup[ CONTENT_UPDATE_INCREMENT ] = 'increment'
content_update_string_lookup[ CONTENT_UPDATE_DECREMENT ] = 'decrement'
DEFINITIONS_TYPE_HASHES = 0
DEFINITIONS_TYPE_TAGS = 1

View File

@ -466,50 +466,64 @@ class HydrusDB( object ):
self._c.execute( 'PRAGMA temp_store = 2;' ) # use memory for temp store exclusively
self._c.execute( 'PRAGMA main.cache_size = -10000;' )
self._c.execute( 'ATTACH ":memory:" AS mem;' )
self._AttachExternalDatabases()
db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp', 'durable_temp' ) ]
# if this is set to 1, transactions are not immediately synced to the journal so multiple can be undone following a power-loss
# if set to 2, all transactions are synced, so once a new one starts you know the last one is on disk
# corruption cannot occur either way, but since we have multiple ATTACH dbs with diff journals, let's not mess around when power-cut during heavy file import or w/e
synchronous = 2
if HG.db_synchronous_override is not None:
synchronous = HG.db_synchronous_override
# durable_temp is not excluded here
db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp' ) ]
for db_name in db_names:
self._c.execute( 'PRAGMA ' + db_name + '.cache_size = -10000;' )
self._c.execute( 'PRAGMA {}.cache_size = -10000;'.format( db_name ) )
if HG.no_wal:
if HG.db_memory_journalling:
self._c.execute( 'PRAGMA ' + db_name + '.journal_mode = TRUNCATE;' )
self._c.execute( 'PRAGMA {}.journal_mode = MEMORY;'.format( db_name ) )
self._c.execute( 'PRAGMA ' + db_name + '.synchronous = 2;' )
elif HG.no_wal:
self._c.execute( 'SELECT * FROM ' + db_name + '.sqlite_master;' ).fetchone()
self._c.execute( 'PRAGMA {}.journal_mode = TRUNCATE;'.format( db_name ) )
else:
self._c.execute( 'PRAGMA ' + db_name + '.journal_mode = WAL;' )
self._c.execute( 'PRAGMA {}.journal_mode = WAL;'.format( db_name ) )
# if this is set to 1, transactions are not immediately synced to the journal and can be undone following a power-loss
# if set to 2, all transactions are synced
# either way, transactions are atomically consistent, but let's not mess around when power-cut during heavy file import or w/e
self._c.execute( 'PRAGMA ' + db_name + '.synchronous = 2;' )
self._c.execute( 'PRAGMA {}.synchronous = {};'.format( db_name, synchronous ) )
try:
try:
self._c.execute( 'SELECT * FROM {}.sqlite_master;'.format( db_name ) ).fetchone()
except sqlite3.OperationalError as e:
if HG.no_wal:
self._c.execute( 'SELECT * FROM ' + db_name + '.sqlite_master;' ).fetchone()
message = 'The database failed to read any data. Please check your hard drive and perhaps \'help my db is broke.txt\' in the db directory. Full error information:'
except sqlite3.OperationalError as e:
else:
message = 'The database failed to read some data. You may need to run the program in no-wal mode using the --no_wal command parameter. Full error information:'
message += os.linesep * 2
message += str( e )
HydrusData.DebugPrint( message )
raise HydrusExceptions.DBAccessException( message )
message += os.linesep * 2
message += str( e )
HydrusData.DebugPrint( message )
raise HydrusExceptions.DBAccessException( message )
try:

View File

@ -10,6 +10,8 @@ model_shutdown = False
no_daemons = False # if True, run without background daemons
no_wal = False # if True, run without WAL db journalling
no_db_temp_files = False # if True, run db temp operations entirely in memory
db_memory_journalling = False # if True, run db journalling entirely in memory (fast but DANGEROUS on power loss)
db_synchronous_override = None # optional int 0-3 overriding the SQLite synchronous PRAGMA; None = default
import_folders_running = False
export_folders_running = False

View File

@ -1712,6 +1712,28 @@ class Metadata( HydrusSerialisable.SerialisableBase ):
def GetEarliestTimestampForTheseHashes( self, hashes ):
    """Return the 'end' timestamp of the earliest update that contains any of
    the given update hashes, or 0 if none of them are known."""
    
    wanted = set( hashes )
    
    with self._lock:
        
        # walk updates in index order so the first hit is the earliest update
        for ( update_index, ( update_hashes, begin, end ) ) in sorted( self._metadata.items() ):
            
            if not wanted.isdisjoint( update_hashes ):
                
                return end
                
            
        
    
    return 0
def GetNextUpdateIndex( self ):
with self._lock:
@ -1762,7 +1784,7 @@ class Metadata( HydrusSerialisable.SerialisableBase ):
with self._lock:
num_update_hashes = sum( ( len( update_hashes ) for ( update_hashes, begin, end ) in list(self._metadata.values()) ) )
num_update_hashes = sum( ( len( update_hashes ) for ( update_hashes, begin, end ) in self._metadata.values() ) )
return num_update_hashes
@ -1856,16 +1878,6 @@ class Metadata( HydrusSerialisable.SerialisableBase ):
def GetUpdateTimestamps( self, update_index ):
with self._lock:
update_timestamps = [ ( update_index, begin, end ) for ( update_index, ( update_hashes, begin, end ) ) in list(self._metadata.items()) ]
return update_timestamps
def HasUpdateHash( self, update_hash ):
with self._lock:

View File

@ -26,6 +26,67 @@ deleted_tag_pool = [ 'trash', 'ugly', 'character:smaus aran', 'red hair' ]
to_be_pended_tag_pool = [ 'clothing:high heels', 'firearm', 'puffy armpit' ]
# fixture pools for the tag pair migration tests
# ( child, parent ) rows already committed as current
current_parents_pool = []
current_parents_pool.append( ( 'character:princess peach', 'series:super mario bros' ) )
current_parents_pool.append( ( 'character:princess peach', 'gender:female' ) )
current_parents_pool.append( ( 'mario_(mario)', 'series:super mario bros' ) )
current_parents_pool.append( ( 'meta:explicit', 'nsfw' ) )
current_parents_pool.append( ( 'bepis', 'genidalia' ) )
current_parents_pool.append( ( 'bagina', 'genidalia' ) )
# rows pended on the repositories (note some overlap with current, presumably deliberate -- TODO confirm)
pending_parents_pool = []
pending_parents_pool.append( ( 'character:princess daisy', 'series:super mario bros' ) )
pending_parents_pool.append( ( 'character:princess daisy', 'gender:female' ) )
pending_parents_pool.append( ( 'mario_(mario)', 'series:super mario bros' ) )
pending_parents_pool.append( ( 'bepis', 'genidalia' ) )
pending_parents_pool.append( ( 'bagina', 'genidalia' ) )
# rows a migration job is expected to newly add/pend during the tests
to_be_pended_parents_pool = []
to_be_pended_parents_pool.append( ( 'pend:parent a', 'pend:parent b' ) )
to_be_pended_parents_pool.append( ( 'parent c', 'parent d' ) )
# rows recorded as deleted
deleted_parents_pool = []
deleted_parents_pool.append( ( 'male', 'human' ) )
deleted_parents_pool.append( ( 'table', 'general:furniture' ) )
deleted_parents_pool.append( ( 'character:iron man', 'studio:dc' ) )
# same four pools again, but for ( bad tag, good tag ) sibling rows
current_siblings_pool = []
current_siblings_pool.append( ( 'lara_croft', 'character:lara croft' ) )
current_siblings_pool.append( ( 'lara croft', 'character:lara croft' ) )
current_siblings_pool.append( ( 'series:tomb raider (series)', 'series:tomb raider' ) )
current_siblings_pool.append( ( 'general:lamp', 'lamp' ) )
current_siblings_pool.append( ( 'bog', 'bepis' ) )
current_siblings_pool.append( ( 'buggy', 'bagina' ) )
pending_siblings_pool = []
pending_siblings_pool.append( ( 'horse', 'species:horse' ) )
pending_siblings_pool.append( ( 'equine', 'species:equine' ) )
pending_siblings_pool.append( ( 'dog', 'species:dog' ) )
pending_siblings_pool.append( ( 'canine', 'species:canine' ) )
pending_siblings_pool.append( ( 'eguine', 'equine' ) )
to_be_pended_siblings_pool = []
to_be_pended_siblings_pool.append( ( 'pend:sibling a', 'pend:sibling b' ) )
to_be_pended_siblings_pool.append( ( 'sibling c', 'sibling d' ) )
deleted_siblings_pool = []
deleted_siblings_pool.append( ( 'male', 'male:male' ) )
deleted_siblings_pool.append( ( 'table', 'general:table' ) )
deleted_siblings_pool.append( ( 'shadow', 'character:shadow the hedgehog' ) )
# content type -> ( current, pending, to_be_pended, deleted ) pools
pair_types_to_pools = {}
pair_types_to_pools[ HC.CONTENT_TYPE_TAG_PARENTS ] = ( current_parents_pool, pending_parents_pool, to_be_pended_parents_pool, deleted_parents_pool )
pair_types_to_pools[ HC.CONTENT_TYPE_TAG_SIBLINGS ] = ( current_siblings_pool, pending_siblings_pool, to_be_pended_siblings_pool, deleted_siblings_pool )
class TestMigration( unittest.TestCase ):
@classmethod
@ -143,7 +204,7 @@ class TestMigration( unittest.TestCase ):
def _add_tags_to_services( self ):
def _add_mappings_to_services( self ):
content_updates = []
@ -633,13 +694,306 @@ class TestMigration( unittest.TestCase ):
run_test( source, self._test_tag_repo_service_keys[1], HC.CONTENT_UPDATE_PETITION, data )
def _add_pairs_to_services( self, content_type ):
    """Seed the local tag service and every test tag repo with the pair pools
    for this content type (parents or siblings)."""
    
    ( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
    
    # the local service gets the current pairs added and the deleted pairs deleted
    local_updates = [ HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_ADD, pair ) for pair in current ]
    local_updates.extend( [ HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_DELETE, pair ) for pair in deleted ] )
    
    self.WriteSynchronous( 'content_updates', { CC.LOCAL_TAG_SERVICE_KEY : local_updates } )
    
    # the repositories additionally get the pending pairs pended
    repo_updates = [ HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_ADD, pair ) for pair in current ]
    repo_updates.extend( [ HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_PEND, pair ) for pair in pending ] )
    repo_updates.extend( [ HydrusData.ContentUpdate( content_type, HC.CONTENT_UPDATE_DELETE, pair ) for pair in deleted ] )
    
    self.WriteSynchronous( 'content_updates', { service_key : repo_updates for service_key in self._test_tag_repo_service_keys.values() } )
def _test_pairs_list_to_list( self, content_type ):
    """A list source feeding a list destination should deliver the pairs
    unchanged and in order."""
    
    ( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
    
    expected = list( current )
    
    # sanity check the fixture actually has data
    self.assertTrue( len( expected ) > 0 )
    
    migration_source = ClientMigration.MigrationSourceList( self, expected )
    migration_destination = ClientMigration.MigrationDestinationListPairs( self )
    
    ClientMigration.MigrationJob( self, 'test', migration_source, migration_destination ).Run()
    
    self.assertEqual( migration_destination.GetDataReceived(), expected )
def _test_pairs_htpa_to_list( self, content_type ):
"""Write the current pairs to a fresh HTPA file, then check an HTPA source
delivers them to a list destination under various tag filter combinations."""
# runs one migration and compares the received pairs (order-insensitive) to expectation
def run_test( source, expected_data ):
destination = ClientMigration.MigrationDestinationListPairs( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
htpa_path = os.path.join( TestController.DB_DIR, 'htpa.db' )
# build and populate the archive, then close it so the source can reopen it
htpa = HydrusTagArchive.HydrusTagPairArchive( htpa_path )
if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
htpa.SetPairType( HydrusTagArchive.TAG_PAIR_TYPE_PARENTS )
elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
htpa.SetPairType( HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS )
htpa.BeginBigJob()
htpa.AddPairs( current )
htpa.CommitBigJob()
htpa.Optimise()
htpa.Close()
del htpa
#
# test tag filter, left, right, both
free_filter = ClientTags.TagFilter()
# namespace_filter only passes namespaced tags
namespace_filter = ClientTags.TagFilter()
namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
test_filters = []
test_filters.append( ( free_filter, free_filter ) )
test_filters.append( ( namespace_filter, free_filter ) )
test_filters.append( ( free_filter, namespace_filter ) )
test_filters.append( ( namespace_filter, namespace_filter ) )
for ( left_tag_filter, right_tag_filter ) in test_filters:
source = ClientMigration.MigrationSourceHTPA( self, htpa_path, left_tag_filter, right_tag_filter )
# expectation mirrors the source's own filtering rule
expected_data = [ ( left_tag, right_tag ) for ( left_tag, right_tag ) in current if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ]
run_test( source, expected_data )
#
os.remove( htpa_path )
def _test_pairs_list_to_htpa( self, content_type ):
"""A list source feeding an HTPA destination should leave exactly those
pairs readable from the archive on disk."""
# runs the migration, then reopens the written archive and compares contents
def run_test( source, destination_path, content_type, expected_data ):
destination = ClientMigration.MigrationDestinationHTPA( self, destination_path, content_type )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
hta = HydrusTagArchive.HydrusTagPairArchive( destination_path )
result = list( hta.IteratePairs() )
self.assertEqual( set( result ), set( expected_data ) )
hta.Close()
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
htpa_path = os.path.join( TestController.DB_DIR, 'htpa.db' )
#
source = ClientMigration.MigrationSourceList( self, current )
run_test( source, htpa_path, content_type, list( current ) )
#
# clean up the temp archive file
os.remove( htpa_path )
def _test_pairs_service_to_list( self, content_type ):
"""A tag service source should deliver the pairs matching the requested
content statuses and tag filters to a list destination."""
# runs one migration and compares the received pairs (order-insensitive) to expectation
def run_test( source, expected_data ):
destination = ClientMigration.MigrationDestinationListPairs( self )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
# test filters and content statuses
tag_repo_service_key = self._test_tag_repo_service_keys[10]
# rows of ( service to read, pools expected back, statuses to request )
content_source_tests = []
content_source_tests.append( ( CC.LOCAL_TAG_SERVICE_KEY, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
content_source_tests.append( ( CC.LOCAL_TAG_SERVICE_KEY, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
content_source_tests.append( ( tag_repo_service_key, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
content_source_tests.append( ( tag_repo_service_key, ( current, pending ), ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) ) )
content_source_tests.append( ( tag_repo_service_key, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
free_filter = ClientTags.TagFilter()
# namespace_filter only passes namespaced tags
namespace_filter = ClientTags.TagFilter()
namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
test_filters = []
test_filters.append( ( free_filter, free_filter ) )
test_filters.append( ( namespace_filter, free_filter ) )
test_filters.append( ( free_filter, namespace_filter ) )
test_filters.append( ( namespace_filter, namespace_filter ) )
for ( left_tag_filter, right_tag_filter ) in test_filters:
for ( service_key, content_lists, content_statuses ) in content_source_tests:
source = ClientMigration.MigrationSourceTagServicePairs( self, service_key, content_type, left_tag_filter, right_tag_filter, content_statuses )
# expectation mirrors the source's own filtering rule across all requested pools
expected_data = set()
for content_list in content_lists:
expected_data.update( ( ( left_tag, right_tag ) for ( left_tag, right_tag ) in content_list if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ) )
run_test( source, expected_data )
def _test_pairs_list_to_service( self, content_type ):
"""A list source feeding a tag service destination with a given content
action should land the pairs in the matching status bucket."""
# runs the migration, then reads the service's pair store and checks placement
def run_test( source, tag_service_key, content_action, expected_data ):
destination = ClientMigration.MigrationDestinationTagServicePairs( self, tag_service_key, content_action, content_type )
job = ClientMigration.MigrationJob( self, 'test', source, destination )
job.Run()
if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
statuses_to_pairs = self.Read( 'tag_parents', tag_service_key )
elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
statuses_to_pairs = self.Read( 'tag_siblings', tag_service_key )
# each action implies a bucket the pairs must be in, and possibly one they must not be in
if content_action == HC.CONTENT_UPDATE_ADD:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ] )
should_not_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_DELETED ] )
elif content_action == HC.CONTENT_UPDATE_DELETE:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_DELETED ] )
should_not_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ] )
elif content_action == HC.CONTENT_UPDATE_PEND:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] )
should_not_be_in = set()
elif content_action == HC.CONTENT_UPDATE_PETITION:
should_be_in = set( statuses_to_pairs[ HC.CONTENT_STATUS_PETITIONED ] )
should_not_be_in = set()
for pair in expected_data:
self.assertIn( pair, should_be_in )
self.assertNotIn( pair, should_not_be_in )
#
tag_repo_service_key = self._test_tag_repo_service_keys[11]
( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
# rows of ( service to write to, pairs to send, action to apply )
test_rows = []
test_rows.append( ( CC.LOCAL_TAG_SERVICE_KEY, to_be_pended, HC.CONTENT_UPDATE_ADD ) )
test_rows.append( ( CC.LOCAL_TAG_SERVICE_KEY, random.sample( current, 3 ), HC.CONTENT_UPDATE_DELETE ) )
test_rows.append( ( tag_repo_service_key, to_be_pended, HC.CONTENT_UPDATE_PEND ) )
test_rows.append( ( tag_repo_service_key, random.sample( current, 3 ), HC.CONTENT_UPDATE_PETITION ) )
for ( service_key, data, action ) in test_rows:
source = ClientMigration.MigrationSourceList( self, data )
run_test( source, service_key, action, data )
def test_migration( self ):
# mappings
self._set_up_services()
self._do_fake_imports()
self._add_tags_to_services()
self._add_mappings_to_services()
self._test_mappings_list_to_list()
self._test_mappings_hta_to_list()
@ -647,8 +1001,14 @@ class TestMigration( unittest.TestCase ):
self._test_mappings_service_to_list()
self._test_mappings_list_to_service()
# parents
# siblings
for content_type in ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_TYPE_TAG_SIBLINGS ):
self._add_pairs_to_services( content_type )
self._test_pairs_list_to_list( content_type )
self._test_pairs_htpa_to_list( content_type )
self._test_pairs_list_to_htpa( content_type )
self._test_pairs_service_to_list( content_type )
self._test_pairs_list_to_service( content_type )

View File

@ -41,10 +41,12 @@ try:
argparser.add_argument( 'action', default = 'start', nargs = '?', choices = [ 'start', 'stop', 'restart' ], help = 'either start this server (default), or stop an existing server, or both' )
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
argparser.add_argument( '--no_wal', action='store_true', help = 'run without WAL db journalling' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run the db entirely in memory' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_memory_journalling', action='store_true', help = 'run db journalling entirely in memory (DANGEROUS)' )
argparser.add_argument( '--db_synchronous_override', help = 'override SQLite Synchronous PRAGMA (range 0-3, default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
result = argparser.parse_args()
@ -87,6 +89,25 @@ try:
HG.no_daemons = result.no_daemons
HG.no_wal = result.no_wal
HG.db_memory_journalling = result.db_memory_journalling
if result.db_synchronous_override is not None:
try:
db_synchronous_override = int( result.db_synchronous_override )
except ValueError:
raise Exception( 'db_synchronous_override must be an integer in the range 0-3' )
if db_synchronous_override not in range( 4 ):
raise Exception( 'db_synchronous_override must be in the range 0-3' )
HG.no_db_temp_files = result.no_db_temp_files
if result.temp_dir is not None: