Version 202

This commit is contained in:
Hydrus Network Developer 2016-04-20 15:42:21 -05:00
parent 505db9306e
commit 596667f655
24 changed files with 1382 additions and 808 deletions

View File

@ -8,6 +8,42 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 202</h3></li>
<ul>
<li>fixed a problem with the v198->v199 update step</li>
<li>added a catch for a bad error in the vacuum step in the v200->v201 update</li>
<li>added some rollback exception handling for unusual situations</li>
<li>the autocomplete caches under the client_cache subdirectory are dynamically folded into a single client.caches.db file, and that file is folded into the main db journal like the other attached dbs (a rough sketch of this folding pattern follows the version 202 list below)</li>
<li>ac_cache file and tag processing lag, and edge-case autocomplete miscounting, should be removed as a result</li>
<li>cache generation is optimised for empty services</li>
<li>specific file_caches' add and delete file commands now modify their tables directly, saving a whole load of time that was previously spent on superfluous filtering</li>
<li>the specific ac_caches' add mappings command is a bit more logically efficient</li>
<li>increased cache size for each attached database</li>
<li>folded all 'attached' db update code into the main update_db routine</li>
<li>folded all 'attached' db init code into the main create_db routine</li>
<li>made the vacuum checking routine safer</li>
<li>reduced analyze log spam</li>
<li>analyze maintenance breaks its larger jobs up better</li>
<li>analyze maintenance now analyzes external databases</li>
<li>the db connection now refreshes every half hour, to regularly clear out journal files</li>
<li>the hydrus db assumes it generally has exclusive control over its db, so it no longer wraps its read-only requests in transactions</li>
<li>session load now happens off the main gui thread, and media generation is broken into 256-file chunks, which reduces startup db and gui lag for large sessions</li>
<li>the youtube video downloader now lists a webm option if youtube provides one (they seem to offer 360p webms on everything(?) now)</li>
<li>the timedeltacontrol (where you set the period for the thread checker and import folders and so on) is replaced by the more compact timedeltabutton, which will launch a dialog with the old control when clicked</li>
<li>the server will no longer revisit old updates and cull since-deleted content, as this was proving cpu-burdensome and not helpful to future plans for update dissemination, which will rely on static update files</li>
<li>misc cleanup of some server db stuff</li>
<li>content update processing will not spam menu updates throughout, but only notify once at the end, which should reduce idle gui hang due to db access choke</li>
<li>the autocomplete tag entry will not refresh system pred menu during idle, which should reduce idle gui hang due to db access choke</li>
<li>shutdown repo sync will now report the update summary text and will not spam the intervening statuses to the log</li>
<li>moved timestamp out of the serviceless media_result and into the locations_manager, to reflect that the database knows a different timestamp for each current service</li>
<li>local and trash timestamps are generated from the locations_manager now, and non-local file service timestamps will soon follow (e.g. 'pinned to ipfs_service 3 days ago')</li>
<li>if the timestamp is unknown for the current service context, it will not be displayed</li>
<li>file repositories will now only sync thumbnails when their updates are completely synced (to stop 404s from since-deleted files)</li>
<li>fixed a ( 0, 0 ) resize event bug that was sometimes causing borked media containers on media viewer shutdown</li>
<li>syncing to a sha256 tag archive will still import the data as requested, but a popup note will explain that since everything will be imported, further syncing is pointless, and the sync will not be saved</li>
<li>fixed a bug in hta export if you click cancel on the hash_type choosing dialogs</li>
<li>misc cleanup</li>
</ul>
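Since the client db diff in this commit is suppressed for size, the cache folding described above is not visible in code. What follows is only a rough sketch of the general attach-copy-drop pattern, modelled on the visible server-side master/mappings migration further down; the file name, table name and columns are hypothetical stand-ins, not the real client schema.

import os
import sqlite3

def fold_ac_cache_into_caches_db( db_dir, cache_dir, service_id ):
    
    # the single combined caches file that the main client db will attach from now on
    caches_db_path = os.path.join( db_dir, 'client.caches.db' )
    
    db = sqlite3.connect( caches_db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
    
    c = db.cursor()
    
    # hypothetical per-service autocomplete cache file under the old client_cache subdirectory
    old_cache_path = os.path.join( cache_dir, 'ac_cache_' + str( service_id ) + '.db' )
    
    c.execute( 'ATTACH ? AS old_cache;', ( old_cache_path, ) )
    
    # hypothetical schema: copy the counts wholesale into the combined file, then drop the old file
    c.execute( 'CREATE TABLE IF NOT EXISTS ac_cache_' + str( service_id ) + ' ( tag_id INTEGER PRIMARY KEY, current_count INTEGER, pending_count INTEGER );' )
    c.execute( 'INSERT OR IGNORE INTO ac_cache_' + str( service_id ) + ' SELECT * FROM old_cache.ac_cache;' )
    
    c.execute( 'DETACH old_cache;' )
    
    del c
    del db
    
    os.remove( old_cache_path )

Once everything lives in client.caches.db, the main connection can simply ATTACH that one file at startup, the same way the other external databases are attached in the HydrusDB._AttachExternalDatabases hunk further down.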
<li><h3>version 201</h3></li>
<ul>
<li>exported hash and tag master tables to external database files for both client and server</li>

View File

@ -598,16 +598,15 @@ class Controller( HydrusController.HydrusController ):
if HydrusData.TimeHasPassed( shutdown_timestamps[ CC.SHUTDOWN_TIMESTAMP_VACUUM ] + maintenance_vacuum_period ):
self.WriteSynchronous( 'vacuum' )
self.WriteInterruptable( 'vacuum' )
stale_time_delta = 14 * 86400
stop_time = HydrusData.GetNow() + 120
self.pub( 'splash_set_status_text', 'analyzing' )
self.WriteSynchronous( 'analyze', stale_time_delta, stop_time )
self.WriteInterruptable( 'analyze', stop_time )
if self._timestamps[ 'last_service_info_cache_fatten' ] == 0:

File diff suppressed because it is too large

View File

@ -678,7 +678,7 @@ class Imageboard( HydrusData.HydrusYAMLBase ):
def IsOkToPost( self, media_result ):
( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = media_result.ToTuple()
( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = media_result.ToTuple()
if CC.RESTRICTION_MIN_RESOLUTION in self._restrictions:
@ -1300,18 +1300,18 @@ class ServiceRepository( ServiceRestricted ):
def CanDownloadUpdate( self ):
update_due = HydrusData.TimeHasPassed( self._info[ 'next_download_timestamp' ] + HC.UPDATE_DURATION + 1800 )
work_to_do = self.IsUpdateDueForDownload()
return self.CanDownload() and update_due and not self.IsPaused()
return work_to_do and self.CanDownload() and not self.IsPaused()
def CanProcessUpdate( self ):
update_is_downloaded = self._info[ 'next_download_timestamp' ] > self._info[ 'next_processing_timestamp' ]
work_to_do = self.IsUpdateDueForProcessing()
it_is_time = HydrusData.TimeHasPassed( self._info[ 'next_processing_timestamp' ] + HC.UPDATE_DURATION + HC.options[ 'processing_phase' ] )
return update_is_downloaded and it_is_time and not self.IsPaused()
return work_to_do and it_is_time and not self.IsPaused()
def GetTimestamps( self ): return ( self._info[ 'first_timestamp' ], self._info[ 'next_download_timestamp' ], self._info[ 'next_processing_timestamp' ] )
@ -1368,6 +1368,16 @@ class ServiceRepository( ServiceRestricted ):
return self._info[ 'paused' ]
def IsUpdateDueForDownload( self ):
return HydrusData.TimeHasPassed( self._info[ 'next_download_timestamp' ] + HC.UPDATE_DURATION + 1800 )
def IsUpdateDueForProcessing( self ):
return self._info[ 'next_download_timestamp' ] > self._info[ 'next_processing_timestamp' ]
def Sync( self, only_when_idle = False, stop_time = None ):
if self.IsPaused():
@ -1672,8 +1682,6 @@ class ServiceRepository( ServiceRestricted ):
HydrusGlobals.client_controller.Write( 'service_updates', service_keys_to_service_updates )
HydrusGlobals.client_controller.pub( 'notify_new_pending' )
HydrusGlobals.client_controller.WaitUntilPubSubsEmpty()
@ -1720,11 +1728,19 @@ class ServiceRepository( ServiceRestricted ):
time.sleep( 3 )
finally:
HydrusGlobals.client_controller.pub( 'notify_new_pending' )
HydrusGlobals.client_controller.pub( 'notify_new_siblings' )
HydrusGlobals.client_controller.pub( 'notify_new_parents' )
def SyncThumbnails( self, job_key ):
if self._service_type == HC.FILE_REPOSITORY and self.CanDownload():
synced = not ( self.IsUpdateDueForDownload() or self.IsUpdateDueForProcessing() )
if self._service_type == HC.FILE_REPOSITORY and synced and self.CanDownload():
options = HydrusGlobals.client_controller.GetOptions()

View File

@ -211,7 +211,7 @@ def GetYoutubeFormats( youtube_url ):
raise Exception( 'Could not fetch video info from youtube!' + os.linesep + HydrusData.ToUnicode( e ) )
info = { ( s.extension, s.resolution ) : ( s.url, s.title ) for s in p.streams if s.extension in ( 'flv', 'mp4' ) }
info = { ( s.extension, s.resolution ) : ( s.url, s.title ) for s in p.streams if s.extension in ( 'flv', 'mp4', 'webm' ) }
return info

View File

@ -65,6 +65,8 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
self._focus_holder = wx.Window( self, size = ( 0, 0 ) )
self._loading_session = False
self._media_status_override = None
self._closed_pages = []
self._deleted_page_keys = set()
self._lock = threading.Lock()
@ -1166,6 +1168,15 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _LoadGUISession( self, name ):
if self._loading_session:
HydrusData.ShowText( 'Sorry, currently loading a session. Please wait.' )
return
self._loading_session = True
try:
session = self._controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_GUI_SESSION, name )
@ -1194,30 +1205,57 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
self._CloseCurrentPage( polite = False )
for ( page_name, management_controller, initial_hashes ) in session.IteratePages():
def do_it():
try:
if len( initial_hashes ) > 0:
for ( page_name, management_controller, initial_hashes ) in session.IteratePages():
file_service_key = management_controller.GetKey( 'file_service' )
initial_media_results = self._controller.Read( 'media_results', file_service_key, initial_hashes )
else:
initial_media_results = []
try:
if len( initial_hashes ) > 0:
file_service_key = management_controller.GetKey( 'file_service' )
initial_media_results = []
for group_of_inital_hashes in HydrusData.SplitListIntoChunks( initial_hashes, 256 ):
more_media_results = self._controller.Read( 'media_results', file_service_key, group_of_inital_hashes )
initial_media_results.extend( more_media_results )
self._media_status_override = u'Loading session page \'' + page_name + u'\'\u2026 ' + HydrusData.ConvertValueRangeToPrettyString( len( initial_media_results ), len( initial_hashes ) )
self._controller.pub( 'refresh_status' )
else:
initial_media_results = []
wx.CallAfter( self._NewPage, page_name, management_controller, initial_media_results = initial_media_results )
except Exception as e:
HydrusData.ShowException( e )
self._NewPage( page_name, management_controller, initial_media_results = initial_media_results )
if HC.PLATFORM_OSX:
wx.CallAfter( self._ClosePage, 0 )
except Exception as e:
finally:
HydrusData.ShowException( e )
self._loading_session = False
self._media_status_override = None
if HC.PLATFORM_OSX: self._ClosePage( 0 )
self._controller.CallToThread( do_it )
def _Manage4chanPass( self ):
@ -1502,10 +1540,23 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _RefreshStatusBar( self ):
page = self._notebook.GetCurrentPage()
if page is None: media_status = ''
else: media_status = page.GetPrettyStatus()
if self._media_status_override is not None:
media_status = self._media_status_override
else:
page = self._notebook.GetCurrentPage()
if page is None:
media_status = ''
else:
media_status = page.GetPrettyStatus()
if self._controller.CurrentlyIdle():
@ -1670,6 +1721,13 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _SaveGUISession( self, name = None ):
if self._loading_session:
HydrusData.ShowText( 'Sorry, currently loading a session. Please wait.' )
return
if name is None:
while True:
@ -2062,7 +2120,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
service.Request( HC.POST, 'file', { 'file' : file } )
( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = media_result.ToTuple()
( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = media_result.ToTuple()
timestamp = HydrusData.GetNow()
@ -2458,7 +2516,10 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
HydrusGlobals.restart = True
self._SaveGUISession( 'last session' )
if not self._loading_session:
self._SaveGUISession( 'last session' )
self._message_manager.CleanBeforeDestroy()
@ -3732,6 +3793,7 @@ class FrameSplash( ClientGUICommon.Frame ):
self._controller.sub( self, 'SetTitleText', 'splash_set_title_text' )
self._controller.sub( self, 'SetStatusText', 'splash_set_status_text' )
self._controller.sub( self, 'SetStatusTextNoLog', 'splash_set_status_text_no_log' )
self._controller.sub( self, 'Destroy', 'splash_destroy' )
self.Raise()
@ -3819,9 +3881,12 @@ class FrameSplash( ClientGUICommon.Frame ):
def SetStatusText( self, text ):
def SetStatusText( self, text, print_to_log = True ):
HydrusData.Print( text )
if print_to_log:
HydrusData.Print( text )
self._status_text = text
@ -3830,9 +3895,12 @@ class FrameSplash( ClientGUICommon.Frame ):
self.Refresh()
def SetTitleText( self, text ):
def SetTitleText( self, text, print_to_log = True ):
HydrusData.Print( text )
if print_to_log:
HydrusData.Print( text )
self._title_text = text

View File

@ -127,7 +127,7 @@ class Animation( wx.Window ):
( initial_width, initial_height ) = initial_size
self._media = media
self._video_container = self._video_container = ClientRendering.RasterContainerVideo( self._media, initial_size )
self._video_container = ClientRendering.RasterContainerVideo( self._media, initial_size )
self._animation_bar = None
@ -1237,19 +1237,24 @@ class Canvas( wx.Window ):
self._current_display_media = self._current_media.GetDisplayMedia()
if self._current_display_media.GetLocationsManager().HasLocal():
( initial_size, initial_position ) = self._GetMediaContainerSizeAndPosition()
( initial_width, initial_height ) = initial_size
if self._current_display_media.GetLocationsManager().HasLocal() and initial_width > 0 and initial_height > 0:
self._RecalcZoom()
( initial_size, initial_position ) = self._GetMediaContainerSizeAndPosition()
self._media_container = MediaContainer( self, self._image_cache, self._current_display_media, initial_size, initial_position )
if self._claim_focus: self._media_container.SetFocus()
self._PrefetchNeighbours()
else: self._current_media = None
else:
self._current_media = None
HydrusGlobals.client_controller.pub( 'canvas_new_display_media', self._canvas_key, self._current_display_media )
@ -1502,7 +1507,11 @@ class CanvasWithDetails( Canvas ):
def _GetInfoString( self ):
info_string = self._current_media.GetPrettyInfo() + ' | ' + ClientData.ConvertZoomToPercentage( self._current_zoom ) + ' | ' + self._current_media.GetPrettyAge()
lines = self._current_display_media.GetPrettyInfoLines()
lines.insert( 1, ClientData.ConvertZoomToPercentage( self._current_zoom ) )
info_string = ' | '.join( lines )
return info_string
@ -1569,8 +1578,10 @@ class CanvasPanel( Canvas ):
menu = wx.Menu()
menu.Append( CC.ID_NULL, self._current_display_media.GetPrettyInfo() )
menu.Append( CC.ID_NULL, self._current_display_media.GetPrettyAge() )
for line in self._current_display_media.GetPrettyInfoLines():
menu.Append( CC.ID_NULL, line )
#
@ -1792,14 +1803,14 @@ class CanvasMediaList( ClientMedia.ListeningMediaList, CanvasWithDetails ):
delay_base = 800
num_to_go_back = 2
num_to_go_forward = 2
num_to_go_back = 1
num_to_go_forward = 1
self._just_started = False
else:
delay_base = 200
delay_base = 400
num_to_go_back = 3
num_to_go_forward = 5
@ -2656,8 +2667,10 @@ class CanvasMediaListBrowser( CanvasMediaListNavigable ):
menu = wx.Menu()
menu.Append( CC.ID_NULL, self._current_display_media.GetPrettyInfo() )
menu.Append( CC.ID_NULL, self._current_display_media.GetPrettyAge() )
for line in self._current_display_media.GetPrettyInfoLines():
menu.Append( CC.ID_NULL, line )
menu.AppendSeparator()
@ -3103,8 +3116,10 @@ class CanvasMediaListCustomFilter( CanvasMediaListNavigable ):
menu = wx.Menu()
menu.Append( CC.ID_NULL, self._current_display_media.GetPrettyInfo() )
menu.Append( CC.ID_NULL, self._current_display_media.GetPrettyAge() )
for line in self._current_display_media.GetPrettyInfoLines():
menu.Append( CC.ID_NULL, line )
menu.AppendSeparator()

View File

@ -706,7 +706,10 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
num_chars = len( self._text_ctrl.GetValue() )
if num_chars == 0: self._lag_timer.Start( 5 * 60 * 1000, wx.TIMER_ONE_SHOT )
if num_chars == 0:
self._lag_timer.Start( 5 * 60 * 1000, wx.TIMER_ONE_SHOT )
def EventFileButton( self, event ):
@ -914,13 +917,22 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
if search_text in ( '', ':' ):
self._cache_text = ''
self._current_namespace = ''
input_just_changed = self._cache_text != ''
if self._file_service_key == CC.COMBINED_FILE_SERVICE_KEY: search_service_key = self._tag_service_key
else: search_service_key = self._file_service_key
db_not_going_to_hang_if_we_hit_it = not HydrusGlobals.client_controller.CurrentlyIdle()
matches = HydrusGlobals.client_controller.Read( 'file_system_predicates', search_service_key )
if input_just_changed or db_not_going_to_hang_if_we_hit_it:
self._cache_text = ''
self._current_namespace = ''
if self._file_service_key == CC.COMBINED_FILE_SERVICE_KEY: search_service_key = self._tag_service_key
else: search_service_key = self._file_service_key
self._cached_results = HydrusGlobals.client_controller.Read( 'file_system_predicates', search_service_key )
matches = self._cached_results
else:
@ -6090,6 +6102,109 @@ class StaticBoxSorterForListBoxTags( StaticBox ):
self._tags_box.SetTagsByMedia( media, force_reload = force_reload )
class TimeDeltaButton( wx.Button ):
def __init__( self, parent, min = 1, days = False, hours = False, minutes = False, seconds = False ):
wx.Button.__init__( self, parent )
self._min = min
self._show_days = days
self._show_hours = hours
self._show_minutes = minutes
self._show_seconds = seconds
self._value = self._min
self.SetLabelText( 'initialising' )
self.Bind( wx.EVT_BUTTON, self.EventButton )
def _RefreshLabel( self ):
text_components = []
value = self._value
if self._show_days:
days = value / 86400
if days > 0:
text_components.append( HydrusData.ConvertIntToPrettyString( days ) + ' days' )
value %= 86400
if self._show_hours:
hours = value / 3600
if hours > 0:
text_components.append( HydrusData.ConvertIntToPrettyString( hours ) + ' hours' )
value %= 3600
if self._show_minutes:
minutes = value / 60
if minutes > 0:
text_components.append( HydrusData.ConvertIntToPrettyString( minutes ) + ' minutes' )
value %= 60
if self._show_seconds:
if value > 0 or len( text_components ) == 0:
text_components.append( HydrusData.ConvertIntToPrettyString( value ) + ' seconds' )
text = ' '.join( text_components )
self.SetLabelText( text )
def EventButton( self, event ):
import ClientGUIDialogs
with ClientGUIDialogs.DialogInputTimeDelta( self, self._value, min = self._min, days = self._show_days, hours = self._show_hours, minutes = self._show_minutes, seconds = self._show_seconds ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
value = dlg.GetValue()
self.SetValue( value )
def GetValue( self ):
return self._value
def SetValue( self, value ):
self._value = value
self._RefreshLabel()
self.GetParent().Layout()
class TimeDeltaCtrl( wx.Panel ):
def __init__( self, parent, min = 1, days = False, hours = False, minutes = False, seconds = False ):

View File

@ -71,6 +71,8 @@ def ExportToHTA( parent, service_key, hashes ):
message += os.linesep * 2
message += 'If you do not know what this stuff means, click \'normal\'.'
hash_type = None
with DialogYesNo( parent, message, title = 'Choose which hash type.', yes_label = 'normal', no_label = 'alternative' ) as dlg:
result = dlg.ShowModal()
@ -2210,6 +2212,49 @@ class DialogInputTags( Dialog ):
self.EndModal( wx.ID_OK )
class DialogInputTimeDelta( Dialog ):
def __init__( self, parent, initial_value, min = 1, days = False, hours = False, minutes = False, seconds = False ):
Dialog.__init__( self, parent, 'input time delta' )
self._time_delta = ClientGUICommon.TimeDeltaCtrl( self, min = min, days = days, hours = hours, minutes = minutes, seconds = seconds )
self._ok = wx.Button( self, id = wx.ID_OK, label = 'Ok' )
self._ok.SetForegroundColour( ( 0, 128, 0 ) )
self._cancel = wx.Button( self, id = wx.ID_CANCEL, label = 'Cancel' )
self._cancel.SetForegroundColour( ( 128, 0, 0 ) )
#
self._time_delta.SetValue( initial_value )
#
b_box = wx.BoxSizer( wx.HORIZONTAL )
b_box.AddF( self._ok, CC.FLAGS_MIXED )
b_box.AddF( self._cancel, CC.FLAGS_MIXED )
vbox = wx.BoxSizer( wx.VERTICAL )
vbox.AddF( self._time_delta, CC.FLAGS_EXPAND_BOTH_WAYS )
vbox.AddF( b_box, CC.FLAGS_BUTTON_SIZER )
self.SetSizer( vbox )
( x, y ) = self.GetEffectiveMinSize()
self.SetInitialSize( ( x, y ) )
wx.CallAfter( self._ok.SetFocus )
def GetValue( self ):
return self._time_delta.GetValue()
class DialogInputUPnPMapping( Dialog ):
def __init__( self, parent, external_port, protocol_type, internal_port, description, duration ):

View File

@ -1663,7 +1663,7 @@ class DialogManageExportFoldersEdit( ClientGUIDialogs.Dialog ):
self._period_box = ClientGUICommon.StaticBox( self, 'export period' )
self._period = ClientGUICommon.TimeDeltaCtrl( self._period_box, min = 3 * 60, days = True, hours = True, minutes = True )
self._period = ClientGUICommon.TimeDeltaButton( self._period_box, min = 3 * 60, days = True, hours = True, minutes = True )
self._period.SetValue( period )
@ -2727,7 +2727,7 @@ class DialogManageImportFoldersEdit( ClientGUIDialogs.Dialog ):
self._open_popup = wx.CheckBox( self._folder_box )
self._period = ClientGUICommon.TimeDeltaCtrl( self._folder_box, min = 3 * 60, days = True, hours = True, minutes = True )
self._period = ClientGUICommon.TimeDeltaButton( self._folder_box, min = 3 * 60, days = True, hours = True, minutes = True )
self._paused = wx.CheckBox( self._folder_box )
@ -3612,7 +3612,7 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
self._thread_times_to_check = wx.SpinCtrl( thread_checker, min = 0, max = 100 )
self._thread_times_to_check.SetToolTipString( 'how many times the thread checker will check' )
self._thread_check_period = ClientGUICommon.TimeDeltaCtrl( thread_checker, min = 30, hours = True, minutes = True, seconds = True )
self._thread_check_period = ClientGUICommon.TimeDeltaButton( thread_checker, min = 30, hours = True, minutes = True, seconds = True )
self._thread_check_period.SetToolTipString( 'how long the checker will wait between checks' )
#
@ -6787,7 +6787,10 @@ class DialogManageServices( ClientGUIDialogs.Dialog ):
namespaces = HydrusData.ConvertPrettyStringsToUglyNamespaces( dlg.GetChecked() )
else: return
else:
return
name_to_display = self._GetArchiveNameToDisplay( archive_name, namespaces )
@ -7334,7 +7337,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
self._booru_selector = wx.ListBox( self._query_panel )
self._booru_selector.Bind( wx.EVT_LISTBOX, self.EventBooruSelected )
self._period = ClientGUICommon.TimeDeltaCtrl( self._query_panel, min = 3600 * 4, days = True, hours = True )
self._period = ClientGUICommon.TimeDeltaButton( self._query_panel, min = 3600 * 4, days = True, hours = True )
self._info_panel = ClientGUICommon.StaticBox( self, 'info' )

View File

@ -332,7 +332,9 @@ class FullscreenHoverFrameCommands( FullscreenHoverFrame ):
else: self._title_text.Hide()
label = self._current_media.GetPrettyInfo() + ' | ' + self._current_media.GetPrettyAge()
lines = self._current_media.GetPrettyInfoLines()
label = ' | '.join( lines )
self._info_text.SetLabelText( label )

View File

@ -2842,7 +2842,7 @@ class ManagementPanelThreadWatcherImport( ManagementPanel ):
self._thread_times_to_check.SetValue( times_to_check )
self._thread_times_to_check.Bind( wx.EVT_SPINCTRL, self.EventTimesToCheck )
self._thread_check_period = ClientGUICommon.TimeDeltaCtrl( self._options_panel, min = 30, hours = True, minutes = True, seconds = True )
self._thread_check_period = ClientGUICommon.TimeDeltaButton( self._options_panel, min = 30, hours = True, minutes = True, seconds = True )
self._thread_check_period.SetValue( check_period )
self._thread_check_period.Bind( wx.EVT_SPINCTRL, self.EventCheckPeriod )

View File

@ -534,7 +534,7 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
( selected_media, ) = self._selected_media
s += selected_files_string + ' selected, ' + selected_media.GetPrettyInfo()
s += selected_files_string + ' selected, ' + ', '.join( selected_media.GetPrettyInfoLines() )
else: # 23 files - 5 selected, selection_info
@ -2399,11 +2399,16 @@ class MediaPanelThumbnails( MediaPanel ):
# do the actual menu
if multiple_selected: menu.Append( CC.ID_NULL, HydrusData.ConvertIntToPrettyString( num_selected ) + ' files, ' + self._GetPrettyTotalSelectedSize() )
if multiple_selected:
menu.Append( CC.ID_NULL, HydrusData.ConvertIntToPrettyString( num_selected ) + ' files, ' + self._GetPrettyTotalSelectedSize() )
else:
menu.Append( CC.ID_NULL, thumbnail.GetPrettyInfo() )
menu.Append( CC.ID_NULL, thumbnail.GetPrettyAge() )
for line in thumbnail.GetPrettyInfoLines():
menu.Append( CC.ID_NULL, line )
if len( disparate_current_file_service_keys ) > 0: AddServiceKeysToMenu( menu, disparate_current_file_service_keys, 'some uploaded to', CC.ID_NULL )

View File

@ -64,13 +64,20 @@ class LocationsManager( object ):
LOCAL_LOCATIONS = { CC.LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY }
def __init__( self, current, deleted, pending, petitioned ):
def __init__( self, current, deleted, pending, petitioned, current_to_timestamps = None ):
self._current = current
self._deleted = deleted
self._pending = pending
self._petitioned = petitioned
if current_to_timestamps is None:
current_to_timestamps = {}
self._current_to_timestamps = current_to_timestamps
def DeletePending( self, service_key ):
@ -148,6 +155,30 @@ class LocationsManager( object ):
return remote_service_strings
def GetTimestamp( self, service_key = None ):
if service_key is None:
if len( self._current_to_timestamps ) > 0:
return max( self._current_to_timestamps.values() )
else:
return None
if service_key in self._current_to_timestamps:
return self._current_to_timestamps[ service_key ]
else:
return None
def HasDownloading( self ): return CC.LOCAL_FILE_SERVICE_KEY in self._pending
def HasLocal( self ): return len( self._current.intersection( self.LOCAL_LOCATIONS ) ) > 0
@ -168,6 +199,8 @@ class LocationsManager( object ):
self._current.discard( CC.TRASH_SERVICE_KEY )
self._current_to_timestamps[ service_key ] = HydrusData.GetNow()
elif action == HC.CONTENT_UPDATE_DELETE:
self._deleted.add( service_key )
@ -179,6 +212,8 @@ class LocationsManager( object ):
self._current.add( CC.TRASH_SERVICE_KEY )
self._current_to_timestamps[ CC.TRASH_SERVICE_KEY ] = self._current_to_timestamps[ CC.LOCAL_FILE_SERVICE_KEY ]
elif action == HC.CONTENT_UPDATE_UNDELETE:
@ -186,6 +221,8 @@ class LocationsManager( object ):
self._current.add( CC.LOCAL_FILE_SERVICE_KEY )
self._current_to_timestamps[ CC.LOCAL_FILE_SERVICE_KEY ] = self._current_to_timestamps[ CC.TRASH_SERVICE_KEY ]
elif action == HC.CONTENT_UPDATE_PEND:
if service_key not in self._current: self._pending.add( service_key )
@ -655,7 +692,7 @@ class MediaList( object ):
def deal_with_none( x ):
if x == None: return -1
if x is None: return -1
else: return x
@ -755,8 +792,6 @@ class MediaCollection( MediaList, Media ):
self._size = 0
self._size_definite = True
self._timestamp = 0
self._width = None
self._height = None
self._duration = None
@ -778,9 +813,6 @@ class MediaCollection( MediaList, Media ):
self._size = sum( [ media.GetSize() for media in self._sorted_media ] )
self._size_definite = not False in ( media.IsSizeDefinite() for media in self._sorted_media )
if len( self._sorted_media ) == 0: self._timestamp = 0
else: self._timestamp = max( [ media.GetTimestamp() for media in self._sorted_media ] )
duration_sum = sum( [ media.GetDuration() for media in self._sorted_media if media.HasDuration() ] )
if duration_sum > 0: self._duration = duration_sum
@ -864,9 +896,7 @@ class MediaCollection( MediaList, Media ):
def GetNumWords( self ): return sum( ( media.GetNumWords() for media in self._sorted_media ) )
def GetPrettyAge( self ): return 'imported ' + HydrusData.ConvertTimestampToPrettyAgo( self._timestamp )
def GetPrettyInfo( self ):
def GetPrettyInfoLines( self ):
size = HydrusData.ConvertIntToBytes( self._size )
@ -876,7 +906,7 @@ class MediaCollection( MediaList, Media ):
info_string += ' (' + HydrusData.ConvertIntToPrettyString( self.GetNumFiles() ) + ' files)'
return info_string
return [ info_string ]
def GetRatings( self ): return self._ratings
@ -896,7 +926,7 @@ class MediaCollection( MediaList, Media ):
def GetTagsManager( self ): return self._tags_manager
def GetTimestamp( self ): return self._timestamp
def GetTimestamp( self ): return None
def HasArchive( self ): return self._archive
@ -1026,19 +1056,14 @@ class MediaSingleton( Media ):
def GetNumWords( self ): return self._media_result.GetNumWords()
def GetTimestamp( self ):
def GetTimestamp( self, service_key = None ):
timestamp = self._media_result.GetTimestamp()
if timestamp is None: return 0
else: return timestamp
return self._media_result.GetLocationsManager().GetTimestamp( service_key )
def GetPrettyAge( self ): return 'imported ' + HydrusData.ConvertTimestampToPrettyAgo( self._media_result.GetTimestamp() )
def GetPrettyInfo( self ):
def GetPrettyInfoLines( self ):
( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._media_result.ToTuple()
( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._media_result.ToTuple()
info_string = HydrusData.ConvertIntToBytes( size ) + ' ' + HC.mime_string_lookup[ mime ]
@ -1050,7 +1075,27 @@ class MediaSingleton( Media ):
if num_words is not None: info_string += ' (' + HydrusData.ConvertIntToPrettyString( num_words ) + ' words)'
return info_string
lines = [ info_string ]
locations_manager = self._media_result.GetLocationsManager()
current_service_keys = locations_manager.GetCurrent()
if CC.LOCAL_FILE_SERVICE_KEY in current_service_keys:
timestamp = locations_manager.GetTimestamp( CC.LOCAL_FILE_SERVICE_KEY )
lines.append( 'imported ' + HydrusData.ConvertTimestampToPrettyAgo( timestamp ) )
if CC.TRASH_SERVICE_KEY in current_service_keys:
timestamp = locations_manager.GetTimestamp( CC.TRASH_SERVICE_KEY )
lines.append( 'imported ' + HydrusData.ConvertTimestampToPrettyAgo( timestamp ) + ', now in the trash' )
return lines
def GetRatings( self ): return self._media_result.GetRatings()
@ -1198,14 +1243,14 @@ class MediaResult( object ):
def __init__( self, tuple ):
# hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings
# hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings
self._tuple = tuple
def DeletePending( self, service_key ):
( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._tuple
( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._tuple
service = HydrusGlobals.client_controller.GetServicesManager().GetService( service_key )
@ -1217,33 +1262,31 @@ class MediaResult( object ):
def GetHash( self ): return self._tuple[0]
def GetDuration( self ): return self._tuple[7]
def GetDuration( self ): return self._tuple[6]
def GetInbox( self ): return self._tuple[1]
def GetLocationsManager( self ): return self._tuple[11]
def GetLocationsManager( self ): return self._tuple[10]
def GetMime( self ): return self._tuple[3]
def GetNumFrames( self ): return self._tuple[8]
def GetNumFrames( self ): return self._tuple[7]
def GetNumWords( self ): return self._tuple[9]
def GetNumWords( self ): return self._tuple[8]
def GetRatings( self ): return ( self._tuple[12], self._tuple[13] )
def GetRatings( self ): return ( self._tuple[11], self._tuple[12] )
def GetResolution( self ): return ( self._tuple[5], self._tuple[6] )
def GetResolution( self ): return ( self._tuple[4], self._tuple[5] )
def GetSize( self ): return self._tuple[2]
def GetTagsManager( self ): return self._tuple[10]
def GetTimestamp( self ): return self._tuple[4]
def GetTagsManager( self ): return self._tuple[9]
def ProcessContentUpdate( self, service_key, content_update ):
( data_type, action, row ) = content_update.ToTuple()
( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._tuple
( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._tuple
service = HydrusGlobals.client_controller.GetServicesManager().GetService( service_key )
@ -1272,7 +1315,7 @@ class MediaResult( object ):
self._tuple = ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings )
self._tuple = ( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings )
locations_manager.ProcessContentUpdate( service_key, content_update )
@ -1286,7 +1329,7 @@ class MediaResult( object ):
def ResetService( self, service_key ):
( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._tuple
( hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings ) = self._tuple
tags_manager.ResetService( service_key )
locations_manager.ResetService( service_key )

View File

@ -23,7 +23,6 @@ else:
BIN_DIR = os.path.join( BASE_DIR, 'bin' )
DB_DIR = os.path.join( BASE_DIR, 'db' )
CLIENT_ARCHIVES_DIR = os.path.join( DB_DIR, 'client_archives' )
CLIENT_CACHE_DIR = os.path.join( DB_DIR, 'client_cache' )
CLIENT_FILES_DIR = os.path.join( DB_DIR, 'client_files' )
SERVER_FILES_DIR = os.path.join( DB_DIR, 'server_files' )
CLIENT_THUMBNAILS_DIR = os.path.join( DB_DIR, 'client_thumbnails' )
@ -54,7 +53,7 @@ options = {}
# Misc
NETWORK_VERSION = 17
SOFTWARE_VERSION = 201
SOFTWARE_VERSION = 202
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -200,6 +199,7 @@ DELETED_PENDING = 4
HIGH_PRIORITY = 0
LOW_PRIORITY = 2
INTERRUPTABLE_PRIORITY = 4
SCORE_PETITION = 0

View File

@ -314,6 +314,11 @@ class HydrusController( object ):
return self._Write( action, HC.HIGH_PRIORITY, False, *args, **kwargs )
def WriteInterruptable( self, action, *args, **kwargs ):
return self._Write( action, HC.INTERRUPTABLE_PRIORITY, True, *args, **kwargs )
def WriteSynchronous( self, action, *args, **kwargs ):
return self._Write( action, HC.LOW_PRIORITY, True, *args, **kwargs )

View File

@ -17,63 +17,76 @@ import threading
import traceback
import time
CONNECTION_REFRESH_TIME = 60 * 30
def CanVacuum( db_path, stop_time = None ):
db = sqlite3.connect( db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
c = db.cursor()
( page_size, ) = c.execute( 'PRAGMA page_size;' ).fetchone()
( page_count, ) = c.execute( 'PRAGMA page_count;' ).fetchone()
( freelist_count, ) = c.execute( 'PRAGMA freelist_count;' ).fetchone()
db_size = ( page_count - freelist_count ) * page_size
if stop_time is not None:
try:
approx_vacuum_speed_mb_per_s = 1048576 * 3
db = sqlite3.connect( db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
approx_vacuum_duration = db_size / approx_vacuum_speed_mb_per_s
c = db.cursor()
time_i_will_have_to_start = stop_time - approx_vacuum_duration
( page_size, ) = c.execute( 'PRAGMA page_size;' ).fetchone()
( page_count, ) = c.execute( 'PRAGMA page_count;' ).fetchone()
( freelist_count, ) = c.execute( 'PRAGMA freelist_count;' ).fetchone()
if HydrusData.TimeHasPassed( time_i_will_have_to_start ):
db_size = ( page_count - freelist_count ) * page_size
if stop_time is not None:
return False
approx_vacuum_speed_mb_per_s = 1048576 * 3
approx_vacuum_duration = db_size / approx_vacuum_speed_mb_per_s
time_i_will_have_to_start = stop_time - approx_vacuum_duration
if HydrusData.TimeHasPassed( time_i_will_have_to_start ):
return False
temp_dir = tempfile.gettempdir()
( db_dir, db_filename ) = os.path.split( db_path )
temp_disk_usage = psutil.disk_usage( temp_dir )
a = HydrusPaths.GetDevice( temp_dir )
b = HydrusPaths.GetDevice( db_dir )
if HydrusPaths.GetDevice( temp_dir ) == HydrusPaths.GetDevice( db_dir ):
temp_dir = tempfile.gettempdir()
( db_dir, db_filename ) = os.path.split( db_path )
if temp_disk_usage.free < db_size * 2.2:
temp_disk_usage = psutil.disk_usage( temp_dir )
a = HydrusPaths.GetDevice( temp_dir )
b = HydrusPaths.GetDevice( db_dir )
if HydrusPaths.GetDevice( temp_dir ) == HydrusPaths.GetDevice( db_dir ):
return False
if temp_disk_usage.free < db_size * 2.2:
return False
else:
if temp_disk_usage.free < db_size * 1.1:
return False
db_disk_usage = psutil.disk_usage( db_dir )
if db_disk_usage.free < db_size * 1.1:
return False
else:
return True
if temp_disk_usage.free < db_size * 1.1:
return False
except Exception as e:
db_disk_usage = psutil.disk_usage( db_dir )
HydrusData.Print( 'Could not determine whether to vacuum or not:' )
if db_disk_usage.free < db_size * 1.1:
return False
HydrusData.PrintException( e )
return False
return True
def SetupDBCreatePragma( c, no_wal = False ):
@ -142,6 +155,8 @@ class HydrusDB( object ):
self._db_name = db_name
self._no_wal = no_wal
self._connection_timestamp = 0
main_db_filename = db_name
if not main_db_filename.endswith( '.db' ):
@ -153,6 +168,8 @@ class HydrusDB( object ):
self._db_filenames[ 'main' ] = main_db_filename
self._InitExternalDatabases()
if distutils.version.LooseVersion( sqlite3.sqlite_version ) < distutils.version.LooseVersion( '3.11.0' ):
self._fast_big_transaction_wal = False
@ -193,7 +210,7 @@ class HydrusDB( object ):
time.sleep( self.UPDATE_WAIT )
try: self._c.execute( 'BEGIN IMMEDIATE' )
try: self._c.execute( 'BEGIN IMMEDIATE;' )
except Exception as e:
raise HydrusExceptions.DBAccessException( HydrusData.ToUnicode( e ) )
@ -203,13 +220,24 @@ class HydrusDB( object ):
self._UpdateDB( version )
self._c.execute( 'COMMIT' )
self._c.execute( 'COMMIT;' )
except:
self._c.execute( 'ROLLBACK' )
e = Exception( 'Updating the ' + self._db_name + ' db to version ' + str( version + 1 ) + ' caused this error:' + os.linesep + traceback.format_exc() )
raise Exception( 'Updating the ' + self._db_name + ' db to version ' + str( version + 1 ) + ' caused this error:' + os.linesep + traceback.format_exc() )
try:
self._c.execute( 'ROLLBACK;' )
except Exception as rollback_e:
HydrusData.Print( 'When the update failed, attempting to rollback the database failed.' )
HydrusData.PrintException( rollback_e )
raise e
( version, ) = self._c.execute( 'SELECT version FROM version;' ).fetchone()
@ -232,7 +260,29 @@ class HydrusDB( object ):
def _AttachExternalDatabases( self ):
pass
for ( name, filename ) in self._db_filenames.items():
if name == 'main':
continue
db_path = os.path.join( self._db_dir, self._db_filenames[ name ] )
if not os.path.exists( db_path ):
db = sqlite3.connect( db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
c = db.cursor()
SetupDBCreatePragma( c, no_wal = self._no_wal )
del c
del db
self._c.execute( 'ATTACH ? AS external_' + name + ';', ( db_path, ) )
def _CleanUpCaches( self ):
@ -309,11 +359,13 @@ class HydrusDB( object ):
self._db = sqlite3.connect( db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
self._connection_timestamp = HydrusData.GetNow()
self._db.create_function( 'hydrus_hamming', 2, HydrusData.GetHammingDistance )
self._c = self._db.cursor()
self._c.execute( 'PRAGMA cache_size = -150000;' )
self._c.execute( 'PRAGMA main.cache_size = -100000;' )
self._c.execute( 'ATTACH ":memory:" AS mem;' )
@ -323,6 +375,8 @@ class HydrusDB( object ):
for db_name in db_names:
self._c.execute( 'PRAGMA ' + db_name + '.cache_size = -100000;' )
if self._no_wal:
self._c.execute( 'PRAGMA ' + db_name + '.journal_mode = TRUNCATE;' )
@ -378,6 +432,11 @@ class HydrusDB( object ):
def _InitExternalDatabases( self ):
pass
def _ManageDBError( self, job, e ):
raise NotImplementedError()
@ -393,15 +452,22 @@ class HydrusDB( object ):
try:
if job_type == 'read': self._c.execute( 'BEGIN DEFERRED' )
elif job_type in ( 'read_write', 'write' ): self._c.execute( 'BEGIN IMMEDIATE' )
in_transaction = True
if job_type in ( 'read_write', 'write' ):
self._c.execute( 'BEGIN IMMEDIATE;' )
in_transaction = True
if job_type in ( 'read', 'read_write' ): result = self._Read( action, *args, **kwargs )
elif job_type in ( 'write' ): result = self._Write( action, *args, **kwargs )
self._c.execute( 'COMMIT' )
if in_transaction:
self._c.execute( 'COMMIT;' )
in_transaction = False
for ( topic, args, kwargs ) in self._pubsubs:
@ -415,7 +481,19 @@ class HydrusDB( object ):
except Exception as e:
if in_transaction: self._c.execute( 'ROLLBACK' )
if in_transaction:
try:
self._c.execute( 'ROLLBACK;' )
except Exception as rollback_e:
HydrusData.Print( 'When the transaction failed, attempting to rollback the database failed.' )
HydrusData.PrintException( rollback_e )
self._ManageDBError( job, e )
@ -529,6 +607,11 @@ class HydrusDB( object ):
pass # no jobs in the past little while; let's just check if we should shutdown
if HydrusData.TimeHasPassed( self._connection_timestamp + CONNECTION_REFRESH_TIME ): # just to clear out the journal files
self._InitDBCursor()
self._CleanUpCaches()
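Pulling the scattered hunks above together: CONNECTION_REFRESH_TIME, _connection_timestamp and the maintenance check cooperate to reconnect the database every half hour so sqlite can clean up its journal files. The condensed, self-contained sketch below only illustrates that shape; whether the real _InitDBCursor explicitly closes the previous connection first is an assumption here, and plain time.time() stands in for HydrusData.GetNow/TimeHasPassed.

import sqlite3
import time

CONNECTION_REFRESH_TIME = 60 * 30 # half an hour, as in the hunk above

class ConnectionRefreshSketch( object ):
    
    def __init__( self, db_path ):
        
        self._db_path = db_path
        
        self._db = None
        self._c = None
        self._connection_timestamp = 0
        
        self._InitDBCursor()
        
    
    def _InitDBCursor( self ):
        
        # assumption: drop the old connection first; letting it close is what clears the journal files
        if self._db is not None:
            
            self._c.close()
            self._db.close()
            
        
        self._db = sqlite3.connect( self._db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
        
        self._c = self._db.cursor()
        
        self._connection_timestamp = int( time.time() )
        
    
    def MaintainConnection( self ):
        
        # called from the idle loop: reconnect once the half-hour refresh period has passed
        if int( time.time() ) > self._connection_timestamp + CONNECTION_REFRESH_TIME:
            
            self._InitDBCursor()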

View File

@ -937,6 +937,11 @@ def SplitIteratorIntoChunks( iterator, n ):
def SplitListIntoChunks( xs, n ):
if isinstance( xs, set ):
xs = list( xs )
for i in xrange( 0, len( xs ), n ): yield xs[ i : i + n ]
def TimeHasPassed( timestamp ):
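For context, a small usage sketch of SplitListIntoChunks above, in the same shape as the chunked session load in the gui diff earlier in this commit; read_media_results is a stand-in callable, not a real controller method.

def load_in_chunks( initial_hashes, read_media_results ):
    
    # fetch media results 256 hashes at a time so the db is never hit with one huge request
    initial_media_results = []
    
    for group_of_hashes in SplitListIntoChunks( initial_hashes, 256 ):
        
        more_media_results = read_media_results( group_of_hashes )
        
        initial_media_results.extend( more_media_results )
        
    
    return initial_media_results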

View File

@ -344,10 +344,9 @@ class Controller( HydrusController.HydrusController ):
def MaintainDB( self ):
stale_time_delta = 30 * 86400
stop_time = HydrusData.GetNow() + 10
self.WriteSynchronous( 'analyze', stale_time_delta, stop_time )
self.WriteSynchronous( 'analyze', stop_time )
def NotifyPubSubs( self ):

View File

@ -100,7 +100,7 @@ class MessageDB( object ):
'''
class DB( HydrusDB.HydrusDB ):
READ_WRITE_ACTIONS = []
READ_WRITE_ACTIONS = [ 'access_key', 'immediate_content_update', 'init', 'registration_keys' ]
def _AccountTypeExists( self, service_id, title ): return self._c.execute( 'SELECT 1 FROM account_types WHERE service_id = ? AND title = ?;', ( service_id, title ) ).fetchone() is not None
@ -217,12 +217,8 @@ class DB( HydrusDB.HydrusDB ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
affected_timestamps = [ timestamp for ( timestamp, ) in self._c.execute( 'SELECT DISTINCT timestamp FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ' AND status = ?;', ( service_id, tag_id, HC.DELETED ) ) ]
self._c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ' AND status = ?;', ( service_id, tag_id, HC.DELETED ) )
self._RefreshUpdateCache( service_id, affected_timestamps )
else:
already_deleted = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND status = ?;', ( service_id, tag_id, HC.DELETED ) ) ]
@ -312,7 +308,9 @@ class DB( HydrusDB.HydrusDB ):
def _AddToExpires( self, account_ids, timespan ): self._c.execute( 'UPDATE accounts SET expires = expires + ? WHERE account_id IN ' + HydrusData.SplayListForDB( account_ids ) + ';', ( timespan, ) )
def _Analyze( self, stale_time_delta, stop_time ):
def _Analyze( self, stop_time ):
stale_time_delta = 30 * 86400
all_names = [ name for ( name, ) in self._c.execute( 'SELECT name FROM sqlite_master;' ) ]
@ -339,7 +337,10 @@ class DB( HydrusDB.HydrusDB ):
time_took = HydrusData.GetNowPrecise() - started
HydrusData.Print( 'Analyzed ' + name + ' in ' + HydrusData.ConvertTimeDeltaToPrettyString( time_took ) )
if time_took > 1:
HydrusData.Print( 'Analyzed ' + name + ' in ' + HydrusData.ConvertTimeDeltaToPrettyString( time_took ) )
if HydrusData.TimeHasPassed( stop_time ):
@ -369,7 +370,7 @@ class DB( HydrusDB.HydrusDB ):
def _ApproveFilePetitionOptimised( self, service_id, account_id, hash_ids ):
( biggest_end, ) = self._c.execute( 'SELECT end FROM update_cache ORDER BY end DESC LIMIT 1;' ).fetchone()
( biggest_end, ) = self._c.execute( 'SELECT end FROM update_cache WHERE service_id = ? ORDER BY end DESC LIMIT 1;', ( service_id, ) ).fetchone()
self._c.execute( 'DELETE FROM file_map WHERE service_id = ? AND account_id = ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND timestamp > ?;', ( service_id, account_id, biggest_end ) )
@ -409,10 +410,6 @@ class DB( HydrusDB.HydrusDB ):
self._RewardTagParentPetitioners( service_id, old_tag_id, new_tag_id, 1 )
# get affected timestamps here?
affected_timestamps = [ timestamp for ( timestamp, ) in self._c.execute( 'SELECT DISTINCT timestamp FROM tag_parents WHERE service_id = ? AND old_tag_id = ? AND new_tag_id = ? AND status = ?;', ( service_id, old_tag_id, new_tag_id, HC.CURRENT ) ) ]
self._c.execute( 'DELETE FROM tag_parents WHERE service_id = ? AND old_tag_id = ? AND new_tag_id = ?;', ( service_id, old_tag_id, new_tag_id ) )
if status == HC.PENDING: new_status = HC.CURRENT
@ -422,8 +419,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'INSERT OR IGNORE INTO tag_parents ( service_id, account_id, old_tag_id, new_tag_id, reason_id, status, timestamp ) VALUES ( ?, ?, ?, ?, ?, ?, ? );', ( service_id, account_id, old_tag_id, new_tag_id, reason_id, new_status, now ) )
if len( affected_timestamps ) > 0: self._RefreshUpdateCache( service_id, affected_timestamps )
def _ApproveTagSiblingPetition( self, service_id, account_id, old_tag_id, new_tag_id, reason_id, status ):
@ -442,10 +437,6 @@ class DB( HydrusDB.HydrusDB ):
self._RewardTagSiblingPetitioners( service_id, old_tag_id, new_tag_id, 1 )
# get affected timestamps here?
affected_timestamps = [ timestamp for ( timestamp, ) in self._c.execute( 'SELECT DISTINCT timestamp FROM tag_siblings WHERE service_id = ? AND old_tag_id = ? AND new_tag_id = ? AND status = ?;', ( service_id, old_tag_id, new_tag_id, HC.CURRENT ) ) ]
self._c.execute( 'DELETE FROM tag_siblings WHERE service_id = ? AND old_tag_id = ? AND new_tag_id = ?;', ( service_id, old_tag_id, new_tag_id ) )
if status == HC.PENDING: new_status = HC.CURRENT
@ -455,61 +446,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'INSERT OR IGNORE INTO tag_siblings ( service_id, account_id, old_tag_id, new_tag_id, reason_id, status, timestamp ) VALUES ( ?, ?, ?, ?, ?, ?, ? );', ( service_id, account_id, old_tag_id, new_tag_id, reason_id, new_status, now ) )
if len( affected_timestamps ) > 0: self._RefreshUpdateCache( service_id, affected_timestamps )
def _AttachExternalDatabases( self ):
self._db_filenames[ 'master' ] = self._db_name + '.master.db'
master_db_path = os.path.join( self._db_dir, self._db_filenames[ 'master' ] )
if not os.path.exists( master_db_path ):
db = sqlite3.connect( master_db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
c = db.cursor()
HydrusDB.SetupDBCreatePragma( c, no_wal = self._no_wal )
c.execute( 'CREATE TABLE hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
c.execute( 'CREATE TABLE tags ( tag_id INTEGER PRIMARY KEY, tag TEXT UNIQUE );' )
del c
del db
self._c.execute( 'ATTACH ? AS external_master;', ( master_db_path, ) )
self._db_filenames[ 'mappings' ] = self._db_name + '.mappings.db'
mappings_db_path = os.path.join( self._db_dir, self._db_filenames[ 'mappings' ] )
if not os.path.exists( mappings_db_path ):
db = sqlite3.connect( mappings_db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES )
c = db.cursor()
HydrusDB.SetupDBCreatePragma( c, no_wal = self._no_wal )
c.execute( 'CREATE TABLE mapping_petitions ( service_id INTEGER, account_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, timestamp INTEGER, status INTEGER, PRIMARY KEY( service_id, account_id, tag_id, hash_id, status ) ) WITHOUT ROWID;' )
c.execute( 'CREATE INDEX mapping_petitions_service_id_account_id_reason_id_tag_id_index ON mapping_petitions ( service_id, account_id, reason_id, tag_id );' )
c.execute( 'CREATE INDEX mapping_petitions_service_id_tag_id_hash_id_index ON mapping_petitions ( service_id, tag_id, hash_id );' )
c.execute( 'CREATE INDEX mapping_petitions_service_id_status_index ON mapping_petitions ( service_id, status );' )
c.execute( 'CREATE INDEX mapping_petitions_service_id_timestamp_index ON mapping_petitions ( service_id, timestamp );' )
c.execute( 'CREATE TABLE mappings ( service_id INTEGER, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) ) WITHOUT ROWID;' )
c.execute( 'CREATE INDEX mappings_account_id_index ON mappings ( account_id );' )
c.execute( 'CREATE INDEX mappings_timestamp_index ON mappings ( timestamp );' )
del c
del db
self._c.execute( 'ATTACH ? AS external_mappings;', ( mappings_db_path, ) )
def _Backup( self ):
@ -667,15 +603,6 @@ class DB( HydrusDB.HydrusDB ):
def _CleanUpdate( self, service_key, begin, end ):
self._GenerateUpdate( service_key, begin, end )
service_id = self._GetServiceId( service_key )
self._c.execute( 'UPDATE update_cache SET dirty = ? WHERE service_id = ? AND begin = ?;', ( False, service_id, begin ) )
def _ClearBans( self ):
now = HydrusData.GetNow()
@ -788,6 +715,26 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE TABLE version ( version INTEGER, year INTEGER, month INTEGER );' )
# mappings
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_mappings.mapping_petitions ( service_id INTEGER, account_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, timestamp INTEGER, status INTEGER, PRIMARY KEY( service_id, account_id, tag_id, hash_id, status ) ) WITHOUT ROWID;' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_account_id_reason_id_tag_id_index ON mapping_petitions ( service_id, account_id, reason_id, tag_id );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_tag_id_hash_id_index ON mapping_petitions ( service_id, tag_id, hash_id );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_status_index ON mapping_petitions ( service_id, status );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_timestamp_index ON mapping_petitions ( service_id, timestamp );' )
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_mappings.mappings ( service_id INTEGER, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) ) WITHOUT ROWID;' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mappings_account_id_index ON mappings ( account_id );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mappings_timestamp_index ON mappings ( timestamp );' )
# master
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.tags ( tag_id INTEGER PRIMARY KEY, tag TEXT UNIQUE );' )
# inserts
current_time_struct = time.gmtime()
( current_year, current_month ) = ( current_time_struct.tm_year, current_time_struct.tm_mon )
@ -828,8 +775,6 @@ class DB( HydrusDB.HydrusDB ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
affected_timestamps = [ timestamp for ( timestamp, ) in self._c.execute( 'SELECT DISTINCT timestamp FROM file_map WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) ]
self._c.execute( 'DELETE FROM file_map WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ' AND status = ?;', ( service_id, HC.PETITIONED ) )
@ -837,22 +782,16 @@ class DB( HydrusDB.HydrusDB ):
self._c.executemany( 'INSERT OR IGNORE INTO file_petitions ( service_id, account_id, hash_id, reason_id, timestamp, status ) VALUES ( ?, ?, ?, ?, ?, ? );', ( ( service_id, account_id, hash_id, reason_id, now, HC.DELETED ) for hash_id in hash_ids ) )
self._RefreshUpdateCache( service_id, affected_timestamps )
def _DeleteMappings( self, service_id, account_id, tag_id, hash_ids, reason_id ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
affected_timestamps = [ timestamp for ( timestamp, ) in self._c.execute( 'SELECT DISTINCT timestamp FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) ) ]
self._c.execute( 'DELETE FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) )
self._c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ' AND status = ?;', ( service_id, tag_id, HC.PETITIONED ) )
self._c.executemany( 'INSERT OR IGNORE INTO mapping_petitions ( service_id, tag_id, hash_id, account_id, reason_id, timestamp, status ) VALUES ( ?, ?, ?, ?, ?, ?, ? );', ( ( service_id, tag_id, hash_id, account_id, reason_id, HydrusData.GetNow(), HC.DELETED ) for hash_id in hash_ids ) )
self._RefreshUpdateCache( service_id, affected_timestamps )
def _DeleteOrphans( self ):
@ -1304,15 +1243,6 @@ class DB( HydrusDB.HydrusDB ):
return [ account_type for ( account_type, ) in self._c.execute( 'SELECT account_type FROM account_types WHERE service_id = ?;', ( service_id, ) ) ]
def _GetDirtyUpdates( self, service_key ):
service_id = self._GetServiceId( service_key )
result = self._c.execute( 'SELECT begin, end FROM update_cache WHERE service_id = ? AND dirty = ?;', ( service_id, True ) ).fetchall()
return result
def _GetFile( self, hash ):
path = ServerFiles.GetPath( 'file', hash )
@ -1778,6 +1708,12 @@ class DB( HydrusDB.HydrusDB ):
self._services_over_monthly_data = set()
def _InitExternalDatabases( self ):
self._db_filenames[ 'mappings' ] = 'server.mappings.db'
self._db_filenames[ 'master' ] = 'server.master.db'
def _IterateFileUpdateContentData( self, service_id, begin, end ):
#
@ -2286,7 +2222,6 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'account_key_from_access_key': result = self._GetAccountKeyFromAccessKey( *args, **kwargs )
elif action == 'account_key_from_identifier': result = self._GetAccountKeyFromIdentifier( *args, **kwargs )
elif action == 'account_types': result = self._GetAccountTypes( *args, **kwargs )
elif action == 'dirty_updates': result = self._GetDirtyUpdates( *args, **kwargs )
elif action == 'immediate_content_update': result = self._GenerateImmediateContentUpdate( *args, **kwargs )
elif action == 'init': result = self._InitAdmin( *args, **kwargs )
elif action == 'ip': result = self._GetIPTimestamp( *args, **kwargs )
@ -2306,8 +2241,6 @@ class DB( HydrusDB.HydrusDB ):
return result
def _RefreshUpdateCache( self, service_id, affected_timestamps ): self._c.executemany( 'UPDATE update_cache SET dirty = ? WHERE service_id = ? AND ? BETWEEN begin AND end;', [ ( True, service_id, timestamp ) for timestamp in affected_timestamps ] )
def _RewardAccounts( self, service_id, score_type, scores ):
self._c.executemany( 'INSERT OR IGNORE INTO account_scores ( service_id, account_id, score_type, score ) VALUES ( ?, ?, ?, ? );', [ ( service_id, account_id, score_type, 0 ) for ( account_id, score ) in scores ] )
@ -2557,12 +2490,16 @@ class DB( HydrusDB.HydrusDB ):
HydrusData.Print( 'exporting hashes to external db' )
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
self._c.execute( 'INSERT INTO external_master.hashes SELECT * FROM main.hashes;' )
self._c.execute( 'DROP TABLE main.hashes;' )
HydrusData.Print( 'exporting tags to external db' )
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.tags ( tag_id INTEGER PRIMARY KEY, tag TEXT UNIQUE );' )
self._c.execute( 'INSERT INTO external_master.tags SELECT * FROM main.tags;' )
self._c.execute( 'DROP TABLE main.tags;' )
@ -2582,23 +2519,25 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'ALTER TABLE mapping_petitions RENAME TO mapping_petitions_old;' )
self._c.execute( 'ALTER TABLE mappings RENAME TO mappings_old;' )
self._c.execute( 'CREATE TABLE external_mappings.mapping_petitions ( service_id INTEGER, account_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, timestamp INTEGER, status INTEGER, PRIMARY KEY( service_id, account_id, tag_id, hash_id, status ) ) WITHOUT ROWID;' )
self._c.execute( 'CREATE TABLE external_mappings.mappings ( service_id INTEGER, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) ) WITHOUT ROWID;' )
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_mappings.mapping_petitions ( service_id INTEGER, account_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, timestamp INTEGER, status INTEGER, PRIMARY KEY( service_id, account_id, tag_id, hash_id, status ) ) WITHOUT ROWID;' )
self._c.execute( 'INSERT INTO mapping_petitions SELECT * FROM mapping_petitions_old;' )
self._c.execute( 'INSERT INTO mappings SELECT * FROM mappings_old;' )
self._c.execute( 'DROP TABLE mapping_petitions_old;' )
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_mappings.mappings ( service_id INTEGER, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) ) WITHOUT ROWID;' )
self._c.execute( 'INSERT INTO mappings SELECT * FROM mappings_old;' )
self._c.execute( 'DROP TABLE mappings_old;' )
self._c.execute( 'CREATE INDEX external_mappings.mapping_petitions_service_id_account_id_reason_id_tag_id_index ON mapping_petitions ( service_id, account_id, reason_id, tag_id );' )
self._c.execute( 'CREATE INDEX external_mappings.mapping_petitions_service_id_tag_id_hash_id_index ON mapping_petitions ( service_id, tag_id, hash_id );' )
self._c.execute( 'CREATE INDEX external_mappings.mapping_petitions_service_id_status_index ON mapping_petitions ( service_id, status );' )
self._c.execute( 'CREATE INDEX external_mappings.mapping_petitions_service_id_timestamp_index ON mapping_petitions ( service_id, timestamp );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_account_id_reason_id_tag_id_index ON mapping_petitions ( service_id, account_id, reason_id, tag_id );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_tag_id_hash_id_index ON mapping_petitions ( service_id, tag_id, hash_id );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_status_index ON mapping_petitions ( service_id, status );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mapping_petitions_service_id_timestamp_index ON mapping_petitions ( service_id, timestamp );' )
self._c.execute( 'CREATE INDEX external_mappings.mappings_account_id_index ON mappings ( account_id );' )
self._c.execute( 'CREATE INDEX external_mappings.mappings_timestamp_index ON mappings ( timestamp );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mappings_account_id_index ON mappings ( account_id );' )
self._c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mappings_timestamp_index ON mappings ( timestamp );' )
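Untangling the interleaved before/after lines in this hunk, the new version of the step runs roughly the sequence below for the mappings table alone; the CREATE statements now use IF NOT EXISTS, so re-executing them is a no-op when the tables and indices already exist. A runnable sketch with a couple of stand-in rows, not the exact server code:

import sqlite3

db = sqlite3.connect( ':memory:' )
c = db.cursor()

c.execute( 'ATTACH DATABASE ? AS external_mappings;', ( ':memory:', ) )

# stand-in for the pre-update main.mappings table, plus some rows
c.execute( 'CREATE TABLE mappings ( service_id INTEGER, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) ) WITHOUT ROWID;' )
c.executemany( 'INSERT INTO mappings VALUES ( ?, ?, ?, ?, ? );', [ ( 1, 10, 100, 5, 0 ), ( 1, 10, 101, 5, 0 ) ] )

# the new-version sequence, condensed from the hunk above
c.execute( 'ALTER TABLE mappings RENAME TO mappings_old;' )
c.execute( 'CREATE TABLE IF NOT EXISTS external_mappings.mappings ( service_id INTEGER, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) ) WITHOUT ROWID;' )
c.execute( 'INSERT INTO mappings SELECT * FROM mappings_old;' ) # unqualified 'mappings' now resolves to the external table
c.execute( 'DROP TABLE mappings_old;' )
c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mappings_account_id_index ON mappings ( account_id );' )
c.execute( 'CREATE INDEX IF NOT EXISTS external_mappings.mappings_timestamp_index ON mappings ( timestamp );' )

print( c.execute( 'SELECT COUNT( * ) FROM external_mappings.mappings;' ).fetchone() ) # (2,)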
#
@@ -2654,7 +2593,6 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'backup': result = self._Backup( *args, **kwargs )
elif action == 'check_data_usage': result = self._CheckDataUsage( *args, **kwargs )
elif action == 'check_monthly_data': result = self._CheckMonthlyData( *args, **kwargs )
elif action == 'clean_update': result = self._CleanUpdate( *args, **kwargs )
elif action == 'clear_bans': result = self._ClearBans( *args, **kwargs )
elif action == 'create_update': result = self._CreateUpdate( *args, **kwargs )
elif action == 'delete_orphans': result = self._DeleteOrphans( *args, **kwargs )

View File

@@ -77,36 +77,6 @@ def DAEMONGenerateUpdates( controller ):
time.sleep( 1 )
time_to_stop = HydrusData.GetNow() + 30
service_keys = controller.Read( 'service_keys', HC.REPOSITORIES )
for service_key in service_keys:
num_petitions = controller.Read( 'num_petitions', service_key )
if num_petitions == 0:
dirty_updates = controller.Read( 'dirty_updates', service_key )
for ( begin, end ) in dirty_updates:
if HydrusGlobals.view_shutdown or HydrusData.TimeHasPassed( time_to_stop ):
return
HydrusGlobals.server_busy = True
controller.WriteSynchronous( 'clean_update', service_key, begin, end )
HydrusGlobals.server_busy = False
time.sleep( 1 )
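The daemon loop above illustrates a time-budgeted maintenance pattern: take a fixed time budget, bail out early on shutdown or when the budget is spent, and mark the server busy only while a single unit of work runs. A generic sketch of the same shape, with plain functions standing in for the Hydrus controller, HydrusData and HydrusGlobals calls:

import time

def time_has_passed( timestamp ):
    
    # stand-in for HydrusData.TimeHasPassed
    return time.time() > timestamp
    

def run_budgeted_jobs( jobs, do_job, shutdown_requested = lambda: False, budget = 30 ):
    
    time_to_stop = time.time() + budget
    
    for job in jobs:
        
        # stop cleanly if shutdown was requested or the budget is spent
        if shutdown_requested() or time_has_passed( time_to_stop ):
            
            return
            
        
        # the real daemon also raises and clears a global 'server busy' flag around the work
        do_job( job )
        
        time.sleep( 1 )
        
    

run_budgeted_jobs( range( 3 ), print )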
def DAEMONUPnP( controller ):

View File

@@ -664,7 +664,7 @@ class TestClientDB( unittest.TestCase ):
( media_result, ) = self._read( 'media_results', CC.LOCAL_FILE_SERVICE_KEY, ( written_hash, ) )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_locations_manager, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
( mr_hash, mr_inbox, mr_size, mr_mime, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_locations_manager, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
now = HydrusData.GetNow()
@@ -672,8 +672,6 @@ class TestClientDB( unittest.TestCase ):
self.assertEqual( mr_inbox, True )
self.assertEqual( mr_size, size )
self.assertEqual( mr_mime, mime )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, width )
self.assertEqual( mr_height, height )
self.assertEqual( mr_duration, duration )
@@ -795,7 +793,7 @@ class TestClientDB( unittest.TestCase ):
( media_result, ) = self._read( 'media_results', CC.LOCAL_FILE_SERVICE_KEY, ( hash, ) )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_locations_manager, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
( mr_hash, mr_inbox, mr_size, mr_mime, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_locations_manager, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
now = HydrusData.GetNow()
@@ -804,8 +802,6 @@ class TestClientDB( unittest.TestCase ):
self.assertEqual( mr_size, 5270 )
self.assertEqual( mr_mime, HC.IMAGE_PNG )
self.assertEqual( mr_hash, hash )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, 200 )
self.assertEqual( mr_height, 200 )
self.assertEqual( mr_duration, None )
@@ -814,7 +810,7 @@ class TestClientDB( unittest.TestCase ):
( media_result, ) = self._read( 'media_results_from_ids', CC.LOCAL_FILE_SERVICE_KEY, ( 1, ) )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_locations_manager, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
( mr_hash, mr_inbox, mr_size, mr_mime, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_locations_manager, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
now = HydrusData.GetNow()
@@ -823,8 +819,6 @@ class TestClientDB( unittest.TestCase ):
self.assertEqual( mr_size, 5270 )
self.assertEqual( mr_mime, HC.IMAGE_PNG )
self.assertEqual( mr_hash, hash )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, 200 )
self.assertEqual( mr_height, 200 )
self.assertEqual( mr_duration, None )

View File

@@ -255,9 +255,9 @@ class TestServer( unittest.TestCase ):
info[ 'timeout' ] = 0
info[ 'hashes' ] = hashes
# hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings
# hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings
media_results = [ ClientMedia.MediaResult( ( hash, True, 500, HC.IMAGE_JPEG, 0, 640, 480, None, None, None, None, None, None, None ) ) for hash in hashes ]
media_results = [ ClientMedia.MediaResult( ( hash, True, 500, HC.IMAGE_JPEG, 640, 480, None, None, None, None, None, None, None ) ) for hash in hashes ]
HydrusGlobals.test_controller.SetRead( 'local_booru_share_keys', [ share_key ] )
HydrusGlobals.test_controller.SetRead( 'local_booru_share', info )
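For reference, the updated constructor call in this test packs its tuple as hash, inbox, size, mime, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings, with no timestamp slot. A hedged sketch of assembling such a tuple for a test; the hash bytes and the stand-in mime constant are made up here, and only the field order is taken from the call above:

import os

IMAGE_JPEG = 1 # placeholder; the real value is HC.IMAGE_JPEG from HydrusConstants

hash = os.urandom( 32 )

# ( hash, inbox, size, mime, width, height, duration, num_frames, num_words,
#   tags_manager, locations_manager, local_ratings, remote_ratings )
media_result_tuple = ( hash, True, 500, IMAGE_JPEG, 640, 480, None, None, None, None, None, None, None )

# in the test, a tuple shaped like this is what gets handed to ClientMedia.MediaResult( ... )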

View File

@@ -51,7 +51,6 @@ class Controller( object ):
HC.DB_DIR = tempfile.mkdtemp()
HC.CLIENT_ARCHIVES_DIR = os.path.join( HC.DB_DIR, 'client_archives' )
HC.CLIENT_CACHE_DIR = os.path.join( HC.DB_DIR, 'client_cache' )
HC.CLIENT_FILES_DIR = os.path.join( HC.DB_DIR, 'client_files' )
HC.CLIENT_THUMBNAILS_DIR = os.path.join( HC.DB_DIR, 'client_thumbnails' )
HC.CLIENT_UPDATES_DIR = os.path.join( HC.DB_DIR, 'client_updates' )