Version 100

Hydrus 2014-01-22 15:11:22 -06:00
parent 637eb4d39f
commit 95aeddae20
19 changed files with 3116 additions and 2711 deletions

View File

@ -8,7 +8,42 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 98</h3></li>
<li><h3>version 100</h3></li>
<ul>
<li>MVC import_controller system created</li>
<li>download gauges lag reduced</li>
<li>download cancel button desync bug fixed</li>
<li>download error code harmonised</li>
<li>download async code pushed to threads</li>
<li>download job key checking and management improved</li>
<li>download queue feed timer will obey page-hidden pause status</li>
<li>download success and exception handling harmonised</li>
<li>download on-close thread-signalling much improved</li>
<li>download pages now take responsibility for download code factories</li>
<li>download management panel code harmonised</li>
<li>download management panels made much more MVC</li>
<li>download error reporting is much improved</li>
<li>download fail management is much improved</li>
<li>download button response is much faster</li>
<li>download progress display is split into the easier-to-read file/queue/builder trichotomy</li>
<li>download display indices are less erratic</li>
<li>lots of misc improvements to download code and nomenclature</li>
<li>thumbnail page's refresh and select menu items now only appear when appropriate</li>
<li>thumbnail page's select menu will only show inbox/archive sub-items when appropriate</li>
<li>thumbnail page's menu is less buggy generally</li>
<li>db updates are now parcelised into each version, rather than one big job (see the sketch just after this changelog)</li>
<li>improved db version update notification</li>
<li>a problem with updates from ~v70 to >v95 is fixed</li>
<li>fixed a bug in popup message dismissal</li>
<li>database exception formatting improved</li>
<li>database exception display spam reduced</li>
<li>database exception redundant traceback removed</li>
<li>autocomplete character-search-delay time extended from 150ms to 250ms</li>
<li>async wx window destroy code improved in certain places</li>
<li>improved non-wx-thread popup error reporting</li>
<li>some other bugfix, grammar and nomenclature stuff I can't remember</li>
</ul>
<li><h3>version 99</h3></li>
<ul>
<li>added backup database menu option</li>
<li>added restore database menu option</li>
@ -27,7 +62,7 @@
<li>improved hydrus's thread-communication objects</li>
<li>cleaned up downloader code and logic and gui display</li>
</ul>
<li><h3>version 99</h3></li>
<li><h3>version 98</h3></li>
<ul>
<li>update to wxpython 3.0</li>
<li>you can now add tags when importing from a zip</li>

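A note on the 'db updates are now parcelised into each version' item above: the idea is that a long upgrade (say ~v70 to v100) is walked one schema version at a time rather than as a single monolithic job, so each step can report progress and fail in isolation. Below is a minimal sketch of that kind of dispatch loop; update_to_version is an assumed helper name for illustration, not the actual Hydrus db code.

import HydrusConstants as HC

def update_db_in_parcels( db, version ):
    
    # hedged sketch: one small update job per schema version, instead of one big job
    while version < HC.SOFTWARE_VERSION:
        
        db.update_to_version( version + 1 ) # assumed per-version helper
        
        version += 1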
View File

@ -25,7 +25,6 @@ import urlparse
import urllib
import yaml
import wx
import zipfile
import zlib
ID_NULL = wx.NewId()
@ -331,7 +330,7 @@ def CatchExceptionClient( etype, value, tb ):
try: message += traceback.format_exc()
except: pass
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
HC.ShowText( message )
def GenerateCollectByChoices( sort_by_choices ):
@ -629,16 +628,34 @@ def IntersectTags( tags_managers, service_identifier = HC.COMBINED_TAG_SERVICE_I
def ShowExceptionClient( e ):
if not wx.Thread_IsMain():
( etype, value, tb ) = sys.exc_info()
if etype is not None: e = type( e )( os.linesep.join( traceback.format_exception( etype, value, tb ) ) )
etype = type( e )
value = HC.u( e )
trace_list = traceback.format_stack()
trace = ''.join( trace_list )
if etype == HydrusExceptions.DBException:
value = ''
trace = HC.u( e )
else:
value = HC.u( e )
trace_list = traceback.format_stack()
trace = ''.join( trace_list )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_ERROR, ( etype, value, trace ) ) )
def ShowTextClient( text ): HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, text ) )
class AutocompleteMatches():
def __init__( self, matches ):
@ -1884,26 +1901,6 @@ fourchan_imageboards.append( Imageboard( '/vp/', 'https://sys.4chan.org/vp/post'
DEFAULT_IMAGEBOARDS.append( ( '4chan', fourchan_imageboards ) )
class Job( threading.Thread ):
def __init__( self, job_key, name ):
threading.Thread.__init__( self, name = name )
self._job_key = job_key
def _NotifyAllDone( self ): pass
def _NotifyPartDone( self, i ): pass
def _NotifyStart( self ): pass
def run( self ):
pass # think about this more
class Log():
def __init__( self ):

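The ShowExceptionClient/ShowTextClient functions above are what the rest of this commit swaps in for the old HC.pubsub.pub( 'message', ... ) calls: HydrusConstants keeps print-only defaults (see its diff further down) and the gui's popup manager installs the client versions at startup. Roughly, reporting code now only needs the following; do_work is a stand-in name for whatever risky call is being wrapped.

import HydrusConstants as HC

def do_work(): raise Exception( 'example failure' ) # stand-in for any download or db call

try: do_work()
except Exception as e: HC.ShowException( e ) # error popup once the gui is up, a plain print before that

HC.ShowText( 'Auto repo setup done!' ) # text popup; string taken from the ClientGUI diff below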
View File

@ -222,7 +222,7 @@ The database will be locked while the backup occurs, which may lock up your gui
message += os.linesep + os.linesep
message += 'If the old instance does not close for a _very_ long time, you can usually safely force-close it from task manager.'
with ClientGUIDialogs.DialogYesNo( None, message, yes_label = 'wait a bit, then try again', no_label = 'quit now' ) as dlg:
with ClientGUIDialogs.DialogYesNo( None, message, yes_label = 'wait a bit, then try again', no_label = 'forget it' ) as dlg:
if dlg.ShowModal() == wx.ID_YES: time.sleep( 3 )
else: raise HydrusExceptions.PermissionException()
@ -288,7 +288,7 @@ The database will be locked while the backup occurs, which may lock up your gui
self.SetSplashText( 'starting daemons' )
if HC.is_first_start: self._gui.DoFirstStart()
if HC.is_db_updated: wx.CallLater( 1, HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'The client has updated to version ' + HC.u( HC.SOFTWARE_VERSION ) + '!' ) )
if HC.is_db_updated: wx.CallLater( 1, HC.ShowText, 'The client has updated to version ' + HC.u( HC.SOFTWARE_VERSION ) + '!' )
self.RestartServer()
self._db.StartDaemons()
@ -315,13 +315,13 @@ The database will be locked while the backup occurs, which may lock up your gui
wx.MessageBox( 'Woah, bad error:' + os.linesep + os.linesep + traceback.format_exc() )
try: self._splash.Close()
except: pass
init_result = False
self._splash.Close()
finally:
try: self._splash.Destroy()
except: pass
return init_result
@ -367,7 +367,7 @@ The database will be locked while the backup occurs, which may lock up your gui
message = 'Something was already bound to port ' + HC.u( port )
wx.CallAfter( HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
wx.CallLater( 1, HC.ShowText, message )
except:
@ -386,7 +386,7 @@ The database will be locked while the backup occurs, which may lock up your gui
message = 'Tried to bind port ' + HC.u( port ) + ' but it failed'
wx.CallAfter( HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
wx.CallLater( 1, HC.ShowText, message )
@ -427,7 +427,7 @@ Once it is done, the client will restart.'''
self._gui.Hide()
self._gui.Destroy()
self._gui.Close()
self._db.Shutdown()

File diff suppressed because it is too large

View File

@ -81,7 +81,6 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
HC.pubsub.sub( self, 'ClearClosedPages', 'clear_closed_pages' )
HC.pubsub.sub( self, 'NewCompose', 'new_compose_frame' )
HC.pubsub.sub( self, 'NewPageImportBooru', 'new_page_import_booru' )
HC.pubsub.sub( self, 'NewPageImportGallery', 'new_page_import_gallery' )
HC.pubsub.sub( self, 'NewPageImportHDD', 'new_hdd_import' )
HC.pubsub.sub( self, 'NewPageImportThreadWatcher', 'new_page_import_thread_watcher' )
@ -335,7 +334,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
HC.app.Write( 'update_services', edit_log )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'Auto repo setup done!' ) )
HC.ShowText( 'Auto repo setup done!' )
@ -501,14 +500,6 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _CloseAllPages( self ):
while self._notebook.GetPageCount() > 0:
self._CloseCurrentPage( polite = False )
def _CloseCurrentPage( self, polite = True ):
selection = self._notebook.GetSelection()
@ -525,7 +516,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
if polite:
try: page.TryToClose()
try: page.TestAbleToClose()
except: return
@ -544,9 +535,9 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
self.RefreshMenuBar()
def _DeleteAllPages( self ):
def _DeleteAllClosedPages( self ):
for ( time_closed, selection, name, page ) in self._closed_pages: wx.CallAfter( page.Destroy )
for ( time_closed, selection, name, page ) in self._closed_pages: self._DestroyPage( page )
self._closed_pages = []
@ -563,6 +554,15 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _DestroyPage( self, page ):
page.Hide()
page.CleanBeforeDestroy()
page.Destroy()
def _FetchIP( self, service_identifier ):
with wx.TextEntryDialog( self, 'File Hash' ) as dlg:
@ -610,11 +610,14 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
try: page.TryToClose()
try: page.TestAbleToClose()
except: return
self._CloseAllPages()
while self._notebook.GetPageCount() > 0:
self._CloseCurrentPage( polite = False )
for ( page_name, c_text, args, kwargs ) in info:
@ -758,30 +761,15 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _NewPageImportBooru( self ):
def _NewPageImportGallery( self, name, type ):
with ClientGUIDialogs.DialogSelectBooru( self ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
booru = dlg.GetBooru()
new_page = ClientGUIPages.PageImportBooru( self._notebook, booru )
self._notebook.AddPage( new_page, booru.GetName(), select = True )
self._notebook.SetSelection( self._notebook.GetPageCount() - 1 )
new_page.SetSearchFocus()
new_page = ClientGUIPages.PageImportGallery( self._notebook, name, type )
def _NewPageImportGallery( self, name ):
if name == 'booru': page_name = type.GetName()
elif type is None: page_name = name
else: page_name = name + ' by ' + type
new_page = ClientGUIPages.PageImportGallery( self._notebook, name )
self._notebook.AddPage( new_page, name, select = True )
self._notebook.AddPage( new_page, page_name, select = True )
self._notebook.SetSelection( self._notebook.GetPageCount() - 1 )
@ -1176,7 +1164,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
for ( time_closed, index, name, page ) in self._closed_pages:
if time_closed + timeout < now: page.Destroy()
if time_closed + timeout < now: self._DestroyPage( page )
else: new_closed_pages.append( ( time_closed, index, name, page ) )
@ -1226,14 +1214,18 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
try: page.TryToClose()
try: page.TestAbleToClose()
except Exception as e: return
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
try: page.CleanBeforeDestroy()
except: return
self._DeleteAllPages()
self._message_manager.CleanUp()
self._message_manager.Destroy()
self._message_manager.CleanBeforeDestroy()
self._message_manager.Hide()
self.Hide()
@ -1302,7 +1294,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
print( 'garbage: ' + HC.u( gc.garbage ) )
elif command == 'delete_all_pages': self._DeleteAllPages()
elif command == 'delete_all_closed_pages': self._DeleteAllClosedPages()
elif command == 'delete_gui_session': HC.app.Write( 'gui_session', data, None )
elif command == 'delete_pending': self._DeletePending( data )
elif command == 'exit': self.EventExit( event )
@ -1330,7 +1322,6 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
elif command == 'manage_upnp': self._ManageUPnP( data )
elif command == 'modify_account': self._ModifyAccount( data )
elif command == 'new_accounts': self._GenerateNewAccounts( data )
elif command == 'new_import_booru': self._NewPageImportBooru()
elif command == 'new_import_thread_watcher': self._NewPageImportThreadWatcher()
elif command == 'new_import_url': self._NewPageImportURL()
elif command == 'new_log_page': self._NewPageLog()
@ -1418,9 +1409,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
FrameComposeMessage( empty_draft_message )
def NewPageImportBooru( self ): self._NewPageImportBooru()
def NewPageImportGallery( self, name ): self._NewPageImportGallery( name )
def NewPageImportGallery( self, gallery_name, gallery_type ): self._NewPageImportGallery( gallery_name, gallery_type )
def NewPageImportHDD( self, paths_info, advanced_import_options = {}, paths_to_tags = {}, delete_after_success = False ):
@ -1578,7 +1567,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
undo_pages = wx.Menu()
undo_pages.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete_all_pages' ), 'clear all' )
undo_pages.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete_all_closed_pages' ), 'clear all' )
undo_pages.AppendSeparator()
@ -1882,12 +1871,12 @@ class FrameComposeMessage( ClientGUICommon.Frame ):
def DeleteConversation( self, conversation_key ):
if self._draft_panel.GetConversationKey() == conversation_key: self.Destroy()
if self._draft_panel.GetConversationKey() == conversation_key: self.Close()
def DeleteDraft( self, draft_key ):
if draft_key == self._draft_panel.GetDraftKey(): self.Destroy()
if draft_key == self._draft_panel.GetDraftKey(): self.Close()
class FramePageChooser( ClientGUICommon.Frame ):
@ -1993,7 +1982,13 @@ class FramePageChooser( ClientGUICommon.Frame ):
button.SetLabel( name )
elif entry_type == 'page_import_booru': button.SetLabel( 'booru' )
elif entry_type == 'page_import_gallery': button.SetLabel( obj )
elif entry_type == 'page_import_gallery':
( name, type ) = obj
if type is None: button.SetLabel( name )
else: button.SetLabel( name + ' by ' + type )
elif entry_type == 'page_import_thread_watcher': button.SetLabel( 'thread watcher' )
elif entry_type == 'page_import_url': button.SetLabel( 'url' )
@ -2019,16 +2014,16 @@ class FramePageChooser( ClientGUICommon.Frame ):
elif menu_keyword == 'download': entries = [ ( 'page_import_url', None ), ( 'page_import_thread_watcher', None ), ( 'menu', 'gallery' ) ]
elif menu_keyword == 'gallery':
entries = [ ( 'page_import_booru', None ), ( 'page_import_gallery', 'giphy' ), ( 'page_import_gallery', 'deviant art by artist' ), ( 'menu', 'hentai foundry' ), ( 'page_import_gallery', 'newgrounds' ) ]
entries = [ ( 'page_import_booru', None ), ( 'page_import_gallery', ( 'giphy', None ) ), ( 'page_import_gallery', ( 'deviant art', 'artist' ) ), ( 'menu', 'hentai foundry' ), ( 'page_import_gallery', ( 'newgrounds', None ) ) ]
( id, password ) = HC.app.Read( 'pixiv_account' )
if id != '' and password != '': entries.append( ( 'menu', 'pixiv' ) )
entries.extend( [ ( 'page_import_gallery', 'tumblr' ) ] )
entries.extend( [ ( 'page_import_gallery', ( 'tumblr', None ) ) ] )
elif menu_keyword == 'hentai foundry': entries = [ ( 'page_import_gallery', 'hentai foundry by artist' ), ( 'page_import_gallery', 'hentai foundry by tags' ) ]
elif menu_keyword == 'pixiv': entries = [ ( 'page_import_gallery', 'pixiv by artist' ), ( 'page_import_gallery', 'pixiv by tag' ) ]
elif menu_keyword == 'hentai foundry': entries = [ ( 'page_import_gallery', ( 'hentai foundry', 'artist' ) ), ( 'page_import_gallery', ( 'hentai foundry', 'tags' ) ) ]
elif menu_keyword == 'pixiv': entries = [ ( 'page_import_gallery', ( 'pixiv', 'artist' ) ), ( 'page_import_gallery', ( 'pixiv', 'tag' ) ) ]
elif menu_keyword == 'petitions': entries = [ ( 'page_petitions', service_identifier ) for service_identifier in self._petition_service_identifiers ]
if len( entries ) <= 4:
@ -2067,13 +2062,29 @@ class FramePageChooser( ClientGUICommon.Frame ):
else:
if entry_type == 'page_query': HC.pubsub.pub( 'new_page_query', obj )
elif entry_type == 'page_import_booru': HC.pubsub.pub( 'new_page_import_booru' )
elif entry_type == 'page_import_gallery': HC.pubsub.pub( 'new_page_import_gallery', obj )
elif entry_type == 'page_import_booru':
with ClientGUIDialogs.DialogSelectBooru( self ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
booru = dlg.GetBooru()
HC.pubsub.pub( 'new_page_import_gallery', 'booru', booru )
elif entry_type == 'page_import_gallery':
( gallery_name, gallery_type ) = obj
HC.pubsub.pub( 'new_page_import_gallery', gallery_name, gallery_type )
elif entry_type == 'page_import_thread_watcher': HC.pubsub.pub( 'new_page_import_thread_watcher' )
elif entry_type == 'page_import_url': HC.pubsub.pub( 'new_page_import_url' )
elif entry_type == 'page_petitions': HC.pubsub.pub( 'new_page_petitions', obj )
self.Destroy()
self.Close()
@ -2090,7 +2101,7 @@ class FramePageChooser( ClientGUICommon.Frame ):
self.ProcessEvent( new_event )
elif event.KeyCode == wx.WXK_ESCAPE: self.Destroy()
elif event.KeyCode == wx.WXK_ESCAPE: self.Close()
else: event.Skip()
@ -2218,7 +2229,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
HC.options[ 'pause_repo_sync' ] = original_pause_status
def EventOk( self, event ): self.Destroy()
def EventOk( self, event ): self.Close()
def RefreshServices( self ): self._InitialiseServices()
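To summarise the gallery rewiring above: a gallery page is now addressed by a ( gallery_name, gallery_type ) pair rather than a combined string such as 'hentai foundry by artist', and the booru chooser passes the selected booru object as the type. Condensed from the FramePageChooser code above, the pubsub calls end up looking like this (a sketch; booru comes from DialogSelectBooru):

import HydrusConstants as HC

HC.pubsub.pub( 'new_page_import_gallery', 'deviant art', 'artist' )
HC.pubsub.pub( 'new_page_import_gallery', 'hentai foundry', 'tags' )
HC.pubsub.pub( 'new_page_import_gallery', 'tumblr', None ) # no sub-type
HC.pubsub.pub( 'new_page_import_gallery', 'booru', booru ) # booru object from the dialog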

View File

@ -585,7 +585,7 @@ class Canvas():
self._media_container.Hide()
wx.CallAfter( self._media_container.Destroy )
self._media_container.Close()
self._media_container = None

View File

@ -281,7 +281,7 @@ class AutoCompleteDropdown( wx.TextCtrl ):
def EventText( self, event ):
if len( self.GetValue() ) == 0: self._UpdateList()
else: self._lag_timer.Start( 150, wx.TIMER_ONE_SHOT )
else: self._lag_timer.Start( 250, wx.TIMER_ONE_SHOT )
class AutoCompleteDropdownContacts( AutoCompleteDropdown ):
@ -2248,9 +2248,12 @@ class PopupMessageError( PopupMessage ):
error = wx.StaticText( self, label = HC.u( etype.__name__ ), style = wx.ALIGN_CENTER )
error.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
text = wx.StaticText( self, label = HC.u( value ) )
text.Wrap( 380 )
text.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
if len( HC.u( value ) ) > 0:
text = wx.StaticText( self, label = HC.u( value ) )
text.Wrap( 380 )
text.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
self._show_tb_button = wx.Button( self, label = 'show traceback' )
self._show_tb_button.Bind( wx.EVT_BUTTON, self.EventShowButton )
@ -2267,7 +2270,7 @@ class PopupMessageError( PopupMessage ):
self._copy_tb_button.Hide()
vbox.AddF( error, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( text, FLAGS_EXPAND_PERPENDICULAR )
if len( HC.u( value ) ) > 0: vbox.AddF( text, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._show_tb_button, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._tb_text, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._copy_tb_button, FLAGS_EXPAND_PERPENDICULAR )
@ -2469,7 +2472,7 @@ class PopupMessageText( PopupMessage ):
vbox = wx.BoxSizer( wx.VERTICAL )
text = wx.StaticText( self, label = message_string ) # make this multi-line. There's an easy way to do that, right? A func that takes a pixel width, I think
text = wx.StaticText( self, label = message_string )
text.Wrap( 380 )
text.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
@ -2515,6 +2518,7 @@ class PopupMessageManager( wx.Frame ):
sys.excepthook = CC.CatchExceptionClient
HC.ShowException = CC.ShowExceptionClient
HC.ShowText = CC.ShowTextClient
def _CheckPending( self ):
@ -2636,22 +2640,18 @@ class PopupMessageManager( wx.Frame ):
self._CheckPending()
def CleanUp( self ):
def CleanBeforeDestroy( self ):
sys.excepthook = self._old_excepthook
HC.ShowException = self._old_show_exception
self.DismissAll()
self.Hide()
def Dismiss( self, window ):
self._message_vbox.Detach( window )
wx.CallAfter( window.Destroy )
window.Destroy()
self._SizeAndPositionAndShow()
@ -3607,10 +3607,7 @@ class ShowKeys( Frame ):
self.Show( True )
def EventDone( self, event ):
self.Destroy()
def EventDone( self, event ): self.Close()
def EventSaveToFile( self, event ):

View File

@ -2279,11 +2279,11 @@ class DialogInputLocalFiles( Dialog ):
message = 'Tried to read a key, but did not understand it.'
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
HC.ShowText( message )
job = HC.Job()
job_key = HC.JobKey()
def WXTHREADGetAESKey():
@ -2301,11 +2301,11 @@ class DialogInputLocalFiles( Dialog ):
( aes_key, iv ) = HydrusEncryption.AESTextToKey( key_text )
job.PutResult( ( aes_key, iv ) )
job_key.SetVariable( 'result', ( aes_key, iv ) )
except: wx.MessageBox( 'Did not understand that key!' )
elif result == wx.ID_CANCEL: job.PutResult( ( None, None ) )
elif result == wx.ID_CANCEL: job_key.SetVariable( 'result', ( None, None ) )
@ -2314,7 +2314,14 @@ class DialogInputLocalFiles( Dialog ):
wx.CallAfter( WXTHREADGetAESKey )
( aes_key, iv ) = job.GetResult()
while not job_key.HasVariable( 'result' ):
if job_key.IsCancelled(): return
time.sleep( 0.1 )
( aes_key, iv ) = job_key.GetVariable( 'result' )
if aes_key is not None:

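The encrypted-zip key dialog above also shows the new cross-thread handoff: instead of HC.Job().PutResult()/GetResult(), the waiting code polls a named JobKey variable and can bail out on cancellation. A stripped-down version of that loop, wrapped in a hypothetical helper for illustration:

import time

def wait_for_result( job_key ):
    
    # the wx thread eventually calls job_key.SetVariable( 'result', value )
    while not job_key.HasVariable( 'result' ):
        
        if job_key.IsCancelled(): return None
        
        time.sleep( 0.1 )
        
    
    return job_key.GetVariable( 'result' )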
View File

@ -20,7 +20,6 @@ import traceback
import urllib
import wx
import yaml
import zipfile
# Option Enums

File diff suppressed because it is too large

View File

@ -863,10 +863,12 @@ class MediaPanelLoading( MediaPanel ):
class MediaPanelThumbnails( MediaPanel ):
def __init__( self, parent, page_key, file_service_identifier, media_results ):
def __init__( self, parent, page_key, file_service_identifier, media_results, refreshable = True ):
MediaPanel.__init__( self, parent, page_key, file_service_identifier, media_results )
self._refreshable = refreshable
self._num_columns = 1
self._num_rows_in_client_height = 0
self._drawn_index_bounds = None
@ -1516,27 +1518,44 @@ class MediaPanelThumbnails( MediaPanel ):
thumbnail = self._GetThumbnailUnderMouse( event )
if thumbnail is not None: self._HitMedia( thumbnail, event.CmdDown(), event.ShiftDown() )
all_service_identifiers = [ media.GetFileServiceIdentifiersCDPP() for media in self._selected_media ]
selection_has_local = True in ( s_is.HasLocal() for s_is in all_service_identifiers )
selection_has_inbox = True in ( media.HasInbox() for media in self._selected_media )
selection_has_archive = True in ( media.HasArchive() for media in self._selected_media )
menu = wx.Menu()
if thumbnail is None:
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
if self._refreshable:
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
menu.AppendSeparator()
select_menu = wx.Menu()
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
if len( self._sorted_media ) > 0:
if menu.GetMenuItemCount() > 0: menu.AppendSeparator()
select_menu = wx.Menu()
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
if selection_has_archive and selection_has_inbox:
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
else:
self._HitMedia( thumbnail, event.CmdDown(), event.ShiftDown() )
if self._focussed_media is not None:
# variables
@ -1562,12 +1581,6 @@ class MediaPanelThumbnails( MediaPanel ):
user_manageable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.MANAGE_USERS ) }
admin_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.GENERAL_ADMIN ) }
all_service_identifiers = [ media.GetFileServiceIdentifiersCDPP() for media in self._selected_media ]
selection_has_local = True in ( s_is.HasLocal() for s_is in all_service_identifiers )
selection_has_inbox = True in ( media.HasInbox() for media in self._selected_media )
selection_has_archive = True in ( media.HasArchive() for media in self._selected_media )
if multiple_selected:
uploaded_phrase = 'all uploaded to'
@ -1826,20 +1839,31 @@ class MediaPanelThumbnails( MediaPanel ):
#
menu.AppendSeparator()
if self._refreshable:
menu.AppendSeparator()
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
menu.AppendSeparator()
select_menu = wx.Menu()
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
if len( self._sorted_media ) > 0:
menu.AppendSeparator()
select_menu = wx.Menu()
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
if selection_has_archive and selection_has_inbox:
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
menu.AppendSeparator()
@ -1854,7 +1878,7 @@ class MediaPanelThumbnails( MediaPanel ):
self.PopupMenu( menu )
if menu.GetMenuItemCount() > 0: self.PopupMenu( menu )
menu.Destroy()
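The thumbnail menu changes above hang off the new refreshable flag on MediaPanelThumbnails: 'refresh' is only offered when the panel can actually re-run something, and the inbox/archive select items only appear when the selection spans both states. The page that builds the panel decides the flag; condensed from the PageQuery code later in this commit (names as in that diff, shown here without self):

# a page restored purely from saved media results has no query to re-run
refreshable = len( initial_predicates ) > 0 or len( initial_media_results ) == 0

media_panel = ClientGUIMedia.MediaPanelThumbnails( parent, page_key, file_service_identifier, media_results, refreshable = refreshable )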

View File

@ -433,7 +433,7 @@ class ConversationPanel( wx.Panel ):
self._drafts_vbox.Detach( draft_panel )
draft_panel.Destroy()
draft_panel.Close()
self._scrolling_messages_window.FitInside()
@ -522,7 +522,7 @@ class ConversationSplitter( wx.SplitterWindow ):
self.ReplaceWindow( self._conversation_panel, new_panel )
self._conversation_panel.Destroy()
self._conversation_panel.Close()
self._conversation_panel = new_panel
@ -694,7 +694,7 @@ class DestinationPanel( wx.Panel ):
self._hbox.Replace( self._status_panel, new_status_panel )
self._status_panel.Destroy()
self._status_panel.Close()
self._status_panel = new_status_panel
@ -1285,7 +1285,7 @@ class DraftPanel( wx.Panel ):
message = 'The hydrus client could not connect to your message depot, so the message could not be sent!'
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
HC.ShowText( message )
return
@ -1413,7 +1413,7 @@ class MessagePanel( wx.Panel ):
self._hbox.Replace( self._body_panel, body_panel )
self._body_panel.Destroy()
self._body_panel.Close()
self._body_panel = body_panel

View File

@ -6,6 +6,7 @@ import ClientGUIManagement
import ClientGUIMedia
import ClientGUIMessages
import ClientGUICanvas
import HydrusDownloading
import inspect
import os
import sys
@ -39,11 +40,21 @@ class PageBase():
self._starting_from_session = starting_from_session
self._page_key = os.urandom( 32 )
self._InitControllers()
self._pretty_status = ''
HC.pubsub.sub( self, 'SetPrettyStatus', 'new_page_status' )
def _InitControllers( self ): pass
def _PauseControllers( self ): pass
def _ResumeControllers( self ): pass
def CleanBeforeDestroy( self ): pass
def GetPrettyStatus( self ): return self._pretty_status
def GetSashPositions( self ):
@ -61,6 +72,8 @@ class PageBase():
def Pause( self ):
self._PauseControllers()
HC.pubsub.pub( 'pause', self._page_key )
HC.pubsub.pub( 'set_focus', self._page_key, None )
@ -86,9 +99,14 @@ class PageBase():
def ShowHideSplit( self ): pass
def TryToClose( self ): pass
def TestAbleToClose( self ): pass
def Resume( self ): HC.pubsub.pub( 'resume', self._page_key )
def Resume( self ):
self._ResumeControllers()
HC.pubsub.pub( 'resume', self._page_key )
class PageLog( PageBase, wx.Panel ):
@ -204,7 +222,7 @@ class PageMessages( PageBase, wx.SplitterWindow ):
else: self._search_preview_split.SplitHorizontally( self._management_panel, self._preview_panel, HC.options[ 'vpos' ] )
def TryToClose( self ): self._management_panel.TryToClose()
def TestAbleToClose( self ): self._management_panel.TestAbleToClose()
class PageWithMedia( PageBase, wx.SplitterWindow ):
@ -243,6 +261,8 @@ class PageWithMedia( PageBase, wx.SplitterWindow ):
HC.pubsub.sub( self, 'SwapMediaPanel', 'swap_media_panel' )
def CleanBeforeDestroy( self ): self._management_panel.CleanBeforeDestroy()
def EventPreviewUnsplit( self, event ): self._search_preview_split.Unsplit( self._preview_panel )
def EventUnsplit( self, event ): self.Unsplit( self._search_preview_split )
@ -285,18 +305,71 @@ class PageWithMedia( PageBase, wx.SplitterWindow ):
self.ReplaceWindow( self._media_panel, new_panel )
self._media_panel.Destroy()
self._media_panel.Close()
self._media_panel = new_panel
def TryToClose( self ): self._management_panel.TryToClose()
def TestAbleToClose( self ): self._management_panel.TestAbleToClose()
class PageImport( PageWithMedia ):
def _GenerateImportArgsGeneratorFactory( self ):
def factory( job_key, item ):
advanced_import_options = self._management_panel.GetAdvancedImportOptions()
return HydrusDownloading.ImportArgsGenerator( job_key, item, advanced_import_options )
return factory
def _GenerateImportQueueGeneratorFactory( self ):
def factory( job_key, item ):
return HydrusDownloading.ImportQueueGenerator( job_key, item )
return factory
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_media_results )
def _InitControllers( self ):
import_args_generator_factory = self._GenerateImportArgsGeneratorFactory()
import_queue_generator_factory = self._GenerateImportQueueGeneratorFactory()
self._import_controller = HydrusDownloading.ImportController( import_args_generator_factory, import_queue_generator_factory, page_key = self._page_key )
self._import_controller.StartThread()
def _PauseControllers( self ):
controller_job_key = self._import_controller.GetJobKey( 'controller' )
controller_job_key.Pause()
def _ResumeControllers( self ):
controller_job_key = self._import_controller.GetJobKey( 'controller' )
controller_job_key.Resume()
def CleanBeforeDestroy( self ):
PageWithMedia.CleanBeforeDestroy( self )
self._import_controller.CleanBeforeDestroy()
def GetSessionArgs( self ):
hashes = [ media.GetHash() for media in self._media_panel.GetFlatMedia() ]
@ -307,55 +380,201 @@ class PageImport( PageWithMedia ):
return ( args, kwargs )
class PageImportBooru( PageImport ):
def __init__( self, parent, booru, initial_hashes = [], starting_from_session = False ):
self._booru = booru
PageImport.__init__( self, parent, initial_hashes = initial_hashes, starting_from_session = starting_from_session )
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedBooru( self._search_preview_split, self, self._page_key, self._booru, starting_from_session = self._starting_from_session )
def GetSessionArgs( self ):
hashes = [ media.GetHash() for media in self._media_panel.GetFlatMedia() ]
args = ( self._booru, )
kwargs = { 'initial_hashes' : hashes }
return ( args, kwargs )
class PageImportGallery( PageImport ):
def __init__( self, parent, name, initial_hashes = [], starting_from_session = False ):
def __init__( self, parent, gallery_name, gallery_type, initial_hashes = [], starting_from_session = False ):
self._name = name
self._gallery_name = gallery_name
self._gallery_type = gallery_type
PageImport.__init__( self, parent, initial_hashes = initial_hashes, starting_from_session = starting_from_session )
def _GenerateImportArgsGeneratorFactory( self ):
def factory( job_key, item ):
advanced_import_options = self._management_panel.GetAdvancedImportOptions()
advanced_tag_options = self._management_panel.GetAdvancedTagOptions()
downloaders_factory = self._GetDownloadersFactory()
return HydrusDownloading.ImportArgsGeneratorGallery( job_key, item, advanced_import_options, advanced_tag_options, downloaders_factory )
return factory
def _GenerateImportQueueGeneratorFactory( self ):
def factory( job_key, item ):
downloaders_factory = self._GetDownloadersFactory()
return HydrusDownloading.ImportQueueGeneratorGallery( job_key, item, downloaders_factory )
return factory
def _GetDownloadersFactory( self ):
if self._gallery_name == 'booru':
def downloaders_factory( raw_tags ):
booru = self._gallery_type
tags = raw_tags.split( ' ' )
return ( HydrusDownloading.DownloaderBooru( booru, tags ), )
elif self._gallery_name == 'deviant art':
if self._gallery_type == 'artist':
def downloaders_factory( artist ):
return ( HydrusDownloading.DownloaderDeviantArt( artist ), )
elif self._gallery_name == 'giphy':
def downloaders_factory( tag ):
return ( HydrusDownloading.DownloaderGiphy( tag ), )
elif self._gallery_name == 'hentai foundry':
if self._gallery_type == 'artist':
def downloaders_factory( artist ):
advanced_hentai_foundry_options = self._management_panel.GetAdvancedHentaiFoundryOptions()
pictures_downloader = HydrusDownloading.DownloaderHentaiFoundry( 'artist pictures', artist, advanced_hentai_foundry_options )
scraps_downloader = HydrusDownloading.DownloaderHentaiFoundry( 'artist scraps', artist, advanced_hentai_foundry_options )
return ( pictures_downloader, scraps_downloader )
elif self._gallery_type == 'tags':
def downloaders_factory( raw_tags ):
advanced_hentai_foundry_options = self._management_panel.GetAdvancedHentaiFoundryOptions()
tags = raw_tags.split( ' ' )
return ( HydrusDownloading.DownloaderHentaiFoundry( 'tags', tags, advanced_hentai_foundry_options ), )
elif self._gallery_name == 'newgrounds':
def downloaders_factory( artist ):
return ( HydrusDownloading.DownloaderNewgrounds( artist ), )
elif self._gallery_name == 'pixiv':
if self._gallery_type == 'artist':
def downloaders_factory( artist ):
return ( HydrusDownloading.DownloaderPixiv( 'artist', artist ), )
elif self._gallery_type == 'tag':
def downloaders_factory( tag ):
return ( HydrusDownloading.DownloaderPixiv( 'tag', tag ), )
elif self._gallery_name == 'tumblr':
def downloaders_factory( username ):
return ( HydrusDownloading.DownloaderTumblr( username ), )
return downloaders_factory
def _InitManagementPanel( self ):
if self._name == 'deviant art by artist': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedDeviantArt
elif self._name == 'hentai foundry by artist': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedHentaiFoundryArtist
elif self._name == 'hentai foundry by tags': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedHentaiFoundryTags
elif self._name == 'giphy': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedGiphy
elif self._name == 'newgrounds': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedNewgrounds
elif self._name == 'pixiv by artist': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedPixivArtist
elif self._name == 'pixiv by tag': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedPixivTag
elif self._name == 'tumblr': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedTumblr
if self._gallery_name == 'hentai foundry':
name = 'hentai foundry'
namespaces = [ 'creator', 'title', '' ]
if self._gallery_type == 'artist': initial_search_value = 'artist username'
elif self._gallery_type == 'tags': initial_search_value = 'search tags'
self._management_panel = ClientGUIManagement.ManagementPanelImportsGalleryHentaiFoundry( self._search_preview_split, self, self._page_key, self._import_controller, name, namespaces, initial_search_value, starting_from_session = self._starting_from_session )
else:
if self._gallery_name == 'booru':
booru = self._gallery_type
name = booru.GetName()
namespaces = booru.GetNamespaces()
initial_search_value = 'search tags'
elif self._gallery_name == 'deviant art':
if self._gallery_type == 'artist':
name = 'deviant art'
namespaces = [ 'creator', 'title', '' ]
initial_search_value = 'artist username'
elif self._gallery_name == 'giphy':
name = 'giphy'
namespaces = [ '' ]
self._management_panel = c( self._search_preview_split, self, self._page_key, starting_from_session = self._starting_from_session )
initial_search_value = 'search tag'
elif self._gallery_name == 'newgrounds':
name = 'newgrounds'
namespaces = [ 'creator', 'title', '' ]
initial_search_value = 'artist username'
elif self._gallery_name == 'pixiv':
name = 'pixiv'
namespaces = [ 'creator', 'title', '' ]
if self._gallery_type == 'artist': initial_search_value = 'artist username'
elif self._gallery_type == 'tag': initial_search_value = 'search tag'
elif self._gallery_name == 'tumblr':
name = 'tumblr'
namespaces = [ '' ]
initial_search_value = 'username'
self._management_panel = ClientGUIManagement.ManagementPanelImportsGallery( self._search_preview_split, self, self._page_key, self._import_controller, name, namespaces, initial_search_value, starting_from_session = self._starting_from_session )
def GetSessionArgs( self ):
hashes = [ media.GetHash() for media in self._media_panel.GetFlatMedia() ]
args = ( self._name, )
args = ( self._gallery_name, self._gallery_type )
kwargs = { 'initial_hashes' : hashes }
return ( args, kwargs )
@ -372,8 +591,20 @@ class PageImportHDD( PageImport ):
PageImport.__init__( self, parent, initial_hashes = initial_hashes, starting_from_session = starting_from_session )
self._import_controller.PendImportQueue( self._paths_info )
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportHDD( self._search_preview_split, self, self._page_key, self._paths_info, advanced_import_options = self._advanced_import_options, paths_to_tags = self._paths_to_tags, delete_after_success = self._delete_after_success, starting_from_session = self._starting_from_session )
def _GenerateImportArgsGeneratorFactory( self ):
def factory( job_key, item ):
return HydrusDownloading.ImportArgsGeneratorHDD( job_key, item, self._advanced_import_options, self._paths_to_tags, self._delete_after_success )
return factory
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportHDD( self._search_preview_split, self, self._page_key, self._import_controller, starting_from_session = self._starting_from_session )
def GetSessionArgs( self ):
@ -387,11 +618,60 @@ class PageImportHDD( PageImport ):
class PageImportThreadWatcher( PageImport ):
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportThreadWatcher( self._search_preview_split, self, self._page_key, starting_from_session = self._starting_from_session )
def _GenerateImportArgsGeneratorFactory( self ):
def factory( job_key, item ):
advanced_import_options = self._management_panel.GetAdvancedImportOptions()
advanced_tag_options = self._management_panel.GetAdvancedTagOptions()
# fourchan_board should be on the job_key or whatever. it is stuck on initial queue generation
# we should not be getting it from the management_panel
# we should have access to this info from the job_key or w/e
return HydrusDownloading.ImportArgsGeneratorThread( job_key, item, advanced_import_options, advanced_tag_options )
return factory
def _GenerateImportQueueGeneratorFactory( self ):
def factory( job_key, item ):
return HydrusDownloading.ImportQueueGeneratorThread( job_key, item )
return factory
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportThreadWatcher( self._search_preview_split, self, self._page_key, self._import_controller, starting_from_session = self._starting_from_session )
class PageImportURL( PageImport ):
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueURL( self._search_preview_split, self, self._page_key, starting_from_session = self._starting_from_session )
def _GenerateImportArgsGeneratorFactory( self ):
def factory( job_key, item ):
advanced_import_options = self._management_panel.GetAdvancedImportOptions()
return HydrusDownloading.ImportArgsGeneratorURLs( job_key, item, advanced_import_options )
return factory
def _GenerateImportQueueGeneratorFactory( self ):
def factory( job_key, item ):
return HydrusDownloading.ImportQueueGeneratorURLs( job_key, item )
return factory
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportsURL( self._search_preview_split, self, self._page_key, self._import_controller, starting_from_session = self._starting_from_session )
class PagePetitions( PageWithMedia ):
@ -430,7 +710,12 @@ class PageQuery( PageWithMedia ):
def _InitMediaPanel( self ):
if len( self._initial_media_results ) == 0: self._media_panel = ClientGUIMedia.MediaPanelNoQuery( self, self._page_key, self._file_service_identifier )
else: self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_media_results )
else:
refreshable = len( self._initial_predicates ) > 0 or len( self._initial_media_results ) == 0
self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_media_results, refreshable = refreshable )
def GetSessionArgs( self ):

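The net effect of the PageImport rewrite above: a page no longer drives downloads itself but hands two factory callables to the new HydrusDownloading.ImportController, then steers everything through the controller's job keys. Condensed from the page code above, the lifecycle is roughly:

# the two factories come from the page's _GenerateImportArgsGeneratorFactory /
# _GenerateImportQueueGeneratorFactory methods shown above
import_controller = HydrusDownloading.ImportController( import_args_generator_factory, import_queue_generator_factory, page_key = page_key )

import_controller.StartThread()

controller_job_key = import_controller.GetJobKey( 'controller' )

controller_job_key.Pause() # page hidden
controller_job_key.Resume() # page shown again

import_controller.CleanBeforeDestroy() # page closed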
View File

@ -16,7 +16,7 @@ def Parse4chanPostScreen( html ):
message = 'You are banned from this board! html written to log.'
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
HC.ShowText( message )
return ( 'big error', message )
@ -33,7 +33,7 @@ def Parse4chanPostScreen( html ):
message = 'Unknown problem; html written to log.'
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
HC.ShowText( message )
return ( 'error', message )

View File

@ -45,7 +45,7 @@ TEMP_DIR = BASE_DIR + os.path.sep + 'temp'
# Misc
NETWORK_VERSION = 13
SOFTWARE_VERSION = 99
SOFTWARE_VERSION = 100
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -1037,23 +1037,6 @@ def ConvertZoomToPercentage( zoom ):
return pretty_zoom
def ShowExceptionDefault( e ):
etype = type( e )
value = u( e )
trace_list = traceback.format_stack()
trace = ''.join( trace_list )
message = u( etype.__name__ ) + ': ' + u( value ) + os.linesep + u( trace )
try: print( message )
except: print( repr( message ) )
ShowException = ShowExceptionDefault
def GetEmptyDataDict():
data = collections.defaultdict( default_dict_list )
@ -1114,20 +1097,6 @@ def MergeKeyToListDicts( key_to_list_dicts ):
return result
def u( text_producing_object ):
if type( text_producing_object ) in ( str, unicode, bs4.element.NavigableString ): text = text_producing_object
else:
try: text = str( text_producing_object ) # dealing with exceptions, etc...
except: text = repr( text_producing_object )
try: return unicode( text )
except:
try: return text.decode( locale.getpreferredencoding() )
except: return str( text )
def SearchEntryMatchesPredicate( search_entry, predicate ):
@ -1168,6 +1137,29 @@ def SearchEntryMatchesTag( search_entry, tag, search_siblings = True ):
return False
def ShowExceptionDefault( e ):
etype = type( e )
value = u( e )
trace_list = traceback.format_stack()
trace = ''.join( trace_list )
message = u( etype.__name__ ) + ': ' + u( value ) + os.linesep + u( trace )
try: print( message )
except: print( repr( message ) )
ShowException = ShowExceptionDefault
def ShowTextDefault( text ):
print( text )
ShowText = ShowTextDefault
def SplayListForDB( xs ): return '(' + ','.join( [ '"' + u( x ) + '"' for x in xs ] ) + ')'
def SplayTupleListForDB( first_column_name, second_column_name, xys ): return ' OR '.join( [ '( ' + first_column_name + '=' + u( x ) + ' AND ' + second_column_name + ' IN ' + SplayListForDB( ys ) + ' )' for ( x, ys ) in xys ] )
@ -1190,7 +1182,21 @@ def ThumbnailResolution( original_resolution, target_resolution ):
return ( int( round( original_width ) ), int( round( original_height ) ) )
def u( text_producing_object ):
if type( text_producing_object ) in ( str, unicode, bs4.element.NavigableString ): text = text_producing_object
else:
try: text = str( text_producing_object ) # dealing with exceptions, etc...
except: text = repr( text_producing_object )
try: return unicode( text )
except:
try: return text.decode( locale.getpreferredencoding() )
except: return str( text )
class AdvancedHTTPConnection():
def __init__( self, url = '', scheme = 'http', host = '', port = None, service_identifier = None, accept_cookies = False ):
@ -2043,16 +2049,28 @@ class DAEMONWorker( DAEMON ):
def set( self, *args, **kwargs ): self._event.set()
class Job():
class JobDatabase():
yaml_tag = u'!Job'
yaml_tag = u'!JobDatabase'
def __init__( self ):
def __init__( self, action, type, synchronous, *args, **kwargs ):
self._action = action
self._type = type
self._synchronous = synchronous
self._args = args
self._kwargs = kwargs
self._result = None
self._result_ready = threading.Event()
def GetAction( self ): return self._action
def GetArgs( self ): return self._args
def GetKWArgs( self ): return self._kwargs
def GetResult( self ):
while True:
@ -2069,13 +2087,19 @@ class Job():
trace_list = traceback.format_stack()
my_trace = ''.join( trace_list )
my_trace = 'Stack Trace (most recent call last):' + os.linesep + os.linesep + os.linesep.join( trace_list )
raise etype( my_trace + os.linesep + db_traceback )
full_message = os.linesep.join( ( 'GUI Thread:', my_trace, 'DB Thread:', db_traceback ) )
raise HydrusExceptions.DBException( full_message )
else: return self._result
def GetType( self ): return self._type
def IsSynchronous( self ): return self._synchronous
def PutResult( self, result ):
self._result = result
@ -2083,31 +2107,6 @@ class Job():
self._result_ready.set()
class JobInternal( Job ):
yaml_tag = u'!JobInternal'
def __init__( self, action, type, synchronous, *args, **kwargs ):
Job.__init__( self )
self._action = action
self._type = type
self._synchronous = synchronous
self._args = args
self._kwargs = kwargs
def GetAction( self ): return self._action
def GetArgs( self ): return self._args
def GetKWArgs( self ): return self._kwargs
def GetType( self ): return self._type
def IsSynchronous( self ): return self._synchronous
class JobKey():
def __init__( self ):
@ -2119,6 +2118,9 @@ class JobKey():
self._cancelled = threading.Event()
self._paused = threading.Event()
self._variable_lock = threading.Lock()
self._variables = dict()
def __eq__( self, other ): return self.__hash__() == other.__hash__()
@ -2139,13 +2141,23 @@ class JobKey():
def GetKey( self ): return self._key
def GetVariable( self, name ):
with self._variable_lock: return self._variables[ name ]
def HasVariable( self, name ):
with self._variable_lock: return name in self._variables
def IsBegun( self ): return self._begun.is_set()
def IsCancelled( self ): return self._cancelled.is_set()
def IsCancelled( self ): return shutdown or self._cancelled.is_set()
def IsDone( self ): return self._done.is_set()
def IsDone( self ): return shutdown or self._done.is_set()
def IsPaused( self ): return self.IsWorking() and self._paused.is_set()
def IsPaused( self ): return self._paused.is_set()
def IsWorking( self ): return self.IsBegun() and not self.IsDone()
@ -2159,16 +2171,73 @@ class JobKey():
def Resume( self ): self._paused.clear()
def SetVariable( self, name, value ):
with self._variable_lock: self._variables[ name ] = value
def WaitOnPause( self ):
while self._paused.is_set():
time.sleep( 0.1 )
if shutdown or self.IsCancelled() or self.IsDone(): return
if shutdown or self.IsDone(): return
class JobNetwork():
yaml_tag = u'!JobNetwork'
def __init__( self, request_type, request, headers = {}, body = None, response_to_path = False, redirects_permitted = 4, service_identifier = None ):
self._request_type = request_type
self._request = request
self._headers = headers
self._body = body
self._response_to_path = response_to_path
self._redirects_permitted = redirects_permitted
self._service_identifier = service_identifier
self._result = None
self._result_ready = threading.Event()
def ToTuple( self ): return ( self._request_type, self._request, self._headers, self._body, self._response_to_path, self._redirects_permitted, self._service_identifier )
def GetResult( self ):
while True:
if self._result_ready.wait( 5 ) == True: break
elif shutdown: raise Exception( 'Application quit before network could serve result!' )
if issubclass( type( self._result ), Exception ):
etype = type( self._result )
network_traceback = unicode( self._result )
trace_list = traceback.format_stack()
my_trace = 'Stack Trace (most recent call last):' + os.linesep + os.linesep + os.linesep.join( trace_list )
full_message = os.linesep.join( ( 'Calling Thread:', my_trace, 'Network Thread:', network_traceback ) )
raise etype( full_message )
else: return self._result
def PutResult( self, result ):
self._result = result
self._result_ready.set()
class Message():
def __init__( self, message_type, info ):

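JobKey is the other half of the new plumbing: alongside its begun/done/cancelled/paused events it now carries a lock-protected variable store, which is how the download threads publish status text, gauge range/value pairs and pending queues without bespoke wiring. A small sketch of both sides, using only the methods shown above:

job_key = JobKey() # as defined in the HydrusConstants diff above

# worker thread
job_key.SetVariable( 'status', 'downloading' )
job_key.SetVariable( 'range', 100 )
job_key.SetVariable( 'value', 5 )

if job_key.IsPaused():
    
    job_key.SetVariable( 'status', 'paused' )
    
    job_key.WaitOnPause() # returns early if the job finishes or the app shuts down
    

# gui thread, e.g. from a gauge-update timer
if job_key.HasVariable( 'status' ): status = job_key.GetVariable( 'status' )

job_key.Pause() # page hidden
job_key.Resume() # page shown again
job_key.Cancel() # note IsCancelled() also reports True on application shutdown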
View File

@ -1,15 +1,20 @@
import bs4
import ClientParsers
import collections
import httplib
import HydrusConstants as HC
import HydrusExceptions
import json
import lxml
import os
import pafy
import threading
import time
import traceback
import urllib
import urlparse
import wx
import zipfile
def ConvertServiceIdentifiersToTagsToServiceIdentifiersToContentUpdates( hash, service_identifiers_to_tags ):
@ -1054,110 +1059,779 @@ class DownloaderTumblr( Downloader ):
def GetTags( self, url, tags ): return tags
class DownloaderEngine(): # rename this to something more import related
class ImportArgsGenerator():
# this should be a yamlable thing
def __init__( self, page_key, import_queue_generator ):
def __init__( self, job_key, item, advanced_import_options ):
self._page_key = page_key
self._import_queue_generator = import_queue_generator
self._current_queue_processor = None
self._pending_queue_jobs = []
self._job_key = job_key
self._item = item
self._advanced_import_options = advanced_import_options
def GetCurrentQueueProcessor( self ): return self._current_queue_processor
def ToTuple( self ): return ( self._pending_queue_jobs, )
def PendQueueJob( self, job ):
def __call__( self ):
self._pending_queue_jobs.append( job )
def THREADProcessJobs( self ):
while True:
try:
if len( self._pending_queue_jobs ) > 0:
( result, media_result ) = self._CheckCurrentStatus()
if result == 'new':
job = self._pending_queue_jobs.pop( 0 )
( temp_path, service_identifiers_to_tags, url ) = self._GetArgs()
self._current_queue_processor = self._import_queue_generator( job )
self._job_key.SetVariable( 'status', 'importing' )
self._current_queue_processor.ProcessQueue()
( result, media_result ) = HC.app.WriteSynchronous( 'import_file', temp_path, advanced_import_options = self._advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, generate_media_result = True, url = url )
else: time.sleep( 0.1 )
self._job_key.SetVariable( 'result', result )
if result in ( 'successful', 'redundant' ):
page_key = self._job_key.GetVariable( 'page_key' )
if media_result is not None and page_key is not None:
HC.pubsub.pub( 'add_media_results', page_key, ( media_result, ) )
self._job_key.SetVariable( 'status', '' )
self._job_key.Finish()
self._CleanUp() # e.g. possibly delete the file for hdd importargsgenerator
except Exception as e:
self._job_key.SetVariable( 'result', 'failed' )
HC.ShowException( e )
time.sleep( 2 )
self._job_key.Cancel()
class ImportQueueProcessor():
def _CleanUp( self ): pass
def __init__( self, page_key, import_args_generator ):
def _CheckCurrentStatus( self ): return ( 'new', None )
class ImportArgsGeneratorGallery( ImportArgsGenerator ):
def __init__( self, job_key, item, advanced_import_options, advanced_tag_options, downloaders_factory ):
ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
self._advanced_tag_options = advanced_tag_options
self._downloaders_factory = downloaders_factory
def _GetArgs( self ):
url_args = self._item
url = url_args[0]
self._job_key.SetVariable( 'status', 'downloading' )
downloader = self._downloaders_factory( 'example' )[0]
def hook( range, value ):
self._job_key.SetVariable( 'range', range )
self._job_key.SetVariable( 'value', value )
downloader.AddReportHook( hook )
do_tags = len( self._advanced_tag_options ) > 0
if do_tags: ( temp_path, tags ) = downloader.GetFileAndTags( *url_args )
else:
temp_path = downloader.GetFile( *url_args )
tags = []
downloader.ClearReportHooks()
service_identifiers_to_tags = ConvertTagsToServiceIdentifiersToTags( tags, self._advanced_tag_options )
return ( temp_path, service_identifiers_to_tags, url )
def _CheckCurrentStatus( self ):
url_args = self._item
url = url_args[0]
self._job_key.SetVariable( 'status', 'checking url status' )
downloader = self._downloaders_factory( 'example' )[0]
( status, hash ) = HC.app.Read( 'url_status', url )
if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options: status = 'new'
if status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
do_tags = len( self._advanced_tag_options ) > 0
if do_tags:
tags = downloader.GetTags( *url_args )
service_identifiers_to_tags = ConvertTagsToServiceIdentifiersToTags( tags, self._advanced_tag_options )
service_identifiers_to_content_updates = ConvertServiceIdentifiersToTagsToServiceIdentifiersToContentUpdates( hash, service_identifiers_to_tags )
HC.app.Write( 'content_updates', service_identifiers_to_content_updates )
return ( status, media_result )
else: return ( status, None )
class ImportArgsGeneratorHDD( ImportArgsGenerator ):
def __init__( self, job_key, item, advanced_import_options, paths_to_tags, delete_after_success ):
ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
self._paths_to_tags = paths_to_tags
self._delete_after_success = delete_after_success
def _CleanUp( self ):
result = self._job_key.GetVariable( 'result' )
if self._delete_after_success and result in ( 'successful', 'redundant' ):
( path_type, path_info ) = self._item
if path_type == 'path':
path = path_info
try: os.remove( path )
except: pass
def _GetArgs( self ):
self._job_key.SetVariable( 'status', 'reading from hdd' )
( path_type, path_info ) = self._item
service_identifiers_to_tags = {}
if path_type == 'path':
path = path_info
if path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ path ]
elif path_type == 'zip':
( zip_path, name ) = path_info
path = HC.GetTempPath()
with open( path, 'wb' ) as f:
with zipfile.ZipFile( zip_path, 'r' ) as z: f.write( z.read( name ) )
pretty_path = zip_path + os.path.sep + name
if pretty_path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ pretty_path ]
return ( path, service_identifiers_to_tags, None )
class ImportArgsGeneratorThread( ImportArgsGenerator ):
def __init__( self, job_key, item, advanced_import_options, advanced_tag_options ):
ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
self._advanced_tag_options = advanced_tag_options
def _GetArgs( self ):
self._job_key.SetVariable( 'status', 'downloading' )
( md5, board, image_name, ext, filename ) = self._item
# where do I get 4chan_board from? is it set to the controller_job_key?
# that'd prob be the best place, but think about it
url = 'http://images.4chan.org/' + board + '/src/' + image_name + ext
parse_result = urlparse.urlparse( url )
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
connection = HC.get_connection( scheme = scheme, host = host, port = port )
def hook( range, value ):
self._job_key.SetVariable( 'range', range )
self._job_key.SetVariable( 'value', value )
connection.AddReportHook( hook )
temp_path = connection.geturl( url, response_to_path = True )
connection.ClearReportHooks()
tags = [ 'filename:' + filename + ext ]
service_identifiers_to_tags = ConvertTagsToServiceIdentifiersToTags( tags, self._advanced_tag_options )
return ( temp_path, service_identifiers_to_tags, url )
def _CheckCurrentStatus( self ):
self._job_key.SetVariable( 'status', 'checking md5 status' )
( md5, board, image_name, ext, filename ) = self._item
( status, hash ) = HC.app.Read( 'md5_status', md5 )
if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options: status = 'new'
if status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
return ( status, media_result )
else: return ( status, None )
class ImportArgsGeneratorURLs( ImportArgsGenerator ):
def _GetArgs( self ):
url = self._item
self._job_key.SetVariable( 'status', 'downloading' )
parse_result = urlparse.urlparse( url )
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
connection = HC.get_connection( scheme = scheme, host = host, port = port )
def hook( range, value ):
self._job_key.SetVariable( 'range', range )
self._job_key.SetVariable( 'value', value )
connection.AddReportHook( hook )
temp_path = connection.geturl( url, response_to_path = True )
connection.ClearReportHooks()
service_identifiers_to_tags = {}
return ( temp_path, service_identifiers_to_tags, url )
def _CheckCurrentStatus( self ):
url = self._item
self._job_key.SetVariable( 'status', 'checking url status' )
( status, hash ) = HC.app.Read( 'url_status', url )
if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options: status = 'new'
if status == 'redundant':
( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
return ( status, media_result )
else: return ( status, None )
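# A minimal illustrative sketch, not from the original file, of the subclass
# contract the generators above follow: _GetArgs returns
# ( temp_path, service_identifiers_to_tags, url ) and _CheckCurrentStatus
# returns one of 'new'/'deleted'/'redundant' plus a media result (or None).
# The base class __call__, defined earlier in this file, drives both. The
# item-is-a-local-path assumption below is invented for illustration.
class ImportArgsGeneratorExample( ImportArgsGenerator ):
    
    def _GetArgs( self ):
        
        path = self._item # assume the pended queue item is already a local path
        
        self._job_key.SetVariable( 'status', 'reading from hdd' )
        
        return ( path, {}, None ) # no tags, no source url
        
    
    def _CheckCurrentStatus( self ): return ( 'new', None ) # always attempt the import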
class ImportQueueGenerator():
def __init__( self, job_key, item ):
self._job_key = job_key
self._item = item
def __call__( self ):
queue = self._item
self._job_key.SetVariable( 'queue', queue )
self._job_key.Finish()
class ImportQueueGeneratorGallery( ImportQueueGenerator ):
def __init__( self, job_key, item, downloaders_factory ):
ImportQueueGenerator.__init__( self, job_key, item )
self._downloaders_factory = downloaders_factory
def __call__( self ):
try:
raw_query = self._item
downloaders = list( self._downloaders_factory( raw_query ) )
downloaders[0].SetupGallerySearch() # for now this is cookie-based for hf, so only have to do it on one
total_urls_found = 0
while True:
downloaders_to_remove = []
for downloader in downloaders:
if self._job_key.IsPaused():
self._job_key.SetVariable( 'status', 'paused after ' + HC.u( total_urls_found ) + ' urls' )
self._job_key.WaitOnPause()
if self._job_key.IsCancelled(): break
self._job_key.SetVariable( 'status', 'found ' + HC.u( total_urls_found ) + ' urls' )
page_of_url_args = downloader.GetAnotherPage()
total_urls_found += len( page_of_url_args )
if len( page_of_url_args ) == 0: downloaders_to_remove.append( downloader )
else:
queue = self._job_key.GetVariable( 'queue' )
queue = list( queue )
queue.extend( page_of_url_args )
self._job_key.SetVariable( 'queue', queue )
for downloader in downloaders_to_remove: downloaders.remove( downloader )
if len( downloaders ) == 0: break
if self._job_key.IsPaused():
self._job_key.SetVariable( 'status', 'paused after ' + HC.u( total_urls_found ) + ' urls' )
self._job_key.WaitOnPause()
if self._job_key.IsCancelled(): break
self._job_key.SetVariable( 'status', '' )
except Exception as e:
self._job_key.SetVariable( 'status', HC.u( e ) )
HC.ShowException( e )
time.sleep( 2 )
finally: self._job_key.Finish()
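# A minimal illustrative sketch, not from the original file, of the downloader
# contract the gallery loop above relies on: downloaders_factory( raw_query )
# returns a list of downloaders, SetupGallerySearch() is called once on the
# first of them, and GetAnotherPage() returns a list of url args per call, with
# an empty list meaning the gallery is exhausted. The urls below are invented.
class ExampleGalleryDownloader():
    
    def __init__( self, raw_query ):
        
        self._pages_remaining = [ [ ( 'http://example.com/file/1', ) ], [ ( 'http://example.com/file/2', ) ] ]
        
    
    def SetupGallerySearch( self ): pass # e.g. set login cookies
    
    def GetAnotherPage( self ):
        
        if len( self._pages_remaining ) == 0: return []
        
        return self._pages_remaining.pop( 0 )
        
    

def example_downloaders_factory( raw_query ): return [ ExampleGalleryDownloader( raw_query ) ]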
class ImportQueueGeneratorURLs( ImportQueueGenerator ):
def __call__( self ):
try:
url = self._item
self._job_key.SetVariable( 'status', 'parsing url' )
try:
parse_result = urlparse.urlparse( url )
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
except: raise Exception( 'Could not parse that URL' )
self._job_key.SetVariable( 'status', 'Connecting to address' )
try: connection = HC.get_connection( scheme = scheme, host = host, port = port )
except: raise Exception( 'Could not connect to server' )
try: html = connection.geturl( url )
except: raise Exception( 'Could not download that url' )
self._job_key.SetVariable( 'status', 'parsing html' )
try: urls = ClientParsers.ParsePage( html, url )
except: raise Exception( 'Could not parse that URL\'s html' )
queue = urls
self._job_key.SetVariable( 'queue', queue )
except Exception as e:
self._job_key.SetVariable( 'status', HC.u( e ) )
HC.ShowException( e )
time.sleep( 2 )
finally: self._job_key.Finish()
class ImportQueueGeneratorThread( ImportQueueGenerator ):
def __call__( self ):
try:
( board, thread_id ) = self._item
last_thread_check = 0
image_infos_already_added = set()
while True:
if self._job_key.IsPaused():
self._job_key.SetVariable( 'status', 'paused' )
self._job_key.WaitOnPause()
if self._job_key.IsCancelled(): break
thread_time = self._job_key.GetVariable( 'thread_time' )
if thread_time < 30: thread_time = 30
next_thread_check = last_thread_check + thread_time
if next_thread_check < HC.GetNow():
self._job_key.SetVariable( 'status', 'checking thread' )
url = 'http://api.4chan.org/' + board + '/res/' + thread_id + '.json'
try:
connection = HC.get_connection( url = url )
raw_json = connection.geturl( url )
json_dict = json.loads( raw_json )
posts_list = json_dict[ 'posts' ]
image_infos = [ ( post[ 'md5' ].decode( 'base64' ), board, HC.u( post[ 'tim' ] ), post[ 'ext' ], post[ 'filename' ] ) for post in posts_list if 'md5' in post ]
image_infos_i_can_add = [ image_info for image_info in image_infos if image_info not in image_infos_already_added ]
image_infos_already_added.update( image_infos_i_can_add )
if len( image_infos_i_can_add ) > 0:
queue = self._job_key.GetVariable( 'queue' )
queue = list( queue )
queue.extend( image_infos_i_can_add )
self._job_key.SetVariable( 'queue', queue )
except HydrusExceptions.NotFoundException: raise Exception( 'Thread 404' )
last_thread_check = HC.GetNow()
else: self._job_key.SetVariable( 'status', 'rechecking thread ' + HC.ConvertTimestampToPrettyPending( next_thread_check ) )
except Exception as e:
self._job_key.SetVariable( 'status', HC.u( e ) )
HC.ShowException( e )
time.sleep( 2 )
finally: self._job_key.Finish()
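# A minimal illustrative sketch, not from the original file, of the shared
# queue generator contract: each generator is a callable that fills the job
# key's 'queue' variable (possibly repeatedly, as the gallery and thread
# checkers above do) and calls Finish() when it has nothing more to add. The
# urls-in-a-text-file item is an invented example.
class ImportQueueGeneratorTextFile( ImportQueueGenerator ):
    
    def __call__( self ):
        
        try:
            
            path = self._item # assume the pended item is a path to a text file of urls
            
            self._job_key.SetVariable( 'status', 'reading urls from file' )
            
            with open( path, 'rb' ) as f: urls = [ line.strip() for line in f if line.strip() != '' ]
            
            self._job_key.SetVariable( 'queue', urls )
            
        except Exception as e:
            
            self._job_key.SetVariable( 'status', HC.u( e ) )
            
            HC.ShowException( e )
            
        finally: self._job_key.Finish()
        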
class ImportController():
def __init__( self, import_args_generator_factory, import_queue_generator_factory, page_key = None ):
self._controller_job_key = self._GetNewJobKey( 'controller' )
self._import_args_generator_factory = import_args_generator_factory
self._import_queue_generator_factory = import_queue_generator_factory
self._page_key = page_key
self._import_args_generator = import_args_generator
self._queue_is_done = False
self._queue = []
self._paused = False
self._current_position = 0
HC.pubsub.sub( self, 'SetPaused', 'pause_import_queue_processor' )
self._import_job_key = self._GetNewJobKey( 'import' )
self._import_queue_position_job_key = self._GetNewJobKey( 'import_queue_position' )
self._import_queue_job_key = self._GetNewJobKey( 'import_queue' )
self._pending_import_queue_jobs = []
self._lock = threading.Lock()
def AddToQueue( self, queue_objects ):
with self._lock: self._queue.extend( queue_objects )
def QueueIsDone( self ): self._queue_is_done = True
def SetPaused( self, status ): self._paused = status
def ToTuple( self ):
with self._lock: return ( self._current_position, len( self._queue ) )
def ProcessQueue( self ):
while not self._queue_is_done:
with self._lock: queue_length = len( self._queue )
if not self._paused and self._current_position < queue_length:
with self._lock: queue_object = self._queue[ self._current_position ]
# reorder these params as is best
( temp_path, url, tags, anything_else ) = self._path_generator( self._page_key, queue_object )
# synchronously write import to db
self._current_position += 1
time.sleep( 1 )
def PathGeneratorBooru( self, page_key, queue_object ):
# unpack queue_object
# test url or whatever as appropriate
# fetch file, possibly with help of downloader or whatever!
# downloader should write file to path, returning temp_path
# we should return temp_path
pass
def _GetNewJobKey( self, type ):
job_key = HC.JobKey()
if type == 'controller':
job_key.SetVariable( 'num_successful', 0 )
job_key.SetVariable( 'num_failed', 0 )
job_key.SetVariable( 'num_deleted', 0 )
job_key.SetVariable( 'num_redundant', 0 )
else:
job_key.SetVariable( 'status', '' )
if type == 'import':
job_key.SetVariable( 'page_key', self._page_key )
job_key.SetVariable( 'range', 1 )
job_key.SetVariable( 'value', 0 )
elif type == 'import_queue_position':
job_key.SetVariable( 'queue_position', 0 )
elif type == 'import_queue':
job_key.SetVariable( 'queue', [] )
return job_key
def CleanBeforeDestroy( self ): self._controller_job_key.Cancel()
def GetJobKey( self, type ):
with self._lock:
if type == 'controller': return self._controller_job_key
elif type == 'import': return self._import_job_key
elif type == 'import_queue_position': return self._import_queue_position_job_key
elif type == 'import_queue': return self._import_queue_job_key
def GetPendingImportQueues( self ):
with self._lock: return self._pending_import_queue_jobs
def PendImportQueue( self, job ):
with self._lock: self._pending_import_queue_jobs.append( job )
def RemovePendingImportQueue( self, job ):
with self._lock:
if job in self._pending_import_queue_jobs: self._pending_import_queue_jobs.remove( job )
def MovePendingImportQueueUp( self, job ):
with self._lock:
if job in self._pending_import_queue_jobs:
index = self._pending_import_queue_jobs.index( job )
if index > 0:
self._pending_import_queue_jobs.remove( job )
self._pending_import_queue_jobs.insert( index - 1, job )
def MovePendingImportQueueDown( self, job ):
with self._lock:
if job in self._pending_import_queue_jobs:
index = self._pending_import_queue_jobs.index( job )
if index + 1 < len( self._pending_import_queue_jobs ):
self._pending_import_queue_jobs.remove( job )
self._pending_import_queue_jobs.insert( index + 1, job )
def MainLoop( self ):
try:
while not self._controller_job_key.IsDone():
if self._controller_job_key.IsPaused():
self._import_job_key.Pause()
self._import_queue_position_job_key.Pause()
self._import_queue_job_key.Pause()
self._controller_job_key.WaitOnPause()
with self._lock:
queue_position = self._import_queue_position_job_key.GetVariable( 'queue_position' )
queue = self._import_queue_job_key.GetVariable( 'queue' )
if self._import_job_key.IsDone():
result = self._import_job_key.GetVariable( 'result' )
variable_name = 'num_' + result
num_result = self._controller_job_key.GetVariable( variable_name )
self._controller_job_key.SetVariable( variable_name, num_result + 1 )
self._import_job_key = self._GetNewJobKey( 'import' )
queue_position += 1
self._import_queue_position_job_key.SetVariable( 'queue_position', queue_position )
position_string = HC.u( queue_position + 1 ) + '/' + HC.u( len( queue ) )
if self._import_queue_position_job_key.IsPaused(): self._import_queue_position_job_key.SetVariable( 'status', 'paused at ' + position_string )
elif self._import_queue_position_job_key.IsWorking():
if self._import_job_key.IsWorking():
self._import_queue_position_job_key.SetVariable( 'status', 'processing ' + position_string )
else:
if queue_position < len( queue ):
self._import_queue_position_job_key.SetVariable( 'status', 'preparing ' + position_string )
self._import_job_key.Begin()
item = queue[ queue_position ]
args_generator = self._import_args_generator_factory( self._import_job_key, item )
threading.Thread( target = args_generator, name = 'Generate Import Args' ).start()
else:
if self._import_queue_job_key.IsWorking(): self._import_queue_position_job_key.SetVariable( 'status', 'waiting for more items' )
else: self._import_queue_position_job_key.Finish()
else:
if self._import_queue_position_job_key.IsDone():
if self._import_queue_position_job_key.IsCancelled(): status = 'cancelled at ' + position_string
else: status = 'done'
self._import_queue_position_job_key = self._GetNewJobKey( 'import_queue_position' )
self._import_queue_job_key = self._GetNewJobKey( 'import_queue' )
else: status = ''
self._import_queue_position_job_key.SetVariable( 'status', status )
if len( self._pending_import_queue_jobs ) > 0:
self._import_queue_position_job_key.Begin()
self._import_queue_job_key.Begin()
item = self._pending_import_queue_jobs.pop( 0 )
queue_generator = self._import_queue_generator_factory( self._import_queue_job_key, item )
threading.Thread( target = queue_generator, name = 'Generate Import Items' ).start()
time.sleep( 0.05 )
except Exception as e: HC.ShowException( e )
finally:
self._import_job_key.Cancel()
self._import_queue_position_job_key.Cancel()
self._import_queue_job_key.Cancel()
def StartThread( self ):
threading.Thread( target = self.MainLoop ).start()
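# A minimal illustrative sketch, not from the original file, of how a download
# page might wire itself to the controller above: the page supplies the two
# factories (each receives ( job_key, item )), starts the main loop and pends
# an initial queue item. page_key and url_to_parse are stand-ins, and the empty
# advanced_import_options dict is an assumption made for the example.
def example_url_download_page_setup( page_key, url_to_parse ):
    
    def import_args_generator_factory( job_key, item ): return ImportArgsGeneratorURLs( job_key, item, {} )
    
    def import_queue_generator_factory( job_key, item ): return ImportQueueGeneratorURLs( job_key, item )
    
    controller = ImportController( import_args_generator_factory, import_queue_generator_factory, page_key = page_key )
    
    controller.StartThread()
    
    controller.PendImportQueue( url_to_parse ) # MainLoop hands this to the queue generator
    
    return controller
    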
def THREADDownloadURL( job_key, url, message_string ):

View File

@@ -1,3 +1,4 @@
class DBException( Exception ): pass
class DBAccessException( Exception ): pass
class MimeException( Exception ): pass
class SizeException( Exception ): pass

View File

@@ -2798,7 +2798,7 @@ class DB( ServiceDB ):
synchronous = True
job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
job = HC.JobDatabase( action, job_type, synchronous, *args, **kwargs )
self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first
@@ -2811,7 +2811,7 @@ class DB( ServiceDB ):
synchronous = True
job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
job = HC.JobDatabase( action, job_type, synchronous, *args, **kwargs )
self._jobs.put( ( priority, job ) )

View File

@@ -618,11 +618,15 @@ class TestClientDB( unittest.TestCase ):
generate_media_result = True
( written_result, written_hash, written_media_result ) = self._write( 'import_file', path, generate_media_result = True )
( written_result, written_hash ) = self._write( 'import_file', path )
self.assertEqual( written_result, 'successful' )
self.assertEqual( written_hash, hash )
( written_result, written_media_result ) = self._write( 'import_file', path, generate_media_result = True )
self.assertEqual( written_result, 'redundant' )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_file_service_identifiers_cdpp, mr_local_ratings, mr_remote_ratings ) = written_media_result.ToTuple()
now = HC.GetNow()