Version 90

This commit is contained in:
Hydrus 2013-10-30 17:28:06 -05:00
parent 7e66755dd8
commit 126529acb0
16 changed files with 1084 additions and 275 deletions

View File

@ -8,6 +8,39 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 90</h3></li>
<ul>
<li>client db unit tests added:</li>
<ul>
<li>4chan_pass</li>
<li>autocomplete_tags</li>
<li>booru and boorus</li>
<li>downloads</li>
<li>favourite_custom_filter_actions</li>
<li>imageboard</li>
<li>import_folders</li>
<li>md5_status</li>
<li>media_results</li>
<li>namespace_blacklists</li>
<li>news</li>
<li>pixiv_account</li>
<li>improved services</li>
<li>sessions</li>
<li>shutdown_timestamps</li>
</ul>
<li>fixed modify account</li>
<li>fixed modify account again!</li>
<li>made account_info respond a lot faster with large tag counts</li>
<li>neatened some client db code</li>
<li>youtube 'file already deleted' error now reported correctly</li>
<li>youtube general error reporting improved</li>
<li>fixed multiple-computer-single-account data use reporting</li>
<li>moved file parsing from nested dialogs to a single popup message gauge</li>
<li>fixed an export folder path-display issue when an install moves location</li>
<li>improved how some file downloaders process their files</li>
<li>added prototype file download status bars to all download management panels</li>
<li>fixed a bunch of tests that were cleaning up inelegantly</li>
</ul>
<li><h3>version 89</h3></li>
<ul>
<li>fixed blacklist manager for numtags</li>
@ -24,49 +57,49 @@
<li>improved testing framework to manage client and server sessions</li>
<li>several network request unit tests added:</li>
<ul>
<li>access_key
<li>account
<li>account_info
<li>account_types GET
<li>account_types POST
<li>file_repo file
<li>file_repo thumbnail
<li>init
<li>ip
<li>news
<li>petition
<li>registration_keys
<li>services GET
<li>services POST
<li>session_key
<li>stats
<li>update GET
<li>update POST
<li>access_key</li>
<li>account</li>
<li>account_info</li>
<li>account_types GET</li>
<li>account_types POST</li>
<li>file_repo file</li>
<li>file_repo thumbnail</li>
<li>init</li>
<li>ip</li>
<li>news</li>
<li>petition</li>
<li>registration_keys</li>
<li>services GET</li>
<li>services POST</li>
<li>session_key</li>
<li>stats</li>
<li>update GET</li>
<li>update POST</li>
</ul>
<li>fixed clientside permissions exception</li>
<li>several db unit tests added:</li>
<ul>
<li>import_file
<li>system_predicates:
<li>import_file</li>
<li>system_predicates:</li>
<ul>
<li>age
<li>archive
<li>duration
<li>everything
<li>file_service
<li>hash
<li>height
<li>inbox
<li>local
<li>mime
<li>not_local
<li>num_tags
<li>num_words
<li>ratio
<li>similar_to
<li>size
<li>width
<li>limit
<li>age</li>
<li>archive</li>
<li>duration</li>
<li>everything</li>
<li>file_service</li>
<li>hash</li>
<li>height</li>
<li>inbox</li>
<li>local</li>
<li>mime</li>
<li>not_local</li>
<li>num_tags</li>
<li>num_words</li>
<li>ratio</li>
<li>similar_to</li>
<li>size</li>
<li>width</li>
<li>limit</li>
</ul>
</ul>
<li>fixed a bug in system:age?</li>

View File

@ -424,71 +424,39 @@ def GetAllFileHashes():
return file_hashes
def GetAllPaths( raw_paths, quiet = False ):
def GetAllPaths( raw_paths ):
file_paths = []
title = 'Parsing files and subdirectories'
if not quiet: progress = wx.ProgressDialog( title, u'Preparing', 1000, HC.app.GetTopWindow(), style=wx.PD_APP_MODAL | wx.PD_AUTO_HIDE | wx.PD_CAN_ABORT | wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | wx.PD_REMAINING_TIME )
paths_to_process = raw_paths
try:
paths_to_process = raw_paths
total_paths_to_process = len( paths_to_process )
num_processed = 0
while len( paths_to_process ) > 0:
next_paths_to_process = []
for path in paths_to_process:
if not quiet:
# would rather use progress.SetRange( total_paths_to_process ) here, but for some reason wx python doesn't support it!
permill = int( 1000 * ( float( num_processed ) / float( total_paths_to_process ) ) )
( should_continue, skip ) = progress.Update( permill, 'Done ' + HC.u( num_processed ) + '/' + HC.u( total_paths_to_process ) )
if not should_continue:
progress.Destroy()
return []
if os.path.isdir( path ):
subpaths = [ path + os.path.sep + filename for filename in dircache.listdir( path ) ]
total_paths_to_process += len( subpaths )
next_paths_to_process.extend( subpaths )
else: file_paths.append( path )
num_processed += 1
paths_to_process = next_paths_to_process
except:
if not quiet:
message = 'While parsing files, encountered this error:' + os.linesep + traceback.format_exc()
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
total_paths_to_process = len( paths_to_process )
if not quiet: progress.Destroy()
num_processed = 0
while len( paths_to_process ) > 0:
next_paths_to_process = []
for path in paths_to_process:
if os.path.isdir( path ):
subpaths = [ path + os.path.sep + filename for filename in dircache.listdir( path ) ]
total_paths_to_process += len( subpaths )
next_paths_to_process.extend( subpaths )
else: file_paths.append( path )
num_processed += 1
paths_to_process = next_paths_to_process
gc.collect()
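For reference, the reworked GetAllPaths is now a plain breadth-first expansion with no UI coupling at all. A minimal self-contained sketch of the same idea, using os.listdir in place of dircache so it runs on modern Pythons (the original's use of dircache is purely a caching detail):

import os

def get_all_paths( raw_paths ):
    
    # breadth-first expansion: directories queue their children, plain files are collected
    file_paths = []
    
    paths_to_process = list( raw_paths )
    
    while len( paths_to_process ) > 0:
        
        next_paths_to_process = []
        
        for path in paths_to_process:
            
            if os.path.isdir( path ):
                
                # defer subpaths to the next pass rather than recursing
                next_paths_to_process.extend( os.path.join( path, filename ) for filename in os.listdir( path ) )
                
            else: file_paths.append( path )
            
        
        paths_to_process = next_paths_to_process
        
    
    return file_paths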
@ -638,7 +606,25 @@ def IntersectTags( tags_managers, service_identifier = HC.COMBINED_TAG_SERVICE_I
return ( current, deleted, pending, petitioned )
def ParseImportablePaths( raw_paths ):
def ShowExceptionClient( e ):
etype = type( e )
value = HC.u( e )
trace_list = traceback.format_stack()
trace = ''.join( trace_list )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_ERROR, ( etype, value, trace ) ) )
def THREADParseImportablePaths( result_callable, raw_paths ):
job_key = HC.JobKey()
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_GAUGE, job_key ) )
HC.pubsub.pub( 'message_gauge_info', job_key, None, None, u'Parsing files and folders.' )
file_paths = GetAllPaths( raw_paths )
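The dialog-free flow above hinges on two small pieces: a JobKey the UI can cancel, and 'message_gauge_info' pubsub events that the popup gauge consumes. A rough sketch of that contract, with a stand-in pubsub; only IsCancelled appears in this diff, so the Cancel method name is an assumption:

import threading

class JobKey( object ):
    
    # hypothetical minimal version: one event flag shared between UI and worker
    def __init__( self ): self._cancelled = threading.Event()
    
    def Cancel( self ): self._cancelled.set() # name assumed; only IsCancelled is shown above
    
    def IsCancelled( self ): return self._cancelled.is_set()
    

class PubSub( object ):
    
    # stand-in for HC.pubsub: just prints what a gauge popup would consume
    def pub( self, topic, *args ): print( ( topic, args ) )
    

def parse_paths( pubsub, job_key, file_paths ):
    
    num_file_paths = len( file_paths )
    
    for ( i, path ) in enumerate( file_paths ):
        
        # the gauge popup keys off ( job_key, range, value, text ) tuples
        pubsub.pub( 'message_gauge_info', job_key, num_file_paths, i, 'Done ' + str( i ) + '/' + str( num_file_paths ) )
        
        if job_key.IsCancelled(): break
        
    

pubsub = PubSub()
job_key = JobKey()

parse_paths( pubsub, job_key, [ 'a.png', 'b.png' ] )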
@ -647,15 +633,13 @@ def ParseImportablePaths( raw_paths ):
num_file_paths = len( file_paths )
num_odd_files = 0
progress = wx.ProgressDialog( 'Checking files\' mimetypes', u'Preparing', num_file_paths, HC.app.GetTopWindow(), style=wx.PD_APP_MODAL | wx.PD_AUTO_HIDE | wx.PD_CAN_ABORT | wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | wx.PD_REMAINING_TIME )
for ( i, path ) in enumerate( file_paths ):
if i % 500 == 0: gc.collect()
( should_continue, skip ) = progress.Update( i, 'Done ' + HC.u( i ) + '/' + HC.u( num_file_paths ) )
HC.pubsub.pub( 'message_gauge_info', job_key, num_file_paths, i, u'Done ' + HC.u( i ) + '/' + HC.u( num_file_paths ) )
if not should_continue: break
if job_key.IsCancelled(): break
info = os.lstat( path )
@ -675,7 +659,7 @@ def ParseImportablePaths( raw_paths ):
if mime in HC.ALLOWED_MIMES: good_paths_info.append( ( 'path', mime, size, path ) )
elif mime in HC.ARCHIVES:
( should_continue, skip ) = progress.Update( i, u'Found an archive; parsing\u2026' )
HC.pubsub.pub( 'message_gauge_info', job_key, num_file_paths, i, u'Found an archive; parsing\u2026' )
if mime == HC.APPLICATION_HYDRUS_ENCRYPTED_ZIP:
@ -703,26 +687,40 @@ def ParseImportablePaths( raw_paths ):
while aes_key is None:
job = HC.Job()
def WXTHREADGetAESKey():
with wx.TextEntryDialog( HC.app.GetTopWindow(), 'Please enter the key for ' + path ) as dlg:
while aes_key is None:
result = dlg.ShowModal()
if result == wx.ID_OK:
with wx.TextEntryDialog( HC.app.GetTopWindow(), 'Please enter the key for ' + path ) as dlg:
try:
key_text = dlg.GetValue()
( aes_key, iv ) = HydrusEncryption.AESTextToKey( key_text )
except: wx.MessageBox( 'Did not understand that key!' )
result = dlg.ShowModal()
if result == wx.ID_OK:
try:
key_text = dlg.GetValue()
( aes_key, iv ) = HydrusEncryption.AESTextToKey( key_text )
job.PutResult( ( aes_key, iv ) )
except: wx.MessageBox( 'Did not understand that key!' )
elif result == wx.ID_CANCEL: job.PutResult( ( None, None ) )
elif result == wx.ID_CANCEL: break
if aes_key is None:
wx.CallAfter( WXTHREADGetAESKey )
( aes_key, iv ) = job.GetResult()
if aes_key is not None:
path_to = HydrusEncryption.DecryptAESFile( aes_key, iv, path )
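The HC.Job used here is just a result slot plus a threading.Event, matching the Job class further down this commit: the worker thread schedules the wx prompt with wx.CallAfter, then blocks on GetResult until the main thread calls PutResult. A bare sketch of that handshake, without wx and without the original's polling loop:

import threading

class Job( object ):
    
    def __init__( self ):
        
        self._result = None
        self._result_ready = threading.Event()
        
    
    def GetResult( self ):
        
        self._result_ready.wait() # block the worker until the other thread answers
        
        return self._result
        
    
    def PutResult( self, result ):
        
        self._result = result
        self._result_ready.set()
        
    

job = Job()

# in the real code this callable runs on the wx main thread via wx.CallAfter
threading.Thread( target = lambda: job.PutResult( ( 'aes key', 'iv' ) ) ).start()

( aes_key, iv ) = job.GetResult()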
@ -785,29 +783,23 @@ def ParseImportablePaths( raw_paths ):
progress.Destroy()
if num_odd_files > 0:
if len( good_paths_info ) > 0:
message = HC.u( num_odd_files ) + ' files could not be added.'
if len( good_paths_info ) == 1: message = '1 file was parsed successfully'
else: message = HC.u( len( good_paths_info ) ) + ' files were parsed successfully'
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
if num_odd_files > 0: message += ', but ' + HC.u( num_odd_files ) + ' failed.'
else: message += '.'
else:
message = HC.u( num_odd_files ) + ' files could not be parsed.'
return good_paths_info
HC.pubsub.pub( 'message_gauge_info', job_key, None, None, message )
def ShowExceptionClient( e ):
wx.CallAfter( result_callable, good_paths_info )
etype = type( e )
value = HC.u( e )
trace_list = traceback.format_stack()
trace = ''.join( trace_list )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_ERROR, ( etype, value, trace ) ) )
class AutocompleteMatches():
def __init__( self, matches ):

View File

@ -2438,7 +2438,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
result = c.execute( 'SELECT blacklist, namespaces FROM namespace_blacklists WHERE service_id = ?;', ( service_id, ) ).fetchone()
if result is None: result = ( True, set() )
if result is None: result = ( True, [] )
return result
@ -2648,8 +2648,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
return reason_id
def _GetResolution( self, c, hash ): return c.execute( 'SELECT width, height FROM files_info, hashes USING ( hash_id ) WHERE service_id = ? AND hash = ?;', ( self._local_file_service_id, sqlite3.Binary( hash ) ) ).fetchone()
def _GetService( self, c, parameter ):
try:
@ -4350,6 +4348,7 @@ class DB( ServiceDB ):
def __init__( self ):
self._local_shutdown = False
self._loop_finished = False
self._db_path = HC.DB_DIR + os.path.sep + 'client.db'
@ -6501,6 +6500,8 @@ class DB( ServiceDB ):
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'vacuumed successfully' ) )
def GetLoopFinished( self ): return self._loop_finished
def pub( self, topic, *args, **kwargs ): self._pubsubs.append( ( topic, args, kwargs ) )
def pub_content_updates( self, service_identifiers_to_content_updates ):
@ -6527,10 +6528,10 @@ class DB( ServiceDB ):
elif action == 'booru': result = self._GetBooru( c, *args, **kwargs )
elif action == 'boorus': result = self._GetBoorus( c, *args, **kwargs )
elif action == 'contact_names': result = self._GetContactNames( c, *args, **kwargs )
elif action == 'file_query_ids': result = self._GetFileQueryIds( c, *args, **kwargs )
elif action == 'do_message_query': result = self._DoMessageQuery( c, *args, **kwargs )
elif action == 'downloads': result = self._GetDownloads( c, *args, **kwargs )
elif action == 'favourite_custom_filter_actions': result = self._GetFavouriteCustomFilterActions( c, *args, **kwargs )
elif action == 'file_query_ids': result = self._GetFileQueryIds( c, *args, **kwargs )
elif action == 'file_system_predicates': result = self._GetFileSystemPredicates( c, *args, **kwargs )
elif action == 'hydrus_sessions': result = self._GetHydrusSessions( c, *args, **kwargs )
elif action == 'identities_and_contacts': result = self._GetIdentitiesAndContacts( c, *args, **kwargs )
@ -6550,7 +6551,6 @@ class DB( ServiceDB ):
elif action == 'pixiv_account': result = self._GetPixivAccount( c, *args, **kwargs )
elif action == 'ratings_filter': result = self._GetRatingsFilter( c, *args, **kwargs )
elif action == 'ratings_media_result': result = self._GetRatingsMediaResult( c, *args, **kwargs )
elif action == 'resolution': result = self._GetResolution( c, *args, **kwargs )
elif action == 'service': result = self._GetService( c, *args, **kwargs )
elif action == 'service_identifiers': result = self._GetServiceIdentifiers( c, *args, **kwargs )
elif action == 'service_info': result = self._GetServiceInfo( c, *args, **kwargs )
@ -6604,7 +6604,6 @@ class DB( ServiceDB ):
elif action == 'reset_service': result = self._ResetService( c, *args, **kwargs )
elif action == 'save_options': result = self._SaveOptions( c, *args, **kwargs )
elif action == 'service_updates': result = self._ProcessServiceUpdates( c, *args, **kwargs )
elif action == 'session': result = self._AddSession( c, *args, **kwargs )
elif action == 'set_password': result = self._SetPassword( c, *args, **kwargs )
elif action == 'set_tag_service_precedence': result = self._SetTagServicePrecedence( c, *args, **kwargs )
elif action == 'subscription': result = self._SetSubscription( c, *args, **kwargs )
@ -6697,6 +6696,11 @@ class DB( ServiceDB ):
except: pass # no jobs this second; let's see if we should shutdown
c.close()
db.close()
self._loop_finished = True
def Read( self, action, priority, *args, **kwargs ):
@ -6769,7 +6773,7 @@ def DAEMONCheckImportFolders():
raw_paths = [ folder_path + os.path.sep + filename for filename in filenames ]
all_paths = CC.GetAllPaths( raw_paths, quiet = True )
all_paths = CC.GetAllPaths( raw_paths )
HC.pubsub.pub( 'service_status', 'Found ' + HC.u( len( all_paths ) ) + ' files to import from ' + folder_path )

View File

@ -3869,7 +3869,12 @@ class DialogSelectLocalFiles( Dialog ):
def _AddPathsToList( self, paths ):
good_paths_info = CC.ParseImportablePaths( paths )
threading.Thread( target = CC.THREADParseImportablePaths, args = ( self.AddParsedPaths, paths ) ).start()
def _GetPathsInfo( self ): return [ row[0] for row in self._paths_list.GetClientData() ]
def AddParsedPaths( self, good_paths_info ):
odd_paths = False
@ -3891,8 +3896,6 @@ class DialogSelectLocalFiles( Dialog ):
if odd_paths: wx.MessageBox( HC.u( len( odd_paths ) ) + ' files could not be added.' )
def _GetPathsInfo( self ): return [ row[0] for row in self._paths_list.GetClientData() ]
def EventAddPaths( self, event ):
with wx.FileDialog( self, 'Select the files to add.', style=wx.FD_MULTIPLE ) as dlg:
@ -4435,7 +4438,12 @@ class DialogSetupExport( Dialog ):
self._paths.Append( pretty_tuple, data_tuple )
if HC.options[ 'export_path' ] is not None: self._directory_picker.SetPath( HC.ConvertPortablePathToAbsPath( HC.options[ 'export_path' ] ) )
if HC.options[ 'export_path' ] is not None:
abs_path = HC.ConvertPortablePathToAbsPath( HC.options[ 'export_path' ] )
if abs_path is not None: self._directory_picker.SetPath( abs_path )
self._zip_name.SetValue( 'archive name.zip' )

View File

@ -2814,7 +2814,12 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
def PopulateControls():
if HC.options[ 'export_path' ] is not None: self._export_location.SetPath( HC.ConvertPortablePathToAbsPath( HC.options[ 'export_path' ] ) )
if HC.options[ 'export_path' ] is not None:
abs_path = HC.ConvertPortablePathToAbsPath( HC.options[ 'export_path' ] )
if abs_path is not None: self._export_location.SetPath( abs_path )
self._exclude_deleted_files.SetValue( HC.options[ 'exclude_deleted_files' ] )
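Both this dialog and DialogSetupExport above now guard against ConvertPortablePathToAbsPath returning None, which is what the fix implies happens when the stored portable export path was recorded under an old install location. A tiny illustration of the guard, with a hypothetical resolver standing in for HC.ConvertPortablePathToAbsPath:

import os

def convert_portable_path_to_abs_path( portable_path, base_dir ):
    
    # hypothetical stand-in: resolve against the install dir, None if it no longer exists
    abs_path = os.path.normpath( os.path.join( base_dir, portable_path ) )
    
    if os.path.exists( abs_path ): return abs_path
    else: return None
    

abs_path = convert_portable_path_to_abs_path( 'export', '/opt/hydrus' )

if abs_path is not None: print( 'setting picker to ' + abs_path )
else: print( 'leaving the picker blank' )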

View File

@ -1481,6 +1481,8 @@ class ManagementPanelImportWithQueue( ManagementPanelImport ):
ManagementPanelImport.__init__( self, parent, page, page_key )
self._download_progress_gauge = ClientGUICommon.Gauge( self._processing_panel )
self._import_cancel_button = wx.Button( self._processing_panel, label = 'that\'s enough' )
self._import_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelImport )
self._import_cancel_button.SetForegroundColour( ( 128, 0, 0 ) )
@ -1513,6 +1515,7 @@ class ManagementPanelImportWithQueue( ManagementPanelImport ):
self._outer_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT )
HC.pubsub.sub( self, 'SetOuterQueueInfo', 'set_outer_queue_info' )
HC.pubsub.sub( self, 'SetDownloadProgress', 'set_download_progress' )
def _GetPreprocessStatus( self ):
@ -1633,6 +1636,16 @@ class ManagementPanelImportWithQueue( ManagementPanelImport ):
def SetDownloadProgress( self, range, value ):
if range is None: self._download_progress_gauge.Pulse()
else:
self._download_progress_gauge.SetRange( range )
self._download_progress_gauge.SetValue( value )
def SetOuterQueueInfo( self, page_key, info ):
if self._page_key == page_key: self._outer_queue_info.SetLabel( info )
@ -1670,8 +1683,10 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
c_p_hbox.AddF( self._import_pause_button, FLAGS_EXPAND_BOTH_WAYS )
c_p_hbox.AddF( self._import_cancel_button, FLAGS_EXPAND_BOTH_WAYS )
self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._download_progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( c_p_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
@ -1780,6 +1795,10 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) ) )
def hook( range, value ): wx.CallAfter( self.SetDownloadProgress, range, value )
downloader.AddReportHook( hook )
if do_tags: ( file, tags ) = downloader.GetFileAndTags( *url_args )
else:
@ -1788,6 +1807,8 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
tags = []
downloader.ClearReportHooks()
temp_path = HC.GetTempPath()
with open( temp_path, 'wb' ) as f: f.write( file )
@ -2056,6 +2077,7 @@ class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ):
self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._download_progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( c_p_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
@ -2115,11 +2137,13 @@ class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ):
connection = self._connections[ ( scheme, host, port ) ]
file = connection.geturl( url )
def hook( range, value ): wx.CallAfter( self.SetDownloadProgress, range, value )
temp_path = HC.GetTempPath()
connection.AddReportHook( hook )
with open( temp_path, 'wb' ) as f: f.write( file )
temp_path = connection.geturl( url, response_to_path = True )
connection.ClearReportHooks()
advanced_import_options = self._advanced_import_options.GetInfo()
@ -2169,6 +2193,8 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
ManagementPanelImport.__init__( self, parent, page, page_key )
self._download_progress_gauge = ClientGUICommon.Gauge( self._processing_panel )
self._connections = {}
vbox = wx.BoxSizer( wx.VERTICAL )
@ -2177,6 +2203,7 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._download_progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
self._processing_panel.AddF( self._import_pause_button, FLAGS_EXPAND_PERPENDICULAR )
@ -2325,11 +2352,13 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
connection = self._connections[ ( scheme, host, port ) ]
file = connection.geturl( url )
def hook( range, value ): wx.CallAfter( self.SetDownloadProgress, range, value )
temp_path = HC.GetTempPath()
connection.AddReportHook( hook )
with open( temp_path, 'wb' ) as f: f.write( file )
temp_path = connection.geturl( url, response_to_path = True )
connection.ClearReportHooks()
advanced_import_options = self._advanced_import_options.GetInfo()
@ -2438,6 +2467,16 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
def SetDownloadProgress( self, range, value ):
if range is None: self._download_progress_gauge.Pulse()
else:
self._download_progress_gauge.SetRange( range )
self._download_progress_gauge.SetValue( value )
def SetSearchFocus( self, page_key ):
if page_key == self._page_key: self._thread_input.SetFocus()

View File

@ -1,6 +1,7 @@
import bisect
import bs4
import collections
import cStringIO
import httplib
import HydrusExceptions
import HydrusPubSub
@ -38,7 +39,7 @@ TEMP_DIR = BASE_DIR + os.path.sep + 'temp'
# Misc
NETWORK_VERSION = 11
SOFTWARE_VERSION = 89
SOFTWARE_VERSION = 90
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -1145,6 +1146,8 @@ class AdvancedHTTPConnection():
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
self._report_hooks = []
self._scheme = scheme
self._host = host
self._port = port
@ -1221,17 +1224,50 @@ class AdvancedHTTPConnection():
response = self._connection.getresponse()
content_length = response.getheader( 'Content-Length' )
if content_length is not None: content_length = int( content_length )
block_size = 64 * 1024
if response.status == 200 and response_to_path:
temp_path = GetTempPath()
with open( temp_path, 'wb' ) as f: size_of_response = StreamToStream( response, f )
size_of_response = 0
with open( temp_path, 'wb' ) as f:
next_block = response.read( block_size )
while next_block != '':
size_of_response += len( next_block )
f.write( next_block )
for hook in self._report_hooks: hook( content_length, size_of_response )
next_block = response.read( block_size )
parsed_response = temp_path
else:
data = response.read()
data = ''
next_block = response.read( block_size )
while next_block != '':
data += next_block
for hook in self._report_hooks: hook( content_length, len( data ) )
next_block = response.read( block_size )
size_of_response = len( data )
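Both branches of the new response handling share one pattern: read in 64KB blocks and call every registered report hook with (expected total, bytes so far), which is what drives the new download status bars. A condensed sketch of that loop over any file-like response:

def read_with_hooks( response, f, content_length, report_hooks, block_size = 64 * 1024 ):
    
    # stream the response into f, telling every hook ( expected total, bytes so far )
    size_of_response = 0
    
    next_block = response.read( block_size )
    
    while next_block != '': # compare against b'' on Python 3
        
        size_of_response += len( next_block )
        
        f.write( next_block )
        
        # content_length is None when the server sent no Content-Length header
        for hook in report_hooks: hook( content_length, size_of_response )
        
        next_block = response.read( block_size )
        
    
    return size_of_response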
@ -1284,13 +1320,17 @@ class AdvancedHTTPConnection():
return parsed_response
def AddReportHook( self, hook ): self._report_hooks.append( hook )
def ClearReportHooks( self ): self._report_hooks = []
def close( self ): self._connection.close()
def connect( self ): self._connection.connect()
def GetCookies( self ): return self._cookies
def geturl( self, url, headers = {}, is_redirect = False, follow_redirects = True ):
def geturl( self, url, headers = {}, response_to_path = False, is_redirect = False, follow_redirects = True ):
parse_result = urlparse.urlparse( url )
@ -1300,7 +1340,7 @@ class AdvancedHTTPConnection():
if query != '': request += '?' + query
return self.request( 'GET', request, headers = headers, is_redirect = is_redirect, follow_redirects = follow_redirects )
return self.request( 'GET', request, headers = headers, response_to_path = response_to_path, is_redirect = is_redirect, follow_redirects = follow_redirects )
def request( self, request_type, request, headers = {}, body = None, response_to_path = False, is_redirect = False, follow_redirects = True ):
@ -1908,28 +1948,16 @@ class DAEMONWorker( DAEMON ):
def set( self, *args, **kwargs ): self._event.set()
class JobInternal():
class Job():
yaml_tag = u'!JobInternal'
yaml_tag = u'!Job'
def __init__( self, action, type, synchronous, *args, **kwargs ):
self._action = action
self._type = type
self._synchronous = synchronous
self._args = args
self._kwargs = kwargs
def __init__( self ):
self._result = None
self._result_ready = threading.Event()
def GetAction( self ): return self._action
def GetArgs( self ): return self._args
def GetKWArgs( self ): return self._kwargs
def GetResult( self ):
while True:
@ -1953,10 +1981,6 @@ class JobInternal():
else: return self._result
def GetType( self ): return self._type
def IsSynchronous( self ): return self._synchronous
def PutResult( self, result ):
self._result = result
@ -1964,6 +1988,31 @@ class JobInternal():
self._result_ready.set()
class JobInternal( Job ):
yaml_tag = u'!JobInternal'
def __init__( self, action, type, synchronous, *args, **kwargs ):
Job.__init__( self )
self._action = action
self._type = type
self._synchronous = synchronous
self._args = args
self._kwargs = kwargs
def GetAction( self ): return self._action
def GetArgs( self ): return self._args
def GetKWArgs( self ): return self._kwargs
def GetType( self ): return self._type
def IsSynchronous( self ): return self._synchronous
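With JobInternal now inheriting the result plumbing from Job, the db loop's job handling reduces to: pull a job off a queue, dispatch on GetAction, and PutResult back to the waiting caller. A simplified sketch of that loop (no priorities, no yaml_tag, and a toy action table standing in for the big if/elif chains above):

import threading
import Queue # 'queue' on Python 3

class JobInternal( object ):
    
    def __init__( self, action, synchronous, *args, **kwargs ):
        
        ( self._action, self._synchronous ) = ( action, synchronous )
        ( self._args, self._kwargs ) = ( args, kwargs )
        
        self._result = None
        self._result_ready = threading.Event()
        
    
    def GetAction( self ): return self._action
    def GetArgs( self ): return self._args
    def GetKWArgs( self ): return self._kwargs
    def IsSynchronous( self ): return self._synchronous
    
    def GetResult( self ):
        
        self._result_ready.wait()
        
        return self._result
        
    
    def PutResult( self, result ):
        
        self._result = result
        self._result_ready.set()
        
    

job_queue = Queue.Queue()

def db_loop( actions ):
    
    # the real loop also handles priorities, vacuuming and shutdown flags
    while True:
        
        job = job_queue.get()
        
        if job is None: break # sentinel shutdown
        
        result = actions[ job.GetAction() ]( *job.GetArgs(), **job.GetKWArgs() )
        
        if job.IsSynchronous(): job.PutResult( result )
        
    

actions = { 'resolution' : lambda hash: ( 200, 200 ) } # toy action table

threading.Thread( target = db_loop, args = ( actions, ) ).start()

job = JobInternal( 'resolution', True, 'some hash' )

job_queue.put( job )

print( job.GetResult() ) # ( 200, 200 )

job_queue.put( None )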
class JobKey():
def __init__( self ):

View File

@ -129,13 +129,13 @@ def DownloadYoutubeURL( job_key, url, message_string ):
( result, hash ) = HC.app.WriteSynchronous( 'import_file', temp_path )
if result in ( 'successful', 'redundant' ): HC.pubsub.pub( 'message_gauge_show_file_button', job_key, message_string, { hash } )
elif result == 'deleted': HC.pubsub.pub( 'message_gauge_failed', job_key )
elif result == 'deleted': HC.pubsub.pub( 'message_gauge_info', job_key, None, None, 'File was already deleted!' )
except:
except Exception as e:
HC.pubsub.pub( 'message_gauge_failed', job_key )
HC.pubsub.pub( 'message_gauge_info', job_key, None, None, 'Error with ' + message_string + '!' )
raise
HC.ShowException( e )
def GetYoutubeFormats( youtube_url ):
@ -155,11 +155,24 @@ class Downloader():
self._connections = {}
self._report_hooks = []
self._all_urls_so_far = set()
self._num_pages_done = 0
def _DownloadFile( self, connection, *args, **kwargs ):
for hook in self._report_hooks: connection.AddReportHook( hook )
response = connection.geturl( *args, **kwargs )
connection.ClearReportHooks()
return response
def _EstablishSession( self, connection ): pass
def _GetConnection( self, url ):
@ -182,6 +195,10 @@ class Downloader():
def _GetNextGalleryPageURLs( self ): return ( self._GetNextGalleryPageURL(), )
def AddReportHook( self, hook ): self._report_hooks.append( hook )
def ClearReportHooks( self ): self._report_hooks = []
def GetAnotherPage( self ):
if self._we_are_done: return []
@ -216,7 +233,7 @@ class Downloader():
connection = self._GetConnection( url )
return connection.geturl( url )
return self._DownloadFile( connection, url )
def GetFileAndTags( self, url, *args ):
@ -370,7 +387,7 @@ class DownloaderBooru( Downloader ):
connection = self._GetConnection( file_url )
return connection.geturl( file_url )
return self._DownloadFile( connection, file_url )
def GetFileAndTags( self, url ):
@ -379,7 +396,7 @@ class DownloaderBooru( Downloader ):
connection = self._GetConnection( file_url )
file = connection.geturl( file_url )
file = self._DownloadFile( connection, file_url )
return ( file, tags )
@ -491,7 +508,7 @@ class DownloaderDeviantArt( Downloader ):
connection = self._GetConnection( file_url )
return connection.geturl( file_url )
return self._DownloadFile( connection, file_url )
def GetTags( self, url, tags ): return tags
@ -698,7 +715,7 @@ class DownloaderHentaiFoundry( Downloader ):
connection = self._GetConnection( file_url )
return connection.geturl( file_url )
return self._DownloadFile( connection, file_url )
def GetFileAndTags( self, url ):
@ -707,7 +724,7 @@ class DownloaderHentaiFoundry( Downloader ):
connection = self._GetConnection( file_url )
file = connection.geturl( file_url )
file = self._DownloadFile( connection, file_url )
return ( file, tags )
@ -883,7 +900,7 @@ class DownloaderNewgrounds( Downloader ):
connection = self._GetConnection( file_url )
return connection.geturl( file_url )
return self._DownloadFile( connection, file_url )
def GetFileAndTags( self, url ):
@ -892,7 +909,7 @@ class DownloaderNewgrounds( Downloader ):
connection = self._GetConnection( file_url )
file = connection.geturl( file_url )
file = self._DownloadFile( connection, file_url )
return ( file, tags )
@ -1018,7 +1035,7 @@ class DownloaderPixiv( Downloader ):
headers = { 'Referer' : referral_url }
return connection.geturl( image_url, headers = headers )
return self._DownloadFile( connection, image_url, headers = headers )
def GetFileAndTags( self, url ):
@ -1029,7 +1046,7 @@ class DownloaderPixiv( Downloader ):
headers = { 'Referer' : referral_url }
file = connection.geturl( image_url, headers = headers )
file = self._DownloadFile( connection, image_url, headers = headers )
return ( file, tags )

View File

@ -704,9 +704,7 @@ class HydrusResourceCommandSessionKey( HydrusResourceCommand ):
account_identifier = HC.AccountIdentifier( access_key = access_key )
account = HC.app.Read( 'account', self._service_identifier, account_identifier )
( session_key, expiry ) = HC.app.AddSession( self._service_identifier, account )
( session_key, expiry ) = HC.app.AddSession( self._service_identifier, account_identifier )
now = HC.GetNow()
@ -836,7 +834,9 @@ class HydrusResourceCommandRestrictedAccount( HydrusResourceCommandRestricted ):
subject_identifiers = request.hydrus_args[ 'subject_identifiers' ]
HC.app.Write( 'account', self._service_identifier, admin_account, action, subject_identifiers )
kwargs = request.hydrus_args # for things like expiry, title, and so on
HC.app.Write( 'account', self._service_identifier, admin_account, action, subject_identifiers, kwargs )
response_context = HC.ResponseContext( 200 )

View File

@ -79,12 +79,23 @@ class HydrusSessionManagerServer():
existing_sessions = HC.app.Read( 'sessions' )
self._account_cache = dict()
self._sessions = { ( session_key, service_identifier ) : ( account, expiry ) for ( session_key, service_identifier, account, expiry ) in existing_sessions }
self._lock = threading.Lock()
def AddSession( self, service_identifier, account ):
def AddSession( self, service_identifier, account_identifier ):
if ( service_identifier, account_identifier ) not in self._account_cache:
account = HC.app.Read( 'account', service_identifier, account_identifier )
self._account_cache[ ( service_identifier, account_identifier ) ] = account
account = self._account_cache[ ( service_identifier, account_identifier ) ]
session_key = os.urandom( 32 )
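AddSession now memoises the account lookup per (service, account) pair, so repeat session creation for the same account skips the db read. The shape of that cache, with a fake reader in place of HC.app.Read and the expiry bookkeeping omitted:

import os

class SessionManager( object ):
    
    def __init__( self, read_account ):
        
        self._read_account = read_account
        self._account_cache = dict()
        self._sessions = dict()
        
    
    def AddSession( self, service_identifier, account_identifier ):
        
        key = ( service_identifier, account_identifier )
        
        # hit the db only on first sight of this account
        if key not in self._account_cache: self._account_cache[ key ] = self._read_account( service_identifier, account_identifier )
        
        account = self._account_cache[ key ]
        
        session_key = os.urandom( 32 )
        
        self._sessions[ ( session_key, service_identifier ) ] = account
        
        return session_key
        
    

def read_account( service_identifier, account_identifier ): return 'account for ' + account_identifier

manager = SessionManager( read_account )

session_key = manager.AddSession( 'file repo', 'some access key' )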

View File

@ -34,7 +34,7 @@ class Controller( wx.App ):
def _Write( self, action, priority, *args, **kwargs ): return self._db.Write( action, priority, *args, **kwargs )
def AddSession( self, service_identifier, account ): return self._session_manager.AddSession( service_identifier, account )
def AddSession( self, service_identifier, account_identifier ): return self._session_manager.AddSession( service_identifier, account_identifier )
def GetAccount( self, session_key, service_identifier ): return self._session_manager.GetAccount( session_key, service_identifier )

View File

@ -1290,9 +1290,9 @@ class ServiceDB( FileDB, MessageDB, TagDB ):
def _GetAccountMappingInfo( self, c, service_id, account_id ):
( num_deleted_mappings, ) = c.execute( 'SELECT COUNT( * ) FROM mapping_petitions WHERE service_id = ? AND account_id = ? AND status = ?;', ( service_id, account_id, HC.DELETED ) ).fetchone()
num_deleted_mappings = len( c.execute( 'SELECT 1 FROM mapping_petitions WHERE service_id = ? AND account_id = ? AND status = ? LIMIT 5000;', ( service_id, account_id, HC.DELETED ) ).fetchall() )
( num_mappings, ) = c.execute( 'SELECT COUNT( * ) FROM mappings WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone()
num_mappings = len( c.execute( 'SELECT 1 FROM mappings WHERE service_id = ? AND account_id = ? LIMIT 5000;', ( service_id, account_id ) ).fetchall() )
result = c.execute( 'SELECT score FROM account_scores WHERE service_id = ? AND account_id = ? AND score_type = ?;', ( service_id, account_id, HC.SCORE_PETITION ) ).fetchone()
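These count queries trade exactness for speed: instead of COUNT(*) over a potentially huge mappings table, they fetch at most 5000 placeholder rows and take len() of the result, which is why account_info now responds quickly for accounts with large tag counts. A standalone sqlite3 demonstration of the capped count:

import sqlite3

db = sqlite3.connect( ':memory:' )

c = db.cursor()

c.execute( 'CREATE TABLE mappings ( service_id INTEGER, account_id INTEGER, tag_id INTEGER );' )

c.executemany( 'INSERT INTO mappings VALUES ( ?, ?, ? );', [ ( 1, 1, i ) for i in range( 20000 ) ] )

# capped count: the scan stops after 5000 rows, so it stays fast for big accounts
num_mappings = len( c.execute( 'SELECT 1 FROM mappings WHERE service_id = ? AND account_id = ? LIMIT 5000;', ( 1, 1 ) ).fetchall() )

print( num_mappings ) # 5000, i.e. 'at least 5000'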
@ -1540,7 +1540,7 @@ class ServiceDB( FileDB, MessageDB, TagDB ):
return access_key
def _ModifyAccount( self, c, service_identifier, admin_account, action, subject_identifiers ):
def _ModifyAccount( self, c, service_identifier, admin_account, action, subject_identifiers, kwargs ):
service_id = self._GetServiceId( c, service_identifier )
@ -1552,22 +1552,22 @@ class ServiceDB( FileDB, MessageDB, TagDB ):
if action in ( HC.BAN, HC.SUPERBAN ):
reason = request_args[ 'reason' ]
reason = kwargs[ 'reason' ]
reason_id = self._GetReasonId( c, reason )
if expiration in request_args: expiration = request_args[ 'expiration' ]
if 'expiration' in kwargs: expiration = kwargs[ 'expiration' ]
else: expiration = None
self._Ban( c, service_id, action, admin_account_id, subject_account_ids, reason_id, expiration ) # fold ban and superban together, yo
else:
account.CheckPermission( HC.GENERAL_ADMIN ) # special case, don't let manage_users people do these:
admin_account.CheckPermission( HC.GENERAL_ADMIN ) # special case, don't let manage_users people do these:
if action == HC.CHANGE_ACCOUNT_TYPE:
title = request_args[ 'title' ]
title = kwargs[ 'title' ]
account_type_id = self._GetAccountTypeId( c, service_id, title )
@ -1575,13 +1575,13 @@ class ServiceDB( FileDB, MessageDB, TagDB ):
elif action == HC.ADD_TO_EXPIRES:
expiration = request_args[ 'expiration' ]
expiration = kwargs[ 'expiration' ]
self._AddToExpires( c, service_id, subject_account_ids, expiration )
elif action == HC.SET_EXPIRES:
expires = request_args[ 'expiry' ]
expires = kwargs[ 'expiry' ]
self._SetExpires( c, service_id, subject_account_ids, expires )

View File

@ -38,6 +38,10 @@ class FakeHTTPConnection():
def connect( self ): pass
def AddReportHook( self, hook ): pass
def ClearReportHooks( self ): pass
def GetCookies( self ): return self._cookies
def geturl( self, url, headers = {}, is_redirect = False, follow_redirects = True ):

View File

@ -1,5 +1,6 @@
import ClientConstants as CC
import ClientDB
import collections
import HydrusConstants as HC
import itertools
import os
@ -9,9 +10,21 @@ import TestConstants
import time
import threading
import unittest
import yaml
class TestClientDB( unittest.TestCase ):
def _clear_db( self ):
( db, c ) = self._db._GetDBCursor()
c.execute( 'DELETE FROM files_info;' )
c.execute( 'DELETE FROM mappings;' )
def _read( self, action, *args, **kwargs ): return self._db.Read( action, HC.HIGH_PRIORITY, *args, **kwargs )
def _write( self, action, *args, **kwargs ): return self._db.Write( action, HC.HIGH_PRIORITY, True, *args, **kwargs )
@classmethod
def setUpClass( self ):
@ -37,7 +50,7 @@ class TestClientDB( unittest.TestCase ):
self._db.Shutdown()
time.sleep( 3 )
while not self._db.GetLoopFinished(): time.sleep( 0.1 )
def make_temp_files_deletable( function_called, path, traceback_gumpf ):
@ -53,72 +66,223 @@ class TestClientDB( unittest.TestCase ):
HC.CLIENT_THUMBNAILS_DIR = self._old_client_thumbnails_dir
def test_folders_exist( self ):
def test_4chan_pass( self ):
self.assertTrue( os.path.exists( HC.DB_DIR ) )
result = self._read( '4chan_pass' )
self.assertTrue( os.path.exists( HC.DB_DIR + os.path.sep + 'client.db' ) )
self.assertTrue( result, ( '', '', 0 ) )
self.assertTrue( os.path.exists( HC.CLIENT_FILES_DIR ) )
token = 'token'
pin = 'pin'
timeout = HC.GetNow() + 100000
self.assertTrue( os.path.exists( HC.CLIENT_THUMBNAILS_DIR ) )
hex_chars = '0123456789abcdef'
self._write( '4chan_pass', token, pin, timeout )
for ( one, two ) in itertools.product( hex_chars, hex_chars ):
dir = HC.CLIENT_FILES_DIR + os.path.sep + one + two
self.assertTrue( os.path.exists( dir ) )
dir = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + one + two
self.assertTrue( os.path.exists( dir ) )
result = self._read( '4chan_pass' )
self.assertTrue( result, ( token, pin, timeout ) )
def test_import( self ):
def test_autocomplete( self ):
self._clear_db()
result = self._read( 'autocomplete_tags', half_complete_tag = 'c' )
self.assertEqual( result.GetMatches( 'c' ), [] )
result = self._read( 'autocomplete_tags', half_complete_tag = 'series:' )
self.assertEqual( result.GetMatches( 'series:' ), [] )
#
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
synchronous = True
self._write( 'import_file', path )
generate_media_result = True
#
( written_result, written_hash, written_media_result ) = self._db.Write( 'import_file', HC.HIGH_PRIORITY, synchronous, path, generate_media_result = True )
service_identifiers_to_content_updates = {}
self.assertEqual( written_result, 'successful' )
self.assertEqual( written_hash, hash )
content_updates = []
content_updates.append( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'car', ( hash, ) ) ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:cars', ( hash, ) ) ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'maker:ford', ( hash, ) ) ) )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_file_service_identifiers_cdpp, mr_local_ratings, mr_remote_ratings ) = written_media_result.ToTuple()
service_identifiers_to_content_updates[ HC.LOCAL_TAG_SERVICE_IDENTIFIER ] = content_updates
now = HC.GetNow()
self._write( 'content_updates', service_identifiers_to_content_updates )
self.assertEqual( mr_hash, hash )
self.assertEqual( mr_inbox, True )
self.assertEqual( mr_size, 5270 )
self.assertEqual( mr_mime, HC.IMAGE_PNG )
self.assertEqual( mr_hash, hash )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, 200 )
self.assertEqual( mr_height, 200 )
self.assertEqual( mr_duration, None )
self.assertEqual( mr_num_frames, None )
self.assertEqual( mr_num_words, None )
# cars
content_update = HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, ( hash, ) )
result = self._read( 'autocomplete_tags', half_complete_tag = 'c' )
service_identifiers_to_content_updates = { HC.LOCAL_FILE_SERVICE_IDENTIFIER : ( content_update, ) }
preds = set()
self._db.Write( 'content_updates', HC.HIGH_PRIORITY, synchronous, service_identifiers_to_content_updates )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cars' ), 1 ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'car' ), 1 ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'series:cars' ), 1 ) )
read_preds = result.GetMatches( 'c' )
# count isn't tested in predicate.__eq__, I think
for p in read_preds: self.assertEqual( p.GetCount(), 1 )
self.assertEqual( set( read_preds ), preds )
#
result = self._read( 'autocomplete_tags', half_complete_tag = 'ser' )
read_preds = result.GetMatches( 'ser' )
self.assertEqual( read_preds, [] )
#
result = self._read( 'autocomplete_tags', half_complete_tag = 'series:c' )
pred = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'series:cars' ), 1 )
( read_pred, ) = result.GetMatches( 'series:c' )
self.assertEqual( read_pred.GetCount(), 1 )
self.assertEqual( pred, read_pred )
def test_predicates( self ):
def test_booru( self ):
def run_tests( tests ):
for ( name, booru ) in CC.DEFAULT_BOORUS.items():
read_booru = self._read( 'booru', name )
self.assertEqual( booru.GetData(), read_booru.GetData() )
#
result = self._read( 'boorus' )
read_boorus = { booru.GetName() : booru for booru in result }
for name in CC.DEFAULT_BOORUS: self.assertEqual( read_boorus[ name ].GetData(), CC.DEFAULT_BOORUS[ name ].GetData() )
#
name = 'blah'
search_url = 'url'
search_separator = '%20'
advance_by_page_num = True
thumb_classname = 'thumb'
image_id = None
image_data = 'Download'
tag_classnames_to_namespaces = { 'tag' : '' }
booru = CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
edit_log = [ ( HC.ADD, 'blah' ), ( HC.EDIT, ( 'blah', booru ) ) ]
self._write( 'update_boorus', edit_log )
read_booru = self._read( 'booru', name )
self.assertEqual( booru.GetData(), read_booru.GetData() )
#
edit_log = [ ( HC.DELETE, 'blah' ) ]
self._write( 'update_boorus', edit_log )
with self.assertRaises( Exception ):
read_booru = self._read( 'booru', name )
def test_downloads( self ):
result = self._read( 'downloads' )
self.assertEqual( result, set() )
#
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
service_identifiers_to_content_updates = {}
service_identifiers_to_content_updates[ HC.LOCAL_FILE_SERVICE_IDENTIFIER ] = ( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_PENDING, ( hash, ) ), )
self._write( 'content_updates', service_identifiers_to_content_updates )
#
result = self._read( 'downloads' )
self.assertEqual( result, { hash } )
#
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
service_identifiers_to_content_updates = {}
service_identifiers_to_content_updates[ HC.LOCAL_FILE_SERVICE_IDENTIFIER ] = ( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_RESCIND_PENDING, ( hash, ) ), )
self._write( 'content_updates', service_identifiers_to_content_updates )
#
result = self._read( 'downloads' )
self.assertEqual( result, set() )
def test_favourite_custom_filter_actions( self ):
result = self._read( 'favourite_custom_filter_actions' )
self.assertEqual( result, dict() )
#
favourite_custom_filter_actions = { 'a' : 'blah', 'b' : 'bleh' }
self._write( 'favourite_custom_filter_actions', favourite_custom_filter_actions )
#
result = self._read( 'favourite_custom_filter_actions' )
self.assertEqual( result, favourite_custom_filter_actions )
def test_file_query_ids( self ):
self._clear_db()
def run_namespace_predicate_tests( tests ):
for ( operator, namespace, result ) in tests:
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( operator, namespace ), None ) ]
search_context = CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, predicates = predicates )
file_query_ids = self._read( 'file_query_ids', search_context )
self.assertEqual( len( file_query_ids ), result )
def run_system_predicate_tests( tests ):
for ( predicate_type, info, result ) in tests:
@ -126,7 +290,21 @@ class TestClientDB( unittest.TestCase ):
search_context = CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, predicates = predicates )
file_query_ids = self._db.Read( 'file_query_ids', HC.HIGH_PRIORITY, search_context )
file_query_ids = self._read( 'file_query_ids', search_context )
self.assertEqual( len( file_query_ids ), result )
def run_tag_predicate_tests( tests ):
for ( operator, tag, result ) in tests:
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, tag ), None ) ]
search_context = CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, predicates = predicates )
file_query_ids = self._read( 'file_query_ids', search_context )
self.assertEqual( len( file_query_ids ), result )
@ -144,7 +322,7 @@ class TestClientDB( unittest.TestCase ):
tests.append( ( HC.SYSTEM_PREDICATE_TYPE_NOT_LOCAL, None, 0 ) )
run_tests( tests )
run_system_predicate_tests( tests )
#
@ -152,9 +330,7 @@ class TestClientDB( unittest.TestCase ):
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
synchronous = True
self._db.Write( 'import_file', HC.HIGH_PRIORITY, synchronous, path )
self._write( 'import_file', path )
time.sleep( 1 )
@ -267,7 +443,7 @@ class TestClientDB( unittest.TestCase ):
# limit is not applied in file_query_ids! we do it later!
tests.append( ( HC.SYSTEM_PREDICATE_TYPE_LIMIT, 0, 1 ) )
run_tests( tests )
run_system_predicate_tests( tests )
#
@ -276,7 +452,7 @@ class TestClientDB( unittest.TestCase ):
service_identifiers_to_content_updates[ HC.LOCAL_FILE_SERVICE_IDENTIFIER ] = ( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, ( hash, ) ), )
service_identifiers_to_content_updates[ HC.LOCAL_TAG_SERVICE_IDENTIFIER ] = ( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'car', ( hash, ) ) ), )
self._db.Write( 'content_updates', HC.HIGH_PRIORITY, synchronous, service_identifiers_to_content_updates )
self._write( 'content_updates', service_identifiers_to_content_updates )
#
@ -294,7 +470,58 @@ class TestClientDB( unittest.TestCase ):
tests.append( ( HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, ( '>', 0 ), 1 ) )
tests.append( ( HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, ( '>', 1 ), 0 ) )
run_tests( tests )
run_system_predicate_tests( tests )
#
tests = []
tests.append( ( '+', 'car', 1 ) )
tests.append( ( '-', 'car', 0 ) )
tests.append( ( '+', 'bus', 0 ) )
tests.append( ( '-', 'bus', 1 ) )
run_tag_predicate_tests( tests )
#
tests = []
tests.append( ( '+', 'series', 0 ) )
tests.append( ( '-', 'series', 1 ) )
run_namespace_predicate_tests( tests )
#
service_identifiers_to_content_updates = {}
content_updates = []
content_updates.append( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:cars', ( hash, ) ) ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'maker:ford', ( hash, ) ) ) )
service_identifiers_to_content_updates[ HC.LOCAL_TAG_SERVICE_IDENTIFIER ] = content_updates
self._write( 'content_updates', service_identifiers_to_content_updates )
#
tests = []
tests.append( ( '+', 'maker:ford', 1 ) )
tests.append( ( '+', 'ford', 1 ) )
run_tag_predicate_tests( tests )
#
tests = []
tests.append( ( '+', 'series', 1 ) )
tests.append( ( '-', 'series', 0 ) )
run_namespace_predicate_tests( tests )
#
@ -302,7 +529,7 @@ class TestClientDB( unittest.TestCase ):
service_identifiers_to_content_updates = { HC.LOCAL_FILE_SERVICE_IDENTIFIER : ( content_update, ) }
self._db.Write( 'content_updates', HC.HIGH_PRIORITY, synchronous, service_identifiers_to_content_updates )
self._write( 'content_updates', service_identifiers_to_content_updates )
#
@ -318,26 +545,400 @@ class TestClientDB( unittest.TestCase ):
tests.append( ( HC.SYSTEM_PREDICATE_TYPE_NOT_LOCAL, None, 0 ) )
run_tests( tests )
run_system_predicate_tests( tests )
def test_file_system_predicates( self ):
self._clear_db()
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
self._write( 'import_file', path )
#
result = self._read( 'file_system_predicates', HC.LOCAL_FILE_SERVICE_IDENTIFIER )
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), 1 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_INBOX, None ), 1 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_ARCHIVE, None ), 0 ) )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ), None ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_SIZE, HC.SYSTEM_PREDICATE_TYPE_AGE, HC.SYSTEM_PREDICATE_TYPE_HASH, HC.SYSTEM_PREDICATE_TYPE_WIDTH, HC.SYSTEM_PREDICATE_TYPE_HEIGHT, HC.SYSTEM_PREDICATE_TYPE_RATIO, HC.SYSTEM_PREDICATE_TYPE_DURATION, HC.SYSTEM_PREDICATE_TYPE_NUM_WORDS, HC.SYSTEM_PREDICATE_TYPE_MIME, HC.SYSTEM_PREDICATE_TYPE_RATING, HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, HC.SYSTEM_PREDICATE_TYPE_FILE_SERVICE ] ] )
self.assertEqual( result, predicates )
for i in range( len( predicates ) ): self.assertEqual( result[i].GetCount(), predicates[i].GetCount() )
def test_imageboard( self ):
[ ( site_name_4chan, read_imageboards ) ] = self._read( 'imageboards' )
self.assertEqual( site_name_4chan, '4chan' )
[ ( site_name_4chan, imageboards ) ] = CC.DEFAULT_IMAGEBOARDS
read_imageboards = { imageboard.GetName() : imageboard for imageboard in read_imageboards }
imageboards = { imageboard.GetName() : imageboard for imageboard in imageboards }
self.assertItemsEqual( imageboards, read_imageboards )
def test_import( self ):
self._clear_db()
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
generate_media_result = True
( written_result, written_hash, written_media_result ) = self._write( 'import_file', path, generate_media_result = True )
self.assertEqual( written_result, 'successful' )
self.assertEqual( written_hash, hash )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_file_service_identifiers_cdpp, mr_local_ratings, mr_remote_ratings ) = written_media_result.ToTuple()
now = HC.GetNow()
self.assertEqual( mr_hash, hash )
self.assertEqual( mr_inbox, True )
self.assertEqual( mr_size, 5270 )
self.assertEqual( mr_mime, HC.IMAGE_PNG )
self.assertEqual( mr_hash, hash )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, 200 )
self.assertEqual( mr_height, 200 )
self.assertEqual( mr_duration, None )
self.assertEqual( mr_num_frames, None )
self.assertEqual( mr_num_words, None )
def test_import_folders( self ):
f1 = ( 'path1', { 'details' : 1 } )
f2a = ( 'path2', { 'details' : 2 } )
f2b = ( 'path2', { 'details' : 3 } )
#
result = self._read( 'import_folders' )
self.assertEqual( result, [] )
#
self._write( 'import_folders', [ f1, f2a ] )
#
result = self._read( 'import_folders' )
self.assertItemsEqual( [ f1, f2a ], result )
#
self._write( 'import_folder', *f2b )
#
result = self._read( 'import_folders' )
self.assertItemsEqual( [ f1, f2b ], result )
def test_init( self ):
self.assertTrue( os.path.exists( HC.DB_DIR ) )
self.assertTrue( os.path.exists( HC.DB_DIR + os.path.sep + 'client.db' ) )
self.assertTrue( os.path.exists( HC.CLIENT_FILES_DIR ) )
self.assertTrue( os.path.exists( HC.CLIENT_THUMBNAILS_DIR ) )
hex_chars = '0123456789abcdef'
for ( one, two ) in itertools.product( hex_chars, hex_chars ):
dir = HC.CLIENT_FILES_DIR + os.path.sep + one + two
self.assertTrue( os.path.exists( dir ) )
dir = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + one + two
self.assertTrue( os.path.exists( dir ) )
def test_md5_status( self ):
self._clear_db()
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
md5 = 'fdadb2cae78f2dfeb629449cd005f2a2'.decode( 'hex' )
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
#
result = self._read( 'md5_status', md5 )
self.assertEqual( result, ( 'new', None ) )
#
self._write( 'import_file', path )
#
result = self._read( 'md5_status', md5 )
self.assertEqual( result, ( 'redundant', hash ) )
#
content_update = HC.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, ( hash, ) )
service_identifiers_to_content_updates = { HC.LOCAL_FILE_SERVICE_IDENTIFIER : ( content_update, ) }
self._write( 'content_updates', service_identifiers_to_content_updates )
#
HC.options[ 'exclude_deleted_files' ] = True
result = self._read( 'md5_status', md5 )
self.assertEqual( result, ( 'deleted', None ) )
HC.options[ 'exclude_deleted_files' ] = False
result = self._read( 'md5_status', md5 )
self.assertEqual( result, ( 'new', None ) )
def test_media_results( self ):
self._clear_db()
hash = '\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'
md5 = 'fdadb2cae78f2dfeb629449cd005f2a2'.decode( 'hex' )
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
self._write( 'import_file', path )
#
( media_result, ) = self._read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_file_service_identifiers_cdpp, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
now = HC.GetNow()
self.assertEqual( mr_hash, hash )
self.assertEqual( mr_inbox, True )
self.assertEqual( mr_size, 5270 )
self.assertEqual( mr_mime, HC.IMAGE_PNG )
self.assertEqual( mr_hash, hash )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, 200 )
self.assertEqual( mr_height, 200 )
self.assertEqual( mr_duration, None )
self.assertEqual( mr_num_frames, None )
self.assertEqual( mr_num_words, None )
( media_result, ) = self._read( 'media_results_from_ids', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( 1, ) )
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_file_service_identifiers_cdpp, mr_local_ratings, mr_remote_ratings ) = media_result.ToTuple()
now = HC.GetNow()
self.assertEqual( mr_hash, hash )
self.assertEqual( mr_inbox, True )
self.assertEqual( mr_size, 5270 )
self.assertEqual( mr_mime, HC.IMAGE_PNG )
self.assertEqual( mr_hash, hash )
self.assertLessEqual( now - 10, mr_timestamp )
self.assertLessEqual( mr_timestamp, now + 10 )
self.assertEqual( mr_width, 200 )
self.assertEqual( mr_height, 200 )
self.assertEqual( mr_duration, None )
self.assertEqual( mr_num_frames, None )
self.assertEqual( mr_num_words, None )
def test_namespace_blacklists( self ):
result = self._read( 'namespace_blacklists' )
self.assertEqual( result, [] )
result = self._read( 'namespace_blacklists', HC.LOCAL_TAG_SERVICE_IDENTIFIER )
self.assertEqual( result, ( True, [] ) )
#
namespace_blacklists = []
namespace_blacklists.append( ( HC.LOCAL_TAG_SERVICE_IDENTIFIER, False, [ '', 'series' ] ) )
namespace_blacklists.append( ( HC.LOCAL_FILE_SERVICE_IDENTIFIER, True, [ '' ] ) ) # bit dodgy, but whatever!
self._write( 'namespace_blacklists', namespace_blacklists )
#
result = self._read( 'namespace_blacklists' )
self.assertItemsEqual( result, namespace_blacklists )
result = self._read( 'namespace_blacklists', HC.LOCAL_TAG_SERVICE_IDENTIFIER )
self.assertEqual( result, ( False, [ '', 'series' ] ) )
def test_news( self ):
result = self._read( 'news', HC.LOCAL_TAG_SERVICE_IDENTIFIER )
self.assertEqual( result, [] )
#
news = []
news.append( ( 'hello', HC.GetNow() - 30000 ) )
news.append( ( 'hello again', HC.GetNow() - 20000 ) )
service_updates = dict()
service_updates[ HC.LOCAL_TAG_SERVICE_IDENTIFIER ] = [ HC.ServiceUpdate( HC.SERVICE_UPDATE_NEWS, news ) ]
self._write( 'service_updates', service_updates )
#
result = self._read( 'news', HC.LOCAL_TAG_SERVICE_IDENTIFIER )
self.assertItemsEqual( result, news )
def test_nums_pending( self ):
result = self._read( 'nums_pending' )
self.assertEqual( result, {} )
# we can do more testing when I add repo service to this testing framework
def test_pending( self ):
pass
# result = self._read( 'pending', service_identifier )
# do more when I do remote repos
def test_pixiv_account( self ):
result = self._read( 'pixiv_account' )
self.assertTrue( result, ( '', '' ) )
pixiv_id = 123456
password = 'password'
self._write( 'pixiv_account', pixiv_id, password )
result = self._read( 'pixiv_account' )
self.assertTrue( result, ( pixiv_id, password ) )
def test_ratings_filter( self ):
# add ratings services
# fetch no half-ratings
# then add some half-ratings to files
# fetch them
# apply some full ratings
# fetch them, check the difference
pass
def test_ratings_media_result( self ):
# add some ratings, slice different media results?
# check exactly what this does again, figure a good test
pass
def test_services( self ):
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_FILE, ) )
result = self._read( 'service_identifiers', ( HC.LOCAL_FILE, ) )
self.assertEqual( result, { HC.LOCAL_FILE_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_TAG, ) )
result = self._read( 'service_identifiers', ( HC.LOCAL_TAG, ) )
self.assertEqual( result, { HC.LOCAL_TAG_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.COMBINED_FILE, ) )
result = self._read( 'service_identifiers', ( HC.COMBINED_FILE, ) )
self.assertEqual( result, { HC.COMBINED_FILE_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.COMBINED_TAG, ) )
result = self._read( 'service_identifiers', ( HC.COMBINED_TAG, ) )
self.assertEqual( result, { HC.COMBINED_TAG_SERVICE_IDENTIFIER } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_FILE, HC.COMBINED_FILE ) )
result = self._read( 'service_identifiers', ( HC.LOCAL_FILE, HC.COMBINED_FILE ) )
self.assertEqual( result, { HC.LOCAL_FILE_SERVICE_IDENTIFIER, HC.COMBINED_FILE_SERVICE_IDENTIFIER } )
#
result = self._read( 'service', HC.LOCAL_FILE_SERVICE_IDENTIFIER )
self.assertEqual( result.GetServiceIdentifier(), HC.LOCAL_FILE_SERVICE_IDENTIFIER )
result = self._read( 'service', HC.LOCAL_TAG_SERVICE_IDENTIFIER )
self.assertEqual( result.GetServiceIdentifier(), HC.LOCAL_TAG_SERVICE_IDENTIFIER )
result = self._read( 'services', ( HC.LOCAL_FILE, HC.LOCAL_TAG ) )
result_s_is = { service.GetServiceIdentifier() for service in result }
self.assertItemsEqual( { HC.LOCAL_FILE_SERVICE_IDENTIFIER, HC.LOCAL_TAG_SERVICE_IDENTIFIER }, result_s_is )
#
result = self._read( 'service_info', HC.LOCAL_FILE_SERVICE_IDENTIFIER )
self.assertEqual( type( result ), dict )
for ( k, v ) in result.items():
self.assertEqual( type( k ), int )
self.assertEqual( type( v ), int )
#
new_tag_repo = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.TAG_REPOSITORY, 'new tag repo' )
@ -359,15 +960,15 @@ class TestClientDB( unittest.TestCase ):
edit_log.append( ( HC.ADD, ( new_local_like, None, new_local_like_extra_info ) ) )
edit_log.append( ( HC.ADD, ( new_local_numerical, None, new_local_numerical_extra_info ) ) )
self._db.Write( 'update_services', HC.HIGH_PRIORITY, True, edit_log )
self._write( 'update_services', edit_log )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.TAG_REPOSITORY, ) )
result = self._read( 'service_identifiers', ( HC.TAG_REPOSITORY, ) )
self.assertEqual( result, { new_tag_repo, other_new_tag_repo } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_RATING_LIKE, ) )
result = self._read( 'service_identifiers', ( HC.LOCAL_RATING_LIKE, ) )
self.assertEqual( result, { new_local_like } )
result = self._db.Read( 'service_identifiers', HC.HIGH_PRIORITY, ( HC.LOCAL_RATING_NUMERICAL, ) )
result = self._read( 'service_identifiers', ( HC.LOCAL_RATING_NUMERICAL, ) )
self.assertEqual( result, { new_local_numerical } )
#
@ -381,19 +982,60 @@ class TestClientDB( unittest.TestCase ):
edit_log.append( ( HC.DELETE, new_local_like ) )
edit_log.append( ( HC.EDIT, ( other_new_tag_repo, ( other_new_tag_repo_updated, other_new_tag_repo_credentials_updated, None ) ) ) )
self._db.Write( 'update_services', HC.HIGH_PRIORITY, True, edit_log )
self._write( 'update_services', edit_log )
# now delete local_like, test that
# edit other_tag_repo, test that
#
result = self._db.Read( 'service', HC.HIGH_PRIORITY, new_tag_repo )
result = self._read( 'service', new_tag_repo )
# test credentials
result = self._db.Read( 'services', HC.HIGH_PRIORITY, ( HC.TAG_REPOSITORY, ) )
result = self._read( 'services', ( HC.TAG_REPOSITORY, ) )
# test there are two, and test credentials
def test_sessions( self ):
result = self._read( 'hydrus_sessions' )
self.assertEqual( result, [] )
session = ( HC.LOCAL_FILE_SERVICE_IDENTIFIER, os.urandom( 32 ), HC.GetNow() + 100000 )
self._write( 'hydrus_session', *session )
result = self._read( 'hydrus_sessions' )
self.assertEqual( result, [ session ] )
#
result = self._read( 'web_sessions' )
self.assertEqual( result, [] )
session = ( 'website name', [ 'cookie 1', 'cookie 2' ], HC.GetNow() + 100000 )
self._write( 'web_session', *session )
result = self._read( 'web_sessions' )
self.assertEqual( result, [ session ] )
def test_shutdown_timestamps( self ):
result = self._read( 'shutdown_timestamps' )
self.assertEqual( type( result ), collections.defaultdict )
for ( k, v ) in result.items():
self.assertEqual( type( k ), int )
self.assertEqual( type( v ), int )

View File

@ -108,7 +108,8 @@ class TestServer( unittest.TestCase ):
self.assertEqual( data, 'file' )
os.remove( path )
try: os.remove( path )
except: pass
#
@ -124,7 +125,8 @@ class TestServer( unittest.TestCase ):
self.assertEqual( data, 'thumb' )
os.remove( path )
try: os.remove( path )
except: pass
def _test_file_repo( self, host, port ):
@ -149,7 +151,8 @@ class TestServer( unittest.TestCase ):
self.assertEqual( response, 'file' )
os.remove( path )
try: os.remove( path )
except: pass
path = HC.STATIC_DIR + os.path.sep + 'hydrus.png'
@ -191,7 +194,8 @@ class TestServer( unittest.TestCase ):
self.assertEqual( response, 'thumb' )
os.remove( path )
try: os.remove( path )
except: pass
def _test_repo( self, host, port, service_type ):
@ -254,7 +258,8 @@ class TestServer( unittest.TestCase ):
self.assertEqual( response, update )
os.remove( path )
try: os.remove( path )
except: pass
connection.Post( 'update', update = update )

View File

@ -65,7 +65,7 @@ class App( wx.App ):
return True
def AddSession( self, service_identifier, account ): return self._server_session_manager.AddSession( service_identifier, account )
def AddSession( self, service_identifier, account_identifier ): return self._server_session_manager.AddSession( service_identifier, account_identifier )
def GetAccount( self, session_key, service_identifier ): return self._server_session_manager.GetAccount( session_key, service_identifier )