Version 289

This commit is contained in:
Hydrus Network Developer 2018-01-10 16:41:51 -06:00
parent 7b6947f843
commit 2a123aa6b1
28 changed files with 1123 additions and 325 deletions

View File

@ -90,6 +90,23 @@ try:
HydrusData.Print( traceback.format_exc() )
try:
import wx
message = 'The client failed to start. The error follows (it has also been written to the log in the db directory). If it is not obvious, please inform hydrus dev.'
message += os.linesep * 2
message += traceback.format_exc()
wx.SafeShowMessage( 'hydrus client failed', message )
except:
pass
finally:
HG.view_shutdown = True

View File

@ -8,6 +8,36 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 289</h3></li>
<ul>
<li>fixed an issue where scrollbars were only appearing on taglists after a resize event</li>
<li>fixed the raw filename component of file drag and drop events from the client to external programs</li>
<li>fixed the tag lookup scripts</li>
<li>fixed some wx menu highlight issues</li>
<li>improved some shutdown code</li>
<li>fixed the add/edit namespace colours options panel, which needed to be updated to deal with the new wx's better alpha channel reporting</li>
<li>fixed an issue when hitting 'open externally' on a media collection</li>
<li>fixed a crash on client shutdown whenever closed pages were in the undo menu</li>
<li>think I fixed another shutdown crash</li>
<li>fixed a rare issue with the collect by dropdown not being able to generate a string to display</li>
<li>misc wx fixes</li>
<li>added an 'importing' page to the options dialog, which now sets the default file import options for quiet and loud file import contexts (a short sketch of these defaults follows this list)</li>
<li>the old and no longer used 'exclude deleted files' option is now removed from the 'files and trash' panel</li>
<li>finished off default url matches for all downloaders that come with the program--these will be set on update, so if you have custom ones, please export them before you update so you can import them again!</li>
<li>improved how urls are matched and presented for the user in the media viewer</li>
<li>added a 'delete "successful" file imports from the queue' entry to the file import status button right-click menu--this only removes 'successful' and 'already in db', leaving anomalies</li>
<li>improved locale instantiation in the client and added locale strings to the help->about dialog</li>
<li>you can now set the page name prefix for a paused thread checker. it defaults to a unicode pause character: ⏸</li>
<li>thread watchers will no longer pause on a network error during a check--they now have a 'delay' system like subscriptions, and on a network error, they will delay four hours (unless you hit 'check now')</li>
<li>patched in some simple 'connection cutoff' network error handling--we'll see how it does</li>
<li>wrote in some more proper error handling for a specific connection cutoff error that is being produced</li>
<li>the similar files search tree regen code now clears out orphaned files. if you have had blank 'unknown' files appear in similar files searches, please run database->regen->similar files search tree</li>
<li>bitmap buttons on download pages will now update using less CPU and will flicker less</li>
<li>improved some video rendering error reporting</li>
<li>fixed the 'author name' regex favourite default, which had a superfluous asterisk. if you would like to fix it yourself, please try: [^\\]+(?=\s-)</li>
<li>added 'flush log' debug command</li>
<li>client.pyw now makes a safe 'system' ui error popup if it fails to boot</li>
</ul>
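To make the new 'importing' defaults concrete, here is a minimal, self-contained sketch of the quiet/loud split described above. The keyword names and the 'quiet'/'loud' keys come from the ClientOptions diff further down; the namedtuple and the module-level dictionary are simplified stand-ins for the real ClientImporting.FileImportOptions class and the serialisable options store.
import collections
# stand-in for ClientImporting.FileImportOptions, using the same field names as the ClientOptions diff below
FileImportOptions = collections.namedtuple( 'FileImportOptions', [ 'automatic_archive', 'exclude_deleted', 'present_new_files', 'present_already_in_inbox_files', 'present_archived_files', 'min_size', 'min_resolution' ] )
# quiet contexts (subscriptions and similar) only present genuinely new files;
# loud contexts (manual imports) present everything they touch
default_file_import_options = {}
default_file_import_options[ 'quiet' ] = FileImportOptions( automatic_archive = False, exclude_deleted = True, present_new_files = True, present_already_in_inbox_files = False, present_archived_files = False, min_size = None, min_resolution = None )
default_file_import_options[ 'loud' ] = FileImportOptions( automatic_archive = False, exclude_deleted = True, present_new_files = True, present_already_in_inbox_files = True, present_archived_files = True, min_size = None, min_resolution = None )
def GetDefaultFileImportOptions( options_type ):
    # mirrors ClientOptions.GetDefaultFileImportOptions in the diff below, minus the lock
    return default_file_import_options[ options_type ]
# a manual local-file import window now asks for the loud defaults, as in the import frame diff below
file_import_options = GetDefaultFileImportOptions( 'loud' )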
<li><h3>version 288</h3></li>
<ul>
<li>updated to wxPython Phoenix (4.0) build!</li>

View File

@ -86,22 +86,6 @@ class Controller( HydrusController.HydrusController ):
return ClientDB.DB( self, self.db_dir, 'client', no_wal = self._no_wal )
def _CreateSplash( self ):
try:
self._splash = ClientGUI.FrameSplash( self )
except:
HydrusData.Print( 'There was an error trying to start the splash screen!' )
HydrusData.Print( traceback.format_exc() )
raise
def _DestroySplash( self ):
if self._splash is not None:
@ -249,6 +233,22 @@ class Controller( HydrusController.HydrusController ):
def CreateSplash( self ):
try:
self._splash = ClientGUI.FrameSplash( self )
except:
HydrusData.Print( 'There was an error trying to start the splash screen!' )
HydrusData.Print( traceback.format_exc() )
raise
def CurrentlyIdle( self ):
if HG.force_idle_mode:
@ -350,12 +350,12 @@ class Controller( HydrusController.HydrusController ):
self.ShutdownView()
self.ShutdownModel()
HydrusData.CleanRunningFile( self.db_dir, 'client' )
else:
try:
self._CreateSplash()
idle_shutdown_action = self.options[ 'idle_shutdown' ]
if idle_shutdown_action in ( CC.IDLE_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN_ASK_FIRST ):
@ -554,6 +554,8 @@ class Controller( HydrusController.HydrusController ):
domain_manager = ClientNetworkingDomain.NetworkDomainManager()
ClientDefaults.SetDefaultDomainManagerData( domain_manager )
domain_manager._dirty = True
wx.MessageBox( 'Your domain manager was missing on boot! I have recreated a new empty one. Please check that your hard drive and client are ok and let the hydrus dev know the details if there is a mystery.' )
@ -770,7 +772,7 @@ class Controller( HydrusController.HydrusController ):
if HydrusData.TimeHasPassed( self._timestamps[ 'last_page_change' ] + 30 * 60 ):
self.pub( 'clear_closed_pages' )
self.pub( 'delete_old_closed_pages' )
self._timestamps[ 'last_page_change' ] = HydrusData.GetNow()
@ -1004,11 +1006,15 @@ class Controller( HydrusController.HydrusController ):
self._app.locale = wx.Locale( wx.LANGUAGE_DEFAULT ) # Very important to init this here and keep it non garbage collected
import locale
locale.setlocale( locale.LC_ALL, '' )
HydrusData.Print( u'booting controller\u2026' )
self.frame_icon = wx.Icon( os.path.join( HC.STATIC_DIR, 'hydrus_32_non-transparent.png' ), wx.BITMAP_TYPE_PNG )
self._CreateSplash()
self.CreateSplash()
self.CallToThreadLongRunning( self.THREADBootEverything )
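A quick sketch of the locale handling above, combined with the new help->about reporting shown in the FrameGUI diff further down. The wx.App here stands in for self._app; it assumes wxPython Phoenix and a normal desktop environment.
import locale
import wx
app = wx.App()
app.locale = wx.Locale( wx.LANGUAGE_DEFAULT ) # keep a reference so it is not garbage collected
locale.setlocale( locale.LC_ALL, '' )
l_string = locale.getlocale()[0] # e.g. 'en_GB', or None if undetermined
wxl_string = app.locale.GetCanonicalName() # e.g. 'en_GB'
print( 'locale strings: ' + repr( ( l_string, wxl_string ) ) )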

View File

@ -1830,6 +1830,10 @@ class DB( HydrusDB.HydrusDB ):
self._controller.pub( 'modal_message', job_key )
job_key.SetVariable( 'popup_text_1', 'purging search info of orphans' )
self._c.execute( 'DELETE FROM shape_perceptual_hash_map WHERE hash_id NOT IN ( SELECT hash_id FROM current_files );' )
job_key.SetVariable( 'popup_text_1', 'gathering all leaves' )
self._c.execute( 'DELETE FROM shape_vptree;' )
@ -10107,6 +10111,15 @@ class DB( HydrusDB.HydrusDB ):
if version == 288:
domain_manager = self._GetJSONDump( HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_DOMAIN_MANAGER )
domain_manager.SetURLMatches( ClientDefaults.GetDefaultURLMatches() )
self._SetJSONDump( domain_manager )
self._controller.pub( 'splash_set_title_text', 'updated db to v' + str( version + 1 ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )

View File

@ -949,6 +949,7 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):
self._dictionary[ 'noneable_strings' ][ 'backup_path' ] = None
self._dictionary[ 'noneable_strings' ][ 'thread_watcher_not_found_page_string' ] = '[404]'
self._dictionary[ 'noneable_strings' ][ 'thread_watcher_dead_page_string' ] = '[DEAD]'
self._dictionary[ 'noneable_strings' ][ 'thread_watcher_paused_page_string' ] = u'\u23F8'
self._dictionary[ 'strings' ] = {}
@ -971,6 +972,34 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):
#
self._dictionary[ 'default_file_import_options' ] = HydrusSerialisable.SerialisableDictionary()
automatic_archive = False
exclude_deleted = True
min_size = None
min_resolution = None
present_new_files = True
present_already_in_inbox_files = False
present_archived_files = False
import ClientImporting
quiet_file_import_options = ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, present_new_files = present_new_files, present_already_in_inbox_files = present_already_in_inbox_files, present_archived_files = present_archived_files, min_size = min_size, min_resolution = min_resolution )
self._dictionary[ 'default_file_import_options' ][ 'quiet' ] = quiet_file_import_options
present_new_files = True
present_already_in_inbox_files = True
present_archived_files = True
loud_file_import_options = ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, present_new_files = present_new_files, present_already_in_inbox_files = present_already_in_inbox_files, present_archived_files = present_archived_files, min_size = min_size, min_resolution = min_resolution )
self._dictionary[ 'default_file_import_options' ][ 'loud' ] = loud_file_import_options
#
self._dictionary[ 'default_import_tag_options' ] = HydrusSerialisable.SerialisableDictionary()
#
@ -1263,6 +1292,14 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):
def GetDefaultFileImportOptions( self, options_type ):
with self._lock:
return self._dictionary[ 'default_file_import_options' ][ options_type ]
def GetDefaultTagImportOptions( self, gallery_identifier = None ):
with self._lock:
@ -1651,6 +1688,14 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):
def SetDefaultFileImportOptions( self, options_type, file_import_options ):
with self._lock:
self._dictionary[ 'default_file_import_options' ][ options_type ] = file_import_options
def SetFrameLocation( self, frame_key, remember_size, remember_position, last_size, last_position, default_gravity, default_position, maximised, fullscreen ):
with self._lock:

View File

@ -1,8 +1,5 @@
import ClientConstants as CC
import ClientData
import ClientImporting
import ClientNetworking
import ClientNetworkingDomain
import HydrusConstants as HC
import HydrusGlobals as HG
import HydrusNetworking
@ -11,6 +8,8 @@ import wx
def SetDefaultBandwidthManagerRules( bandwidth_manager ):
import ClientNetworking
KB = 1024
MB = 1024 ** 2
GB = 1024 ** 3
@ -98,6 +97,9 @@ def SetDefaultDomainManagerData( domain_manager ):
#
import ClientNetworking
import ClientNetworkingDomain
custom_header_dict = {}
custom_header_dict[ 'User-Agent' ] = ( 'hydrus client', ClientNetworkingDomain.VALID_APPROVED, 'This is the default User-Agent identifier for the client for all network connections.' )
@ -118,6 +120,10 @@ def SetDefaultDomainManagerData( domain_manager ):
domain_manager.SetNetworkContextsToCustomHeaderDicts( network_contexts_to_custom_header_dicts )
#
domain_manager.SetURLMatches( GetDefaultURLMatches() )
def GetClientDefaultOptions():
options = {}
@ -127,7 +133,6 @@ def GetClientDefaultOptions():
options[ 'export_path' ] = None
options[ 'hpos' ] = 400
options[ 'vpos' ] = 700
options[ 'exclude_deleted_files' ] = False
options[ 'thumbnail_cache_size' ] = 25 * 1048576
options[ 'preview_cache_size' ] = 15 * 1048576
options[ 'fullscreen_cache_size' ] = 150 * 1048576
@ -161,7 +166,7 @@ def GetClientDefaultOptions():
regex_favourites = []
regex_favourites.append( ( r'[1-9]+\d*(?=.{4}$)', u'\u2026' + r'0074.jpg -> 74' ) )
regex_favourites.append( ( r'[^' + os.path.sep.encode( 'string_escape' ) + ']+*(?=\s-)', r'E:\my collection\author name - v4c1p0074.jpg -> author name' ) )
regex_favourites.append( ( r'[^' + os.path.sep.encode( 'string_escape' ) + r']+(?=\s-)', r'E:\my collection\author name - v4c1p0074.jpg -> author name' ) )
options[ 'regex_favourites' ] = regex_favourites
@ -253,31 +258,6 @@ def GetDefaultHentaiFoundryInfo():
return info
def GetDefaultFileImportOptions( for_quiet_queue = False ):
automatic_archive = False
exclude_deleted = HG.client_controller.options[ 'exclude_deleted_files' ]
if for_quiet_queue:
present_new_files = True
present_already_in_inbox_files = False
present_archived_files = False
else:
present_new_files = True
present_already_in_inbox_files = True
present_archived_files = True
min_size = None
min_resolution = None
file_import_options = ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, present_new_files = present_new_files, present_already_in_inbox_files = present_already_in_inbox_files, present_archived_files = present_archived_files, min_size = min_size, min_resolution = min_resolution )
return file_import_options
def GetDefaultNamespacesAndSearchValue( gallery_identifier ):
site_type = gallery_identifier.GetSiteType()
@ -811,7 +791,7 @@ def GetDefaultURLMatches():
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'm' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_REGEX, match_value = '\d+\..+', example_string = '1512858563140.jpg' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_REGEX, match_value = r'\d+\..+', example_string = '1512858563140.jpg' ) )
parameters = {}
@ -834,7 +814,7 @@ def GetDefaultURLMatches():
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'tv' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'res', example_string = 'res' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_REGEX, match_value = '\d+\.html', example_string = '1002432.html' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_REGEX, match_value = r'\d+\.html', example_string = '1002432.html' ) )
parameters = {}
@ -856,7 +836,7 @@ def GetDefaultURLMatches():
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'file_store', example_string = 'file_store' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_REGEX, match_value = '[\dabcdef]+\..+', example_string = '91b6d00f8f274d4505cabe690fbcf0b896a846e35a7faa8d2ddde5214d5bdc71.jpg' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_REGEX, match_value = r'[\dabcdef]+\..+', example_string = '91b6d00f8f274d4505cabe690fbcf0b896a846e35a7faa8d2ddde5214d5bdc71.jpg' ) )
parameters = {}
example_url = 'https://media.8ch.net/file_store/91b6d00f8f274d4505cabe690fbcf0b896a846e35a7faa8d2ddde5214d5bdc71.jpg'
@ -867,29 +847,6 @@ def GetDefaultURLMatches():
#
name = 'hentai foundry artist pictures gallery page base'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'www.hentai-foundry.com'
allow_subdomains = False
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'pictures', example_string = 'pictures' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'user', example_string = 'user' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'daruak' ) )
parameters = {}
example_url = 'https://www.hentai-foundry.com/pictures/user/daruak'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'hentai foundry artist pictures gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
@ -902,8 +859,6 @@ def GetDefaultURLMatches():
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'pictures', example_string = 'pictures' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'user', example_string = 'user' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'daruak' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'page', example_string = 'page' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '2' ) )
parameters = {}
@ -915,30 +870,6 @@ def GetDefaultURLMatches():
#
name = 'hentai foundry artist scraps gallery page base'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'www.hentai-foundry.com'
allow_subdomains = False
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'pictures', example_string = 'pictures' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'user', example_string = 'user' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'Sparrow' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'scraps', example_string = 'scraps' ) )
parameters = {}
example_url = 'https://www.hentai-foundry.com/pictures/user/Sparrow/scraps'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'hentai foundry artist scraps gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
@ -952,8 +883,6 @@ def GetDefaultURLMatches():
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'user', example_string = 'user' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'Sparrow' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'scraps', example_string = 'scraps' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'page', example_string = 'page' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '3' ) )
parameters = {}
@ -965,30 +894,6 @@ def GetDefaultURLMatches():
#
name = 'hentai foundry tag search gallery page base'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'www.hentai-foundry.com'
allow_subdomains = False
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'search', example_string = 'search' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'index', example_string = 'index' ) )
parameters = {}
parameters[ 'query' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'thick_thighs' )
example_url = 'https://www.hentai-foundry.com/search/index?query=thick_thighs'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'hentai foundry tag search gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
@ -1003,9 +908,6 @@ def GetDefaultURLMatches():
parameters = {}
parameters[ 'query' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'thick_thighs' )
parameters[ 'page' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '5' )
example_url = 'https://www.hentai-foundry.com/search/index?query=thick_thighs&page=5'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
@ -1299,7 +1201,7 @@ def GetDefaultURLMatches():
parameters[ 'page' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' )
parameters[ 's' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'list', example_string = 'list' )
example_url = 'http://tbib.org/index.php?page=post&s=list&tags=alternate_hairstyle'
example_url = 'https://tbib.org/index.php?page=post&s=list&tags=alternate_hairstyle'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
@ -1324,7 +1226,7 @@ def GetDefaultURLMatches():
parameters[ 's' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'view', example_string = 'view' )
parameters[ 'id' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '6391256' )
example_url = 'http://tbib.org/index.php?page=post&s=view&id=6391256'
example_url = 'https://tbib.org/index.php?page=post&s=view&id=6391256'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
@ -1332,47 +1234,605 @@ def GetDefaultURLMatches():
#
'''
name = 'e621 gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'e621.net'
allow_subdomains = True
keep_subdomains = False
name = 'e621'
search_url = 'https://e621.net/post/index/%index%/%tags%'
path_components = []
name = 'rule34@paheal'
search_url = 'https://rule34.paheal.net/post/list/%tags%/%index%'
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'index', example_string = 'index' ) )
name = 'rule34hentai'
search_url = 'https://rule34hentai.net/post/list/%tags%/%index%'
parameters = {}
name = 'mishimmie'
search_url = 'https://shimmie.katawa-shoujo.com/post/list/%tags%/%index%'
example_url = 'https://e621.net/post/index/1/smile'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'danbooru'
search_url = 'https://danbooru.donmai.us/posts?page=%index%&tags=%tags%'
name = 'e621 file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'e621.net'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'show', example_string = 'show' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '1421754' ) )
parameters = {}
example_url = 'https://e621.net/post/show/1421754/abstract_background-animal_humanoid-blush-brown_ey'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'konachan'
search_url = 'https://konachan.com/post?page=%index%&tags=%tags%'
name = 'rule34.paheal gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'rule34.paheal.net'
allow_subdomains = True
keep_subdomains = False
name = 'yande.re'
search_url = 'https://yande.re/post?page=%index%&tags=%tags%'
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'list', example_string = 'list' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'Pyra' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '1' ) )
parameters = {}
example_url = 'http://rule34.paheal.net/post/list/Pyra/1'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'sankaku chan'
search_url = 'https://chan.sankakucomplex.com/?tags=%tags%&page=%index%'
name = 'rule34.paheal file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'rule34.paheal.net'
allow_subdomains = True
keep_subdomains = False
name = 'sankaku idol'
search_url = 'https://idol.sankakucomplex.com/?tags=%tags%&page=%index%'
path_components = []
'''
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'view', example_string = 'view' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '2442974' ) )
# Deviant Art
# Newgrounds
# pixiv
# tumblr
parameters = {}
example_url = 'http://rule34.paheal.net/post/view/2442974'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'rule34hentai gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'rule34hentai.net'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'list', example_string = 'list' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'Triss_Merigold' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '1' ) )
parameters = {}
example_url = 'https://rule34hentai.net/post/list/Triss_Merigold/1'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'rule34hentai file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'rule34hentai.net'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'view', example_string = 'view' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '289558' ) )
parameters = {}
example_url = 'https://rule34hentai.net/post/view/289558'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'mishimmie gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'shimmie.katawa-shoujo.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'list', example_string = 'list' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'hanako' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '1' ) )
parameters = {}
example_url = 'http://shimmie.katawa-shoujo.com/post/list/hanako/1'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'mishimmie file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'shimmie.katawa-shoujo.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'view', example_string = 'view' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '4565' ) )
parameters = {}
example_url = 'http://shimmie.katawa-shoujo.com/post/view/4565'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'danbooru gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'danbooru.donmai.us'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'posts', example_string = 'posts' ) )
parameters = {}
example_url = 'https://danbooru.donmai.us/posts?page=1&tags=mikasa_ackerman'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'danbooru file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'danbooru.donmai.us'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'posts', example_string = 'posts' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '2982422' ) )
parameters = {}
example_url = 'https://danbooru.donmai.us/posts/2982422'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'konachan gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'konachan.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
parameters = {}
example_url = 'https://konachan.com/post?page=1&tags=landscape'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'konachan file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'konachan.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'show', example_string = 'show' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '258390' ) )
parameters = {}
example_url = 'https://konachan.com/post/show/258390/black_hair-dress-flowers-landscape-long_hair-neckl'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'yande.re gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'yande.re'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
parameters = {}
parameters[ 'page' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '1' )
example_url = 'https://yande.re/post?page=1&tags=atago_%28azur_lane%29'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'yande.re file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'yande.re'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'show', example_string = 'show' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '428714' ) )
parameters = {}
example_url = 'https://yande.re/post/show/428714'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'sankaku chan gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'chan.sankakucomplex.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
parameters = {}
example_url = 'https://chan.sankakucomplex.com/?tags=tomboy&page=1'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'sankaku chan file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'chan.sankakucomplex.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'show', example_string = 'show' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '6586014' ) )
parameters = {}
example_url = 'https://chan.sankakucomplex.com/post/show/6586014'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'sankaku idol gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'idol.sankakucomplex.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
parameters = {}
example_url = 'https://idol.sankakucomplex.com/?tags=akagi_kuro&page=1'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'sankaku idol file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'idol.sankakucomplex.com'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'show', example_string = 'show' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '695512' ) )
parameters = {}
example_url = 'https://idol.sankakucomplex.com/post/show/695512'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'deviant art artist gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'deviantart.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'gallery', example_string = 'gallery' ) )
parameters = {}
example_url = 'https://starca.deviantart.com/gallery/?catpath=/&offset=0'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'deviant art file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'deviantart.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'art', example_string = 'art' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_ANY, example_string = 'Commission-animation-Elsa-and-Anna-541820782' ) )
parameters = {}
example_url = 'https://starca.deviantart.com/art/Commission-animation-Elsa-and-Anna-541820782'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'newgrounds games gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'newgrounds.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'games', example_string = 'games' ) )
parameters = {}
example_url = 'https://matt-likes-swords.newgrounds.com/games/'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'newgrounds movies gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'newgrounds.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'movies', example_string = 'movies' ) )
parameters = {}
example_url = 'https://sambakza.newgrounds.com/movies/'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'newgrounds file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'newgrounds.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'portal', example_string = 'portal' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'view', example_string = 'view' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '161181' ) )
parameters = {}
example_url = 'https://www.newgrounds.com/portal/view/161181'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'pixiv artist gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'pixiv.net'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'member_illust.php', example_string = 'member_illust.php' ) )
parameters = {}
parameters[ 'id' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '115625' )
example_url = 'https://www.pixiv.net/member_illust.php?id=115625&type=illust&p=1'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'pixiv file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'https'
netloc = 'pixiv.net'
allow_subdomains = True
keep_subdomains = False
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'member_illust.php', example_string = 'member_illust.php' ) )
parameters = {}
parameters[ 'illust_id' ] = ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '66476204' )
example_url = 'https://www.pixiv.net/member_illust.php?mode=medium&illust_id=66476204'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'tumblr api gallery page'
url_type = HC.URL_TYPE_GALLERY
preferred_scheme = 'https'
netloc = 'tumblr.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'api', example_string = 'api' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'read', example_string = 'read' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'json', example_string = 'json' ) )
parameters = {}
example_url = 'https://sautte-fashion.tumblr.com/api/read/json?start=0&num=50'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
#
name = 'tumblr file page'
url_type = HC.URL_TYPE_POST
preferred_scheme = 'http' # wew
netloc = 'tumblr.com'
allow_subdomains = True
keep_subdomains = True
path_components = []
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FIXED, match_value = 'post', example_string = 'post' ) )
path_components.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC, example_string = '169381609093' ) )
parameters = {}
example_url = 'http://sautte-fashion.tumblr.com/post/169381609093/favorite-looks-from-alexander-mcqueen-resort-2012'
url_match = ClientNetworkingDomain.URLMatch( name, url_type = url_type, preferred_scheme = preferred_scheme, netloc = netloc, allow_subdomains = allow_subdomains, keep_subdomains = keep_subdomains, path_components = path_components, parameters = parameters, example_url = example_url )
url_matches.append( url_match )
return url_matches
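For illustration, this is roughly what one of the definitions above asserts about a URL--the 'danbooru file page' entry wants the danbooru.donmai.us domain (subdomains allowed), a fixed 'posts' path component and then a numeric one. This is a simplified, hypothetical check using urlparse, not the real ClientNetworkingDomain.URLMatch logic.
try:
    from urllib.parse import urlparse # Python 3
except ImportError:
    from urlparse import urlparse # Python 2
def looks_like_danbooru_file_page( url ):
    parsed = urlparse( url )
    if not parsed.netloc.endswith( 'danbooru.donmai.us' ): # allow_subdomains = True
        return False
    path_components = [ component for component in parsed.path.split( '/' ) if component != '' ]
    # a fixed 'posts' component followed by a numeric post id
    return len( path_components ) == 2 and path_components[0] == 'posts' and path_components[1].isdigit()
print( looks_like_danbooru_file_page( 'https://danbooru.donmai.us/posts/2982422' ) ) # True
print( looks_like_danbooru_file_page( 'https://danbooru.donmai.us/posts?page=1&tags=mikasa_ackerman' ) ) # False, that is the gallery page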

View File

@ -114,7 +114,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
self.Bind( wx.EVT_TIMER, self.TIMEREventAnimationUpdate, id = ID_TIMER_ANIMATION_UPDATE )
self._controller.sub( self, 'AddModalMessage', 'modal_message' )
self._controller.sub( self, 'ClearClosedPages', 'clear_closed_pages' )
self._controller.sub( self, 'DeleteOldClosedPages', 'delete_old_closed_pages' )
self._controller.sub( self, 'NewPageImportHDD', 'new_hdd_import' )
self._controller.sub( self, 'NewPageQuery', 'new_page_query' )
self._controller.sub( self, 'NotifyClosedPage', 'notify_closed_page' )
@ -201,6 +201,13 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
library_versions.append( ( 'wx', wx.version() ) )
library_versions.append( ( 'temp dir', HydrusPaths.tempfile.gettempdir() ) )
import locale
l_string = locale.getlocale()[0]
wxl_string = self._controller._app.locale.GetCanonicalName()
library_versions.append( ( 'locale strings', HydrusData.ToUnicode( ( l_string, wxl_string ) ) ) )
description = 'This client is the media management application of the hydrus software suite.'
description += os.linesep * 2 + os.linesep.join( ( lib + ': ' + version for ( lib, version ) in library_versions ) )
@ -1578,6 +1585,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
ClientGUIMenus.AppendMenuItem( self, debug, 'make some popups', 'Throw some varied popups at the message manager, just to check it is working.', self._DebugMakeSomePopups )
ClientGUIMenus.AppendMenuItem( self, debug, 'make a popup in five seconds', 'Throw a delayed popup at the message manager, giving you time to minimise or otherwise alter the client before it arrives.', ClientThreading.CallLater, self, 5, HydrusData.ShowText, 'This is a delayed popup message.' )
ClientGUIMenus.AppendMenuItem( self, debug, 'force a gui layout now', 'Tell the gui to relayout--useful to test some gui bootup layout issues.', self.Layout )
ClientGUIMenus.AppendMenuItem( self, debug, 'flush log', 'Command the log to write any buffered contents to hard drive.', HydrusData.DebugPrint, 'Flushing log' )
ClientGUIMenus.AppendMenuItem( self, debug, 'print garbage', 'Print some information about the python garbage to the log.', self._DebugPrintGarbage )
ClientGUIMenus.AppendMenuItem( self, debug, 'clear image rendering cache', 'Tell the image rendering system to forget all current images. This will often free up a bunch of memory immediately.', self._controller.ClearCaches )
ClientGUIMenus.AppendMenuItem( self, debug, 'clear db service info cache', 'Delete all cached service info like total number of mappings or files, in case it has become desynchronised. Some parts of the gui may be laggy immediately after this as these numbers are recalculated.', self._DeleteServiceInfo )
@ -3040,7 +3048,31 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def ClearClosedPages( self ):
def CurrentlyBusy( self ):
return False
def DeleteAllClosedPages( self ):
with self._lock:
deletee_pages = [ page for ( time_closed, page ) in self._closed_pages ]
self._closed_pages = []
if len( deletee_pages ) > 0:
self._DestroyPages( deletee_pages )
self._focus_holder.SetFocus()
self._controller.pub( 'notify_new_undo' )
def DeleteOldClosedPages( self ):
new_closed_pages = []
@ -3077,27 +3109,6 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
self._DestroyPages( deletee_pages )
def CurrentlyBusy( self ):
return False
def DeleteAllClosedPages( self ):
with self._lock:
deletee_pages = [ page for ( time_closed, page ) in self._closed_pages ]
self._closed_pages = []
self._DestroyPages( deletee_pages )
self._focus_holder.SetFocus()
self._controller.pub( 'notify_new_undo' )
def EventCharHook( self, event ):
if ClientGUIShortcuts.IShouldCatchCharHook( self ):
@ -3254,6 +3265,8 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def Exit( self, restart = False ):
# the return value here is 'exit allowed'
if not HG.emergency_exit:
if HC.options[ 'confirm_client_exit' ]:
@ -3303,6 +3316,8 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
self._DestroyTimers()
self.DeleteAllClosedPages() # wx crashes if any are left in here, wew
self._message_manager.CleanBeforeDestroy()
self._message_manager.Hide()
@ -3334,10 +3349,16 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
if HG.emergency_exit:
self.Destroy()
self._controller.Exit()
else:
ClientThreading.CallLater( self, 2, self.Destroy )
self._controller.CreateSplash()
wx.CallAfter( self._controller.Exit )

View File

@ -693,6 +693,22 @@ class AnimationBar( wx.Window ):
self.Bind( wx.EVT_ERASE_BACKGROUND, self.EventEraseBackground )
def _GetAnimationBarStatus( self ):
if FLASHWIN_OK and isinstance( self._media_window, wx.lib.flashwin.FlashWindow ):
current_frame = self._media_window.CurrentFrame()
paused = False
buffer_indices = None
return ( current_frame, paused, buffer_indices )
else:
return self._media_window.GetAnimationBarStatus()
def _GetXFromFrameIndex( self, index, width_offset = 0 ):
if self._num_frames < 2:
@ -707,7 +723,7 @@ class AnimationBar( wx.Window ):
def _Redraw( self, dc ):
self._last_drawn_info = self._media_window.GetAnimationBarStatus()
self._last_drawn_info = self._GetAnimationBarStatus()
( current_frame_index, paused, buffer_indices ) = self._last_drawn_info
@ -961,7 +977,7 @@ class AnimationBar( wx.Window ):
raise
if self._last_drawn_info != self._media_window.GetAnimationBarStatus():
if self._last_drawn_info != self._GetAnimationBarStatus():
self._dirty = True
@ -4883,7 +4899,6 @@ class MediaContainer( wx.Window ):
media_window.Hide()
media_window.Destroy()
#ClientThreading.CallLater( self, 0.05, media_window.Destroy )

View File

@ -142,6 +142,24 @@ def NotebookScreenToHitTest( notebook, screen_position ):
return notebook.HitTest( position )
def SetBitmapButtonBitmap( button, bitmap ):
# the button's bitmap, retrieved via GetBitmap, is not the same as the one we gave it!
# hence testing bitmap vs that won't work to save time on an update loop, so we'll just save it here custom
# this isn't a big memory deal for our purposes since they are small and mostly if not all from the GlobalBMPs library so shared anyway
if hasattr( button, 'last_bitmap' ):
if button.last_bitmap == bitmap:
return
button.SetBitmap( bitmap )
button.last_bitmap = bitmap
def TLPHasFocus( window ):
focus_tlp = GetFocusTLP()
@ -495,6 +513,8 @@ class CheckboxCollect( wx.ComboCtrl ):
self._initial_collect_by = collect_by
self._control = None
def Create( self, parent ):
@ -513,6 +533,21 @@ class CheckboxCollect( wx.ComboCtrl ):
return self._control
def GetStringValue( self ):
# this is an abstract method that provides the string to put in the comboctrl
# I've never used/needed it, but one user reported getting the NotImplemented thing by repeatedly clicking, so let's add it anyway
if self._control is None:
return 'initialising'
else:
return self._control.GetDescription()
class _Control( wx.CheckListBox ):
def __init__( self, parent, special_parent, collect_by ):
@ -608,6 +643,13 @@ class CheckboxCollect( wx.ComboCtrl ):
self._BroadcastCollect()
def GetDescription( self ):
( collect_by, description ) = self._GetValues()
return description
def SetValue( self, collect_by ):
# an old possible value, now collapsed to []

View File

@ -810,7 +810,7 @@ class FrameInputLocalFiles( wx.Frame ):
self._progress_cancel = ClientGUICommon.BetterBitmapButton( self, CC.GlobalBMPs.stop, self.StopProgress )
self._progress_cancel.Disable()
file_import_options = ClientDefaults.GetDefaultFileImportOptions()
file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
self._file_import_options = ClientGUIImport.FileImportOptionsButton( self, file_import_options )
@ -1064,14 +1064,14 @@ class FrameInputLocalFiles( wx.Frame ):
self._add_button.Enable()
self._tag_button.Enable()
self._progress_pause.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._progress_pause, CC.GlobalBMPs.play )
else:
self._add_button.Disable()
self._tag_button.Disable()
self._progress_pause.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._progress_pause, CC.GlobalBMPs.pause )

View File

@ -333,12 +333,12 @@ class FullscreenHoverFrameTop( FullscreenHoverFrame ):
if self._current_media.HasInbox():
self._archive_button.SetBitmapLabel( CC.GlobalBMPs.archive )
ClientGUICommon.SetBitmapButtonBitmap( self._archive_button, CC.GlobalBMPs.archive )
self._archive_button.SetToolTip( 'archive' )
else:
self._archive_button.SetBitmapLabel( CC.GlobalBMPs.to_inbox )
ClientGUICommon.SetBitmapButtonBitmap( self._archive_button, CC.GlobalBMPs.to_inbox )
self._archive_button.SetToolTip( 'return to inbox' )
@ -535,7 +535,7 @@ class FullscreenHoverFrameTopArchiveDeleteFilter( FullscreenHoverFrameTop ):
def _ResetArchiveButton( self ):
self._archive_button.SetBitmapLabel( CC.GlobalBMPs.archive )
ClientGUICommon.SetBitmapButtonBitmap( self._archive_button, CC.GlobalBMPs.archive )
self._archive_button.SetToolTip( 'archive' )

View File

@ -387,6 +387,8 @@ class ListBox( wx.ScrolledWindow ):
def _DataHasChanged( self ):
self._SetVirtualSize()
self._SetDirty()
wx.PostEvent( self.GetEventHandler(), ListBoxEvent( -1 ) )
@ -730,6 +732,18 @@ class ListBox( wx.ScrolledWindow ):
self.Refresh()
def _SetVirtualSize( self ):
( my_x, my_y ) = self.GetClientSize()
ideal_virtual_size = ( my_x, max( self._text_y * len( self._ordered_terms ), my_y ) )
if ideal_virtual_size != self.GetVirtualSize():
self.SetVirtualSize( ideal_virtual_size )
def _SortByText( self ):
def lexicographic_key( term ):
@ -849,12 +863,7 @@ class ListBox( wx.ScrolledWindow ):
self._num_rows_per_page = my_y / self._text_y
ideal_virtual_size = ( my_x, max( self._text_y * len( self._ordered_terms ), my_y ) )
if ideal_virtual_size != self.GetVirtualSize():
self.SetVirtualSize( ideal_virtual_size )
self._SetVirtualSize()
self._SetDirty()
@ -1765,7 +1774,9 @@ class ListBoxTagsColourOptions( ListBoxTags ):
def SetNamespaceColour( self, namespace, colour ):
colour = tuple( colour )
( r, g, b, a ) = colour.Get()
colour_tuple = ( r, g, b )
for ( existing_namespace, existing_colour ) in self._terms:
@ -1777,7 +1788,7 @@ class ListBoxTagsColourOptions( ListBoxTags ):
self._AppendTerm( ( namespace, colour ) )
self._AppendTerm( ( namespace, colour_tuple ) )
self._SortByText()
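The four-component unpack added above follows from the new wx reporting an alpha channel by default from Colour.Get, while the namespace colour list keeps storing plain rgb tuples. A quick, self-contained illustration:

import wx

colour = wx.Colour( 0, 170, 0 )

print( colour.Get() )      # (0, 170, 0, 255) -- newer wxPython includes the alpha channel by default
print( colour.Get()[:3] )  # (0, 170, 0) -- the plain rgb tuple the namespace colour list stores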

View File

@ -1109,9 +1109,9 @@ class ManagementPanelDuplicateFilter( ManagementPanel ):
self._cog_button.Enable()
self._phashes_button.SetBitmap( CC.GlobalBMPs.play )
self._branches_button.SetBitmap( CC.GlobalBMPs.play )
self._search_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._phashes_button, CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._branches_button, CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._search_button, CC.GlobalBMPs.play )
total_num_files = max( num_phashes_to_regen, sum( searched_distances_to_count.values() ) )
@ -1466,32 +1466,20 @@ class ManagementPanelImporterGallery( ManagementPanelImporter ):
if files_paused:
if self._files_pause_button.GetBitmap() != CC.GlobalBMPs.play:
self._files_pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._files_pause_button, CC.GlobalBMPs.play )
else:
if self._files_pause_button.GetBitmap() != CC.GlobalBMPs.pause:
self._files_pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._files_pause_button, CC.GlobalBMPs.pause )
if gallery_paused:
if self._gallery_pause_button.GetBitmap() != CC.GlobalBMPs.play:
self._gallery_pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._gallery_pause_button, CC.GlobalBMPs.play )
else:
if self._gallery_pause_button.GetBitmap() != CC.GlobalBMPs.pause:
self._gallery_pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._gallery_pause_button, CC.GlobalBMPs.pause )
if gallery_cancellable:
@ -1671,17 +1659,11 @@ class ManagementPanelImporterHDD( ManagementPanelImporter ):
if paused:
if self._pause_button.GetBitmap() != CC.GlobalBMPs.play:
self._pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_button, CC.GlobalBMPs.play )
else:
if self._pause_button.GetBitmap() != CC.GlobalBMPs.pause:
self._pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_button, CC.GlobalBMPs.pause )
if paused:
@ -1905,32 +1887,20 @@ class ManagementPanelImporterPageOfImages( ManagementPanelImporter ):
if queue_paused:
if self._pause_queue_button.GetBitmap() != CC.GlobalBMPs.play:
self._pause_queue_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_queue_button, CC.GlobalBMPs.play )
else:
if self._pause_queue_button.GetBitmap() != CC.GlobalBMPs.pause:
self._pause_queue_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_queue_button, CC.GlobalBMPs.pause )
if files_paused:
if self._pause_files_button.GetBitmap() != CC.GlobalBMPs.play:
self._pause_files_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_files_button, CC.GlobalBMPs.play )
else:
if self._pause_files_button.GetBitmap() != CC.GlobalBMPs.pause:
self._pause_files_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_files_button, CC.GlobalBMPs.pause )
@ -2076,8 +2046,10 @@ class ManagementPanelImporterThreadWatcher( ManagementPanelImporter ):
( thread_url, file_import_options, tag_import_options ) = self._thread_watcher_import.GetOptions()
( namespaces, search_value ) = ClientDefaults.GetDefaultNamespacesAndSearchValue( ClientDownloading.GalleryIdentifier( HC.SITE_TYPE_THREAD_WATCHER ) )
self._file_import_options = ClientGUIImport.FileImportOptionsButton( self._thread_watcher_panel, file_import_options, self._thread_watcher_import.SetFileImportOptions )
self._tag_import_options = ClientGUIImport.TagImportOptionsButton( self._thread_watcher_panel, [ 'filename' ], tag_import_options, self._thread_watcher_import.SetTagImportOptions )
self._tag_import_options = ClientGUIImport.TagImportOptionsButton( self._thread_watcher_panel, namespaces, tag_import_options, self._thread_watcher_import.SetTagImportOptions )
#
@ -2208,17 +2180,11 @@ class ManagementPanelImporterThreadWatcher( ManagementPanelImporter ):
current_action = 'pausing, ' + current_action
if self._files_pause_button.GetBitmap() != CC.GlobalBMPs.play:
self._files_pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._files_pause_button, CC.GlobalBMPs.play )
else:
if self._files_pause_button.GetBitmap() != CC.GlobalBMPs.pause:
self._files_pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._files_pause_button, CC.GlobalBMPs.pause )
self._current_action.SetLabelText( current_action )
@ -2232,10 +2198,7 @@ class ManagementPanelImporterThreadWatcher( ManagementPanelImporter ):
watcher_status = 'paused'
if self._thread_pause_button.GetBitmap() != CC.GlobalBMPs.play:
self._thread_pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._thread_pause_button, CC.GlobalBMPs.play )
else:
@ -2244,10 +2207,7 @@ class ManagementPanelImporterThreadWatcher( ManagementPanelImporter ):
watcher_status = 'next check ' + HydrusData.ConvertTimestampToPrettyPending( next_check_time )
if self._thread_pause_button.GetBitmap() != CC.GlobalBMPs.pause:
self._thread_pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._thread_pause_button, CC.GlobalBMPs.pause )
self._watcher_status.SetLabelText( watcher_status )
@ -2481,17 +2441,11 @@ class ManagementPanelImporterURLs( ManagementPanelImporter ):
if paused:
if self._pause_button.GetBitmap() != CC.GlobalBMPs.play:
self._pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_button, CC.GlobalBMPs.play )
else:
if self._pause_button.GetBitmap() != CC.GlobalBMPs.pause:
self._pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_button, CC.GlobalBMPs.pause )

View File

@ -926,10 +926,12 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
if self._focussed_media is not None:
if self._focussed_media.GetLocationsManager().IsLocal():
open_externally_media = self._focussed_media.GetDisplayMedia()
if open_externally_media.GetLocationsManager().IsLocal():
hash = self._focussed_media.GetHash()
mime = self._focussed_media.GetMime()
hash = open_externally_media.GetHash()
mime = open_externally_media.GetMime()
client_files_manager = HG.client_controller.client_files_manager
@ -2508,8 +2510,6 @@ class MediaPanelThumbnails( MediaPanel ):
original_paths.append( original_path )
original_paths = []
#
do_temp_dnd = False

View File

@ -2793,7 +2793,7 @@ class ScriptManagementControl( wx.Panel ):
def TIMERUIUpdate( self, event ):
def TIMERUIUpdate( self ):
with self._lock:

View File

@ -244,11 +244,11 @@ class PopupMessage( PopupWindow ):
if self._job_key.IsPaused():
self._pause_button.SetBitmap( CC.GlobalBMPs.play )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_button, CC.GlobalBMPs.play )
else:
self._pause_button.SetBitmap( CC.GlobalBMPs.pause )
ClientGUICommon.SetBitmapButtonBitmap( self._pause_button, CC.GlobalBMPs.pause )

View File

@ -3324,11 +3324,12 @@ class EditURLMatchesPanel( ClientGUIScrolledPanels.EditPanel ):
self._list_ctrl_panel.AddSeparator()
self._list_ctrl_panel.AddButton( 'add the examples', self._AddExamples )
self._list_ctrl.Sort( 0 )
#
self._list_ctrl.AddDatas( url_matches )
self._list_ctrl.Sort( 0 )
#
vbox = wx.BoxSizer( wx.VERTICAL )

View File

@ -1293,6 +1293,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._listbook.AddPage( 'regex favourites', 'regex favourites', self._RegexPanel( self._listbook ) )
self._listbook.AddPage( 'sort/collect', 'sort/collect', self._SortCollectPanel( self._listbook ) )
self._listbook.AddPage( 'downloading', 'downloading', self._DownloadingPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'importing', 'importing', self._ImportingPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'tags', 'tags', self._TagsPanel( self._listbook, self._new_options ) )
#
@ -1661,6 +1662,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._thread_watcher_not_found_page_string = ClientGUICommon.NoneableTextCtrl( thread_checker, none_phrase = 'do not show' )
self._thread_watcher_dead_page_string = ClientGUICommon.NoneableTextCtrl( thread_checker, none_phrase = 'do not show' )
self._thread_watcher_paused_page_string = ClientGUICommon.NoneableTextCtrl( thread_checker, none_phrase = 'do not show' )
checker_options = self._new_options.GetDefaultThreadCheckerOptions()
@ -1676,6 +1678,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._thread_watcher_not_found_page_string.SetValue( self._new_options.GetNoneableString( 'thread_watcher_not_found_page_string' ) )
self._thread_watcher_dead_page_string.SetValue( self._new_options.GetNoneableString( 'thread_watcher_dead_page_string' ) )
self._thread_watcher_paused_page_string.SetValue( self._new_options.GetNoneableString( 'thread_watcher_paused_page_string' ) )
#
@ -1697,8 +1700,9 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
rows = []
rows.append( ( 'Permit thread checkers to name their own pages:', self._permit_watchers_to_name_their_pages ) )
rows.append( ( 'Prepend thread checker page names with this on 404:', self._thread_watcher_not_found_page_string ) )
rows.append( ( 'Prepend thread checker page names with this on death:', self._thread_watcher_dead_page_string ) )
rows.append( ( 'Prepend 404 thread checker page names with this:', self._thread_watcher_not_found_page_string ) )
rows.append( ( 'Prepend dead thread checker page names with this:', self._thread_watcher_dead_page_string ) )
rows.append( ( 'Prepend paused thread checker page names with this:', self._thread_watcher_paused_page_string ) )
gridbox = ClientGUICommon.WrapInGrid( thread_checker, rows )
@ -1728,6 +1732,56 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._new_options.SetNoneableString( 'thread_watcher_not_found_page_string', self._thread_watcher_not_found_page_string.GetValue() )
self._new_options.SetNoneableString( 'thread_watcher_dead_page_string', self._thread_watcher_dead_page_string.GetValue() )
self._new_options.SetNoneableString( 'thread_watcher_paused_page_string', self._thread_watcher_paused_page_string.GetValue() )
class _ImportingPanel( wx.Panel ):
def __init__( self, parent, new_options ):
wx.Panel.__init__( self, parent )
self._new_options = new_options
#
default_fios = ClientGUICommon.StaticBox( self, 'default file import options' )
import ClientGUIImport
quiet_file_import_options = self._new_options.GetDefaultFileImportOptions( 'quiet' )
self._quiet_fios = ClientGUIImport.FileImportOptionsButton( default_fios, quiet_file_import_options )
loud_file_import_options = self._new_options.GetDefaultFileImportOptions( 'loud' )
self._loud_fios = ClientGUIImport.FileImportOptionsButton( default_fios, loud_file_import_options )
#
rows = []
rows.append( ( 'For \'quiet\' import contexts like import folders and subscriptions:', self._quiet_fios ) )
rows.append( ( 'For import contexts that work on pages:', self._loud_fios ) )
gridbox = ClientGUICommon.WrapInGrid( default_fios, rows )
default_fios.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
#
vbox = wx.BoxSizer( wx.VERTICAL )
vbox.Add( default_fios, CC.FLAGS_EXPAND_PERPENDICULAR )
self.SetSizer( vbox )
def UpdateOptions( self ):
self._new_options.SetDefaultFileImportOptions( 'quiet', self._quiet_fios.GetValue() )
self._new_options.SetDefaultFileImportOptions( 'loud', self._loud_fios.GetValue() )
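Since the rest of this commit swaps the old ClientDefaults call for new_options.GetDefaultFileImportOptions( 'quiet' ) or ( 'loud' ), here is a simplified, hypothetical sketch of the context-keyed defaults this panel edits; plain strings stand in for the real FileImportOptions objects:

class FakeNewOptions( object ):
    
    def __init__( self ):
        
        # 'quiet' contexts are import folders and subscriptions, 'loud' contexts are pages
        self._default_file_import_options = { 'quiet' : 'quiet defaults', 'loud' : 'loud defaults' }
        
    
    def GetDefaultFileImportOptions( self, context ):
        
        return self._default_file_import_options[ context ]
        
    
    def SetDefaultFileImportOptions( self, context, file_import_options ):
        
        self._default_file_import_options[ context ] = file_import_options
        
    

options = FakeNewOptions()

options.SetDefaultFileImportOptions( 'loud', 'new loud defaults' ) # what this panel's UpdateOptions does

print( options.GetDefaultFileImportOptions( 'loud' ) ) # what a new download page would start with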
@ -2150,7 +2204,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._export_location = wx.DirPickerCtrl( self, style = wx.DIRP_USE_TEXTCTRL )
self._delete_to_recycle_bin = wx.CheckBox( self, label = '' )
self._exclude_deleted_files = wx.CheckBox( self, label = '' )
self._remove_filtered_files = wx.CheckBox( self, label = '' )
self._remove_trashed_files = wx.CheckBox( self, label = '' )
@ -2175,7 +2228,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
self._delete_to_recycle_bin.SetValue( HC.options[ 'delete_to_recycle_bin' ] )
self._exclude_deleted_files.SetValue( HC.options[ 'exclude_deleted_files' ] )
self._remove_filtered_files.SetValue( HC.options[ 'remove_filtered_files' ] )
self._remove_trashed_files.SetValue( HC.options[ 'remove_trashed_files' ] )
self._trash_max_age.SetValue( HC.options[ 'trash_max_age' ] )
@ -2200,7 +2252,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
rows.append( ( 'Default export directory: ', self._export_location ) )
rows.append( ( 'When deleting files or folders, send them to the OS\'s recycle bin: ', self._delete_to_recycle_bin ) )
rows.append( ( 'By default, do not reimport files that have been previously deleted: ', self._exclude_deleted_files ) )
rows.append( ( 'Remove files from view when they are filtered: ', self._remove_filtered_files ) )
rows.append( ( 'Remove files from view when they are sent to the trash: ', self._remove_trashed_files ) )
rows.append( ( 'Number of hours a file can be in the trash before being deleted: ', self._trash_max_age ) )
@ -2292,7 +2343,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
HC.options[ 'export_path' ] = HydrusPaths.ConvertAbsPathToPortablePath( HydrusData.ToUnicode( self._export_location.GetPath() ) )
HC.options[ 'delete_to_recycle_bin' ] = self._delete_to_recycle_bin.GetValue()
HC.options[ 'exclude_deleted_files' ] = self._exclude_deleted_files.GetValue()
HC.options[ 'remove_filtered_files' ] = self._remove_filtered_files.GetValue()
HC.options[ 'remove_trashed_files' ] = self._remove_trashed_files.GetValue()
HC.options[ 'trash_max_age' ] = self._trash_max_age.GetValue()

View File

@ -223,7 +223,7 @@ class EditSeedCachePanel( ClientGUIScrolledPanels.EditPanel ):
ClientGUIMenus.AppendMenuItem( self, menu, 'try again', 'Reset the progress of all the selected imports.', HydrusData.Call( self._SetSelected, CC.STATUS_UNKNOWN ) )
ClientGUIMenus.AppendMenuItem( self, menu, 'skip', 'Skip all the selected imports.', HydrusData.Call( self._SetSelected, CC.STATUS_SKIPPED ) )
ClientGUIMenus.AppendMenuItem( self, menu, 'delete', 'Remove all the selected imports.', self._DeleteSelected )
ClientGUIMenus.AppendMenuItem( self, menu, 'delete from list', 'Remove all the selected imports.', self._DeleteSelected )
HG.client_controller.PopupMenu( self, menu )
@ -311,7 +311,7 @@ class SeedCacheButton( ClientGUICommon.BetterBitmapButton ):
def _ClearProcessed( self ):
message = 'Are you sure you want to delete all the processed (i.e. anything with a non-blank status in the larger window) files? This is useful for cleaning up and de-laggifying a very large list, but not much else.'
message = 'Are you sure you want to delete all the processed (i.e. anything with a non-blank status in the larger window) file imports? This is useful for cleaning up and de-laggifying a very large list, but not much else.'
with ClientGUIDialogs.DialogYesNo( self, message ) as dlg:
@ -324,6 +324,21 @@ class SeedCacheButton( ClientGUICommon.BetterBitmapButton ):
def _ClearSuccessful( self ):
message = 'Are you sure you want to delete all the successful/already in db file imports? This is useful for cleaning up and de-laggifying a very large list and leaving only failed and otherwise skipped entries.'
with ClientGUIDialogs.DialogYesNo( self, message ) as dlg:
if dlg.ShowModal() == wx.ID_YES:
seed_cache = self._seed_cache_get_callable()
seed_cache.RemoveSuccessfulSeeds()
def _GetExportableSourcesString( self ):
seed_cache = self._seed_cache_get_callable()
@ -502,11 +517,18 @@ class SeedCacheButton( ClientGUICommon.BetterBitmapButton ):
num_unknown = seed_cache.GetSeedCount( CC.STATUS_UNKNOWN )
num_successful = seed_cache.GetSeedCount( CC.STATUS_SUCCESSFUL ) + seed_cache.GetSeedCount( CC.STATUS_REDUNDANT )
if num_successful > 0:
ClientGUIMenus.AppendMenuItem( self, menu, 'delete ' + HydrusData.ConvertIntToPrettyString( num_successful ) + ' \'successful\' file imports from the queue', 'Tell this cache to clear out successful/already in db files, reducing the size of the queue.', self._ClearSuccessful )
num_processed = len( seed_cache ) - num_unknown
if num_processed > 0:
ClientGUIMenus.AppendMenuItem( self, menu, 'delete ' + HydrusData.ConvertIntToPrettyString( num_processed ) + ' \'processed\' files from the queue', 'Tell this cache to clear out processed files, reducing the size of the queue.', self._ClearProcessed )
ClientGUIMenus.AppendMenuItem( self, menu, 'delete ' + HydrusData.ConvertIntToPrettyString( num_processed ) + ' \'processed\' file imports from the queue', 'Tell this cache to clear out processed files, reducing the size of the queue.', self._ClearProcessed )
ClientGUIMenus.AppendSeparator( menu )

View File

@ -336,14 +336,17 @@ class NewDialog( wx.Dialog ):
def EventMenuHighlight( self, event ):
status_bar = HG.client_controller.GetGUI().GetStatusBar()
if len( self._menu_stack ) > 0:
text = ''
menu = self._menu_stack[-1]
if not menu:
return
if menu is not None:
menu_item = menu.FindItemById( event.GetMenuId() )
@ -354,6 +357,8 @@ class NewDialog( wx.Dialog ):
status_bar = HG.client_controller.GetGUI().GetStatusBar()
status_bar.SetStatusText( text )
@ -706,14 +711,17 @@ class Frame( wx.Frame ):
def EventMenuHighlight( self, event ):
status_bar = HG.client_controller.GetGUI().GetStatusBar()
if len( self._menu_stack ) > 0:
text = ''
menu = self._menu_stack[-1]
if not menu:
return
if bool( menu ) and menu is not None:
menu_item = menu.FindItemById( event.GetMenuId() )
@ -724,6 +732,8 @@ class Frame( wx.Frame ):
status_bar = HG.client_controller.GetGUI().GetStatusBar()
status_bar.SetStatusText( text )

View File

@ -245,7 +245,7 @@ class FileImportJob( object ):
if file_import_options is None:
file_import_options = ClientDefaults.GetDefaultFileImportOptions()
file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
self._temp_path = temp_path
@ -441,7 +441,7 @@ class GalleryImport( HydrusSerialisable.SerialisableBase ):
self._gallery_paused = False
self._files_paused = False
self._file_import_options = ClientDefaults.GetDefaultFileImportOptions()
self._file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
self._tag_import_options = new_options.GetDefaultTagImportOptions( self._gallery_identifier )
@ -1896,14 +1896,12 @@ class ImportFolder( HydrusSerialisable.SerialisableBaseNamed ):
if file_import_options is None:
file_import_options = ClientDefaults.GetDefaultFileImportOptions( for_quiet_queue = True )
file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'quiet' )
if tag_import_options is None:
new_options = HG.client_controller.new_options
tag_import_options = new_options.GetDefaultTagImportOptions( ClientDownloading.GalleryIdentifier( HC.SITE_TYPE_DEFAULT ) )
tag_import_options = HG.client_controller.new_options.GetDefaultTagImportOptions( ClientDownloading.GalleryIdentifier( HC.SITE_TYPE_DEFAULT ) )
if tag_service_keys_to_filename_tagging_options is None:
@ -2413,7 +2411,7 @@ class PageOfImagesImport( HydrusSerialisable.SerialisableBase ):
HydrusSerialisable.SerialisableBase.__init__( self )
file_import_options = ClientDefaults.GetDefaultFileImportOptions()
file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
self._pending_page_urls = []
self._urls_cache = SeedCache()
@ -3798,6 +3796,16 @@ class SeedCache( HydrusSerialisable.SerialisableBase ):
self.RemoveSeeds( seeds_to_delete )
def RemoveSuccessfulSeeds( self ):
with self._lock:
seeds_to_delete = [ seed for seed in self._seeds if seed.status in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ) ]
self.RemoveSeeds( seeds_to_delete )
def RetryFailures( self ):
with self._lock:
@ -3863,7 +3871,7 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
self._periodic_file_limit = 50
self._paused = False
self._file_import_options = ClientDefaults.GetDefaultFileImportOptions( for_quiet_queue = True )
self._file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'quiet' )
new_options = HG.client_controller.new_options
@ -5154,7 +5162,7 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_THREAD_WATCHER_IMPORT
SERIALISABLE_NAME = 'Thread Watcher'
SERIALISABLE_VERSION = 3
SERIALISABLE_VERSION = 4
MIN_CHECK_PERIOD = 30
@ -5162,7 +5170,7 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
HydrusSerialisable.SerialisableBase.__init__( self )
file_import_options = ClientDefaults.GetDefaultFileImportOptions()
file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
new_options = HG.client_controller.new_options
@ -5190,6 +5198,9 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
self._files_paused = False
self._thread_paused = False
self._no_work_until = 0
self._no_work_until_reason = ''
self._file_velocity_status = ''
self._current_action = ''
self._watcher_status = ''
@ -5313,6 +5324,14 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
watcher_status = ''
except HydrusExceptions.NetworkException as e:
self._DelayWork( 4 * 3600, 'Network problem: ' + HydrusData.ToUnicode( e ) )
watcher_status = ''
HydrusData.PrintException( e )
except Exception as e:
error_occurred = True
@ -5361,6 +5380,12 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
def _DelayWork( self, time_delta, reason ):
self._no_work_until = HydrusData.GetNow() + time_delta
self._no_work_until_reason = reason
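A stripped-down, hypothetical sketch of the delay gate being added here: a network error records a resume time and a reason instead of pausing the watcher outright, and the work cycle declines to check until that time has passed or the user hits 'check now':

import time

class DelayGate( object ):
    
    def __init__( self ):
        
        self._no_work_until = 0
        self._no_work_until_reason = ''
        
    
    def DelayWork( self, time_delta, reason ):
        
        # called on a network error, e.g. with time_delta = 4 * 3600
        self._no_work_until = time.time() + time_delta
        self._no_work_until_reason = reason
        
    
    def CheckNow( self ):
        
        # the user override clears the delay
        self._no_work_until = 0
        self._no_work_until_reason = ''
        
    
    def OKToWork( self ):
        
        return time.time() > self._no_work_until
        
    

gate = DelayGate()

gate.DelayWork( 4 * 3600, 'Network problem: connection reset by peer' )

print( gate.OKToWork() ) # False for the next four hours

gate.CheckNow()

print( gate.OKToWork() ) # True again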
def _GetSerialisableInfo( self ):
serialisable_url_cache = self._urls_cache.GetSerialisableTuple()
@ -5368,7 +5393,7 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
serialisable_file_options = self._file_import_options.GetSerialisableTuple()
serialisable_tag_options = self._tag_import_options.GetSerialisableTuple()
return ( self._thread_url, serialisable_url_cache, self._urls_to_filenames, self._urls_to_md5_base64, serialisable_checker_options, serialisable_file_options, serialisable_tag_options, self._last_check_time, self._files_paused, self._thread_paused, self._thread_status, self._thread_subject )
return ( self._thread_url, serialisable_url_cache, self._urls_to_filenames, self._urls_to_md5_base64, serialisable_checker_options, serialisable_file_options, serialisable_tag_options, self._last_check_time, self._files_paused, self._thread_paused, self._thread_status, self._thread_subject, self._no_work_until, self._no_work_until_reason )
def _HasThread( self ):
@ -5413,6 +5438,15 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
page_name = thread_watcher_dead_page_string + ' ' + page_name
elif self._thread_paused:
thread_watcher_paused_page_string = new_options.GetNoneableString( 'thread_watcher_paused_page_string' )
if thread_watcher_paused_page_string is not None:
page_name = thread_watcher_paused_page_string + ' ' + page_name
if page_name != self._last_pubbed_page_name:
@ -5425,7 +5459,7 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( self._thread_url, serialisable_url_cache, self._urls_to_filenames, self._urls_to_md5_base64, serialisable_checker_options, serialisable_file_options, serialisable_tag_options, self._last_check_time, self._files_paused, self._thread_paused, self._thread_status, self._thread_subject ) = serialisable_info
( self._thread_url, serialisable_url_cache, self._urls_to_filenames, self._urls_to_md5_base64, serialisable_checker_options, serialisable_file_options, serialisable_tag_options, self._last_check_time, self._files_paused, self._thread_paused, self._thread_status, self._thread_subject, self._no_work_until, self._no_work_until_reason ) = serialisable_info
self._urls_cache = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_url_cache )
self._checker_options = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_checker_options )
@ -5446,17 +5480,24 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
else:
if self._thread_status != CHECKER_STATUS_404:
if not HydrusData.TimeHasPassed( self._no_work_until ):
if self._checker_options.IsDead( self._urls_cache, self._last_check_time ):
self._next_check_time = self._no_work_until + 1
else:
if self._thread_status != CHECKER_STATUS_404:
self._thread_status = CHECKER_STATUS_DEAD
self._thread_paused = True
if self._checker_options.IsDead( self._urls_cache, self._last_check_time ):
self._thread_status = CHECKER_STATUS_DEAD
self._thread_paused = True
self._next_check_time = self._checker_options.GetNextCheckTime( self._urls_cache, self._last_check_time )
self._next_check_time = self._checker_options.GetNextCheckTime( self._urls_cache, self._last_check_time )
@ -5490,6 +5531,18 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
return ( 3, new_serialisable_info )
if version == 3:
( thread_url, serialisable_url_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_options, serialisable_tag_options, last_check_time, files_paused, thread_paused, thread_status, thread_subject ) = old_serialisable_info
no_work_until = 0
no_work_until_reason = ''
new_serialisable_info = ( thread_url, serialisable_url_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_options, serialisable_tag_options, last_check_time, files_paused, thread_paused, thread_status, thread_subject, no_work_until, no_work_until_reason )
return ( 4, new_serialisable_info )
def _WorkOnFiles( self, page_key ):
@ -5736,8 +5789,9 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
able_to_check = self._HasThread() and not self._thread_paused
check_due = HydrusData.TimeHasPassed( self._next_check_time )
no_delays = HydrusData.TimeHasPassed( self._no_work_until )
time_to_check = able_to_check and check_due
time_to_check = able_to_check and check_due and no_delays
if not time_to_check or HG.client_controller.PageClosedButNotDestroyed( page_key ):
@ -5779,6 +5833,9 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
self._thread_paused = False
self._no_work_until = 0
self._no_work_until_reason = ''
self._thread_status = CHECKER_STATUS_OK
self._UpdateNextCheckTime()
@ -5814,7 +5871,11 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
with self._lock:
if self._thread_status == CHECKER_STATUS_404:
if not HydrusData.TimeHasPassed( self._no_work_until ):
watcher_status = self._no_work_until_reason + ' - ' + 'next check ' + HydrusData.ConvertTimestampToPrettyPending( self._next_check_time )
elif self._thread_status == CHECKER_STATUS_404:
watcher_status = 'Thread 404'
@ -5958,7 +6019,7 @@ class URLsImport( HydrusSerialisable.SerialisableBase ):
HydrusSerialisable.SerialisableBase.__init__( self )
file_import_options = ClientDefaults.GetDefaultFileImportOptions()
file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
self._urls_cache = SeedCache()
self._file_import_options = file_import_options

View File

@ -1200,6 +1200,11 @@ class NetworkJob( object ):
if self._num_bytes_to_read is not None and self._num_bytes_read < self._num_bytes_to_read * 0.8:
raise HydrusExceptions.NetworkException( 'Did not read enough data! Was expecting ' + HydrusData.ConvertIntToBytes( self._num_bytes_to_read ) + ' but only got ' + HydrusData.ConvertIntToBytes( self._num_bytes_read ) + '.' )
def _ReportDataUsed( self, num_bytes ):
@ -1616,6 +1621,22 @@ class NetworkJob( object ):
request_completed = True
except requests.exceptions.ChunkedEncodingError:
self._current_connection_attempt_number += 1
if not self._CanReattemptRequest():
raise HydrusExceptions.ConnectionException( 'Unable to complete request--it broke mid-way!' )
with self._lock:
self._status_text = u'connection broke mid-request--retrying'
time.sleep( 3 )
except ( requests.exceptions.ConnectionError, requests.exceptions.ConnectTimeout ):
self._current_connection_attempt_number += 1
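For illustration, a self-contained sketch of the same retry shape as the new ChunkedEncodingError handling: count the broken attempt, pause briefly, try again, and give up once attempts run out. This is a hypothetical helper, not the NetworkJob class itself, and leaves out its locking, status text, and bandwidth tracking:

import time

import requests

def fetch_with_retries( url, max_attempts = 3 ):
    
    for attempt in range( 1, max_attempts + 1 ):
        
        try:
            
            return requests.get( url, timeout = 30 ).content
            
        except requests.exceptions.ChunkedEncodingError:
            
            # the connection broke mid-request; either give up or wait a few seconds and retry
            
            if attempt == max_attempts:
                
                raise Exception( 'Unable to complete request--it broke mid-way!' )
                
            
            time.sleep( 3 )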

View File

@ -192,7 +192,7 @@ class NetworkDomainManager( HydrusSerialisable.SerialisableBase ):
def key( u_m ):
return len( u_m.GetExampleURL() )
return u_m.GetExampleURL().count( '/' )
for url_matches in self._domains_to_url_matches.values():
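A small illustration of what the new key changes: counting '/' orders example urls by path depth rather than raw string length (the urls below are made up):

examples = [
    'https://example.com/averyveryverylonggallerypagename',
    'https://example.com/post/view/123'
]

print( sorted( examples, key = lambda u: len( u ) ) )        # ordered by raw length: the shorter, deeper url comes first
print( sorted( examples, key = lambda u: u.count( '/' ) ) )  # ordered by path depth: the deeper url comes last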

View File

@ -509,6 +509,24 @@ class WXAwareTimer( object ):
def CallLater( window, seconds, callable, *args, **kwargs ):
if HG.callto_report_mode:
what_to_report = [ callable ]
if len( args ) > 0:
what_to_report.append( args )
if len( kwargs ) > 0:
what_to_report.append( kwargs )
HydrusData.ShowText( tuple( what_to_report ) )
call = HydrusData.Call( callable, *args, **kwargs )
timer = WXAwareTimer( window, call )

View File

@ -49,7 +49,7 @@ options = {}
# Misc
NETWORK_VERSION = 18
SOFTWARE_VERSION = 288
SOFTWARE_VERSION = 289
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )

View File

@ -743,6 +743,11 @@ class VideoRendererFFMPEG( object ):
if len(s) != nbytes:
if self.lastread is None:
raise Exception( 'Unable to render that video! Please send it to hydrus dev so he can look at it!' )
result = self.lastread
self.close()

View File

@ -67,7 +67,9 @@ class TestListBoxes( unittest.TestCase ):
new_namespace_colours = dict( initial_namespace_colours )
new_namespace_colours[ 'character' ] = ( 0, 170, 0 )
panel.SetNamespaceColour( 'character', ( 0, 170, 0 ) )
colour = wx.Colour( 0, 170, 0 )
panel.SetNamespaceColour( 'character', colour )
self.assertEqual( panel.GetNamespaceColours(), new_namespace_colours )

View File

@ -764,13 +764,7 @@ class TestClientDB( unittest.TestCase ):
test_files.append( ( 'muh_webm.webm', '55b6ce9d067326bf4b2fbe66b8f51f366bc6e5f776ba691b0351364383c43fcb', 84069, HC.VIDEO_WEBM, 640, 360, 4010, 120, None ) )
test_files.append( ( 'muh_jpg.jpg', '5d884d84813beeebd59a35e474fa3e4742d0f2b6679faa7609b245ddbbd05444', 42296, HC.IMAGE_JPEG, 392, 498, None, None, None ) )
test_files.append( ( 'muh_png.png', 'cdc67d3b377e6e1397ffa55edc5b50f6bdf4482c7a6102c6f27fa351429d6f49', 31452, HC.IMAGE_PNG, 191, 196, None, None, None ) )
if '3.2.4' in HydrusVideoHandling.GetFFMPEGVersion():
apng_duration = 3133
else:
apng_duration = 1880
test_files.append( ( 'muh_apng.png', '9e7b8b5abc7cb11da32db05671ce926a2a2b701415d1b2cb77a28deea51010c3', 616956, HC.IMAGE_APNG, 500, 500, apng_duration, 47, None ) )
test_files.append( ( 'muh_apng.png', '9e7b8b5abc7cb11da32db05671ce926a2a2b701415d1b2cb77a28deea51010c3', 616956, HC.IMAGE_APNG, 500, 500, 'apng_duration', 47, None ) )
test_files.append( ( 'muh_gif.gif', '00dd9e9611ebc929bfc78fde99a0c92800bbb09b9d18e0946cea94c099b211c2', 15660, HC.IMAGE_GIF, 329, 302, 600, 5, None ) )
for ( filename, hex_hash, size, mime, width, height, duration, num_frames, num_words ) in test_files:
@ -820,7 +814,11 @@ class TestClientDB( unittest.TestCase ):
self.assertEqual( mr_width, width )
self.assertEqual( mr_height, height )
if duration == 'mp4_duration': # diff ffmpeg versions report differently
if duration == 'apng_duration': # diff ffmpeg versions report differently
self.assertIn( mr_duration, ( 3133, 1880 ) )
elif duration == 'mp4_duration':
self.assertIn( mr_duration, ( 6266, 6290 ) )
@ -931,8 +929,6 @@ class TestClientDB( unittest.TestCase ):
#
HC.options[ 'exclude_deleted_files' ] = True
( status, hash, note ) = self._read( 'md5_status', md5 )
self.assertEqual( ( status, hash ), ( CC.STATUS_DELETED, hash ) )
@ -944,8 +940,6 @@ class TestClientDB( unittest.TestCase ):
path = os.path.join( HC.STATIC_DIR, 'hydrus.png' )
HC.options[ 'exclude_deleted_files' ] = False
file_import_job = ClientImporting.FileImportJob( path )
file_import_job.GenerateHashAndStatus()