Version 261
This commit is contained in:
parent
64bf9bcebb
commit
99c3b2fbea
|
@ -13,6 +13,7 @@ The client can do quite a lot! Please check out the help inside the release or [
|
|||
* [8chan board](https://8ch.net/hydrus/index.html)
|
||||
* [twitter](https://twitter.com/hydrusnetwork)
|
||||
* [tumblr](http://hydrus.tumblr.com/)
|
||||
* [discord](https://discord.gg/vy8CUB4)
|
||||
|
||||
## Attribution
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<h3>this is non-comprehensive</h3>
|
||||
<p>I am always changing and adding little things. The best way to learn is just to look around. If you think a shortcut should probably do something, try it out! If you can't find something, let me know and I'll try to add it!</p>
|
||||
<h3>advanced mode</h3>
|
||||
<p>To avoid confusing clutter, some advanced features are hidden by default. When you are comfortable with the program, hit <i>help->advanced mode</i> to reveal these buttons and menu options!</p>
|
||||
<p>To avoid confusing clutter, several advanced menu items and buttons are hidden by default. When you are comfortable with the program, hit <i>help->advanced mode</i> to reveal them!</p>
|
||||
<h3>searching with wildcards</h3>
|
||||
<p>The autocomplete tag dropdown supports wildcard searching with '*'.</p>
|
||||
<p><img src="wildcard_gelion.png"/></p>
|
||||
|
|
|
@ -8,6 +8,36 @@
|
|||
<div class="content">
|
||||
<h3>changelog</h3>
|
||||
<ul>
|
||||
<li><h3>version 261</h3></li>
|
||||
<ul>
|
||||
<li>wrote a new manage dialog for urls</li>
|
||||
<li>added 'manage known urls' to media right-click menus!</li>
|
||||
<li>double-left-clicking on any video animation will 'open externally'!</li>
|
||||
<li>added an option to options->gui to set where new pages will appear by default--either far left/right or left/right of current page</li>
|
||||
<li>the tumblr parser now produces '_raw' urls when the post was posted in 2013 or later</li>
|
||||
<li>created a new 'number' subtag cache that will be populated on update</li>
|
||||
<li>created a new 'tag as number' system predicate that can search for, say, all 'page:' tags > 200</li>
|
||||
<li>bandwidth management now tracks requests and num_bytes more sensibly</li>
|
||||
<li>bandwidth tracking objects can now better handle bandwidth usage and rule application in short intervals (i.e. sub-5-second) (however, the current networking engine cannot yet use this information accurately)</li>
|
||||
<li>wrote unit test for bandwidth tracker, including for the new short interval timing</li>
|
||||
<li>fleshed out the new network engine</li>
|
||||
<li>fleshed out the new network job</li>
|
||||
<li>thread watchers can now have a time delta that includes days (giving a max check period of 360 days, wew)</li>
|
||||
<li>adminside mapping petition processing now has adaptive max total petition weight--lower file count range petitions will have much higher max total permitted weight</li>
|
||||
<li>added a new 'callto' debug reporting mode that reports on current thread pool jobs</li>
|
||||
<li>improved calltothread pre-spawning checks to reduce outside chance of deadlock in busy periods</li>
|
||||
<li>the advanced review services repository panel buttons are now hidden unless in advanced mode</li>
|
||||
<li>eliminated some animation buffer looping redundancy</li>
|
||||
<li>fixed a little animation next-frame prediction code</li>
|
||||
<li>fixed up some '1 minutes'-type time_delta->string conversion</li>
|
||||
<li>fixed up label on time delta control button</li>
|
||||
<li>fixed some shutdown thread interactions</li>
|
||||
<li>fixed an issue where sometimes empty tags could be entered into the manage tags panel</li>
|
||||
<li>added some pydeadobject error handling during client shutdown</li>
|
||||
<li>refactored some db multi-tag->file search code, cleaned up wildcard searching</li>
|
||||
<li>misc string cleanup</li>
|
||||
<li>misc dialog cleanup</li>
|
||||
</ul>
|
||||
<li><h3>version 260</h3></li>
|
||||
<ul>
|
||||
<li>?fixed video parsing when the video metadata includes random non-utf-friendly garbage</li>
|
||||
|
|
|
@ -232,6 +232,18 @@ media_viewer_scale_string_lookup[ MEDIA_VIEWER_SCALE_100 ] = 'show at 100%'
|
|||
media_viewer_scale_string_lookup[ MEDIA_VIEWER_SCALE_MAX_REGULAR ] = 'scale to the largest regular zoom that fits'
|
||||
media_viewer_scale_string_lookup[ MEDIA_VIEWER_SCALE_TO_CANVAS ] = 'scale to the canvas size'
|
||||
|
||||
# Where a newly created page should be inserted in the page notebook.
NEW_PAGE_GOES_FAR_LEFT = 0
NEW_PAGE_GOES_LEFT_OF_CURRENT = 1
NEW_PAGE_GOES_RIGHT_OF_CURRENT = 2
NEW_PAGE_GOES_FAR_RIGHT = 3

# Human-readable labels for the new-page position options.
new_page_goes_string_lookup = {
    NEW_PAGE_GOES_FAR_LEFT : 'go far left',
    NEW_PAGE_GOES_LEFT_OF_CURRENT : 'go left of current page',
    NEW_PAGE_GOES_RIGHT_OF_CURRENT : 'go right of current page',
    NEW_PAGE_GOES_FAR_RIGHT : 'go far right'
}

# Keyboard modifier identifiers used by the shortcut system.
SHORTCUT_MODIFIER_CTRL = 0
SHORTCUT_MODIFIER_ALT = 1
SHORTCUT_MODIFIER_SHIFT = 2
||||
|
|
|
@ -522,7 +522,14 @@ class Controller( HydrusController.HydrusController ):
|
|||
|
||||
def GoodTimeToDoForegroundWork( self ):
    """Return True if the client can do foreground work right now.
    
    Defers to the gui's busy state when a gui exists. During early boot or
    shutdown, when self._gui is not yet set, foreground work is always ok.
    """
    
    # the stray unconditional return that used to sit here made the
    # gui-existence guard below unreachable and crashed when _gui was unset
    if self._gui:
        
        return not self._gui.CurrentlyBusy()
        
    else:
        
        return True
||||
|
||||
def InitClientFilesManager( self ):
|
||||
|
@ -819,11 +826,11 @@ class Controller( HydrusController.HydrusController ):
|
|||
|
||||
def PageCompletelyDestroyed( self, page_key ):
    """Return True if the page for page_key has been completely destroyed.
    
    If the gui is already gone (e.g. during shutdown), the page is treated
    as destroyed so callers can clean up.
    """
    
    # this block previously interleaved a try/except wx.PyDeadObjectError
    # wrapper with the gui-existence check, which was not even syntactically
    # valid; the simple guard below is the intended behavior
    if self._gui:
        
        return self._gui.PageCompletelyDestroyed( page_key )
        
    else:
        
        return True
|
@ -831,7 +838,14 @@ class Controller( HydrusController.HydrusController ):
|
|||
|
||||
def PageClosedButNotDestroyed( self, page_key ):
    """Return True if the page for page_key is closed but still exists.
    
    With no gui (e.g. during shutdown), there is no such half-closed page,
    so this returns False.
    """
    
    # the stray unconditional return that used to sit here made the
    # gui-existence guard below unreachable and crashed when _gui was unset
    if self._gui:
        
        return self._gui.PageClosedButNotDestroyed( page_key )
        
    else:
        
        return False
|
||||
def PopupMenu( self, window, menu ):
|
||||
|
|
|
@ -44,6 +44,14 @@ YAML_DUMP_ID_EXPORT_FOLDER = 6
|
|||
YAML_DUMP_ID_SUBSCRIPTION = 7
|
||||
YAML_DUMP_ID_LOCAL_BOORU = 8
|
||||
|
||||
# Sqlite can handle -( 2 ** 63 ) -> ( 2 ** 63 ) - 1, but the user won't be searching that distance, so np
MIN_CACHED_INTEGER = -99999999
MAX_CACHED_INTEGER = 99999999

def CanCacheInteger( num ):
    """Return True if num lies within the cached integer-subtag range."""
    
    return MIN_CACHED_INTEGER <= num <= MAX_CACHED_INTEGER
|
||||
|
||||
def ConvertWildcardToSQLiteLikeParameter( wildcard ):
|
||||
|
||||
like_param = wildcard.replace( '*', '%' )
|
||||
|
@ -2678,6 +2686,9 @@ class DB( HydrusDB.HydrusDB ):
|
|||
self._c.execute( 'CREATE TABLE external_caches.duplicate_pairs ( smaller_hash_id INTEGER, larger_hash_id INTEGER, duplicate_type INTEGER, PRIMARY KEY ( smaller_hash_id, larger_hash_id ) );' )
|
||||
self._CreateIndex( 'external_caches.duplicate_pairs', [ 'larger_hash_id', 'smaller_hash_id' ], unique = True )
|
||||
|
||||
self._c.execute( 'CREATE TABLE external_caches.integer_subtags ( subtag_id INTEGER PRIMARY KEY, integer_subtag INTEGER );' )
|
||||
self._CreateIndex( 'external_caches.integer_subtags', [ 'integer_subtag' ] )
|
||||
|
||||
# master
|
||||
|
||||
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
|
||||
|
@ -3578,7 +3589,7 @@ class DB( HydrusDB.HydrusDB ):
|
|||
|
||||
predicates.append( ClientSearch.Predicate( HC.PREDICATE_TYPE_SYSTEM_EVERYTHING, min_current_count = num_everything ) )
|
||||
|
||||
predicates.extend( [ ClientSearch.Predicate( predicate_type, None ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_UNTAGGED, HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_HASH, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ] ] )
|
||||
predicates.extend( [ ClientSearch.Predicate( predicate_type, None ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_UNTAGGED, HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_HASH, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ] ] )
|
||||
|
||||
elif service_type in HC.FILE_SERVICES:
|
||||
|
||||
|
@ -3629,7 +3640,7 @@ class DB( HydrusDB.HydrusDB ):
|
|||
predicates.append( ClientSearch.Predicate( HC.PREDICATE_TYPE_SYSTEM_RATING ) )
|
||||
|
||||
|
||||
predicates.extend( [ ClientSearch.Predicate( predicate_type ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ] ] )
|
||||
predicates.extend( [ ClientSearch.Predicate( predicate_type ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ] ] )
|
||||
|
||||
|
||||
return predicates
|
||||
|
@ -3777,6 +3788,61 @@ class DB( HydrusDB.HydrusDB ):
|
|||
return hash_ids
|
||||
|
||||
|
||||
def _GetHashIdsFromNamespaceIdsSubtagIds( self, file_service_key, tag_service_key, namespace_ids, subtag_ids, include_current_tags, include_pending_tags ):
    """Return the set of hash_ids that carry at least one tag whose
    namespace_id is in namespace_ids and whose subtag_id is in subtag_ids,
    restricted to the given file and tag service domains."""
    
    file_service_id = self._GetServiceId( file_service_key )
    
    # the combined tag service means 'search every tag service'
    if tag_service_key == CC.COMBINED_TAG_SERVICE_KEY:
        
        search_tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
        
    else:
        
        search_tag_service_ids = [ self._GetServiceId( tag_service_key ) ]
        
    
    # the filter clause is identical for every select, so build it once
    predicate_sql = ' NATURAL JOIN tags WHERE namespace_id IN ' + HydrusData.SplayListForDB( namespace_ids ) + ' AND subtag_id IN ' + HydrusData.SplayListForDB( subtag_ids ) + ';'
    
    current_selects = []
    pending_selects = []
    
    for search_tag_service_id in search_tag_service_ids:
        
        if file_service_key == CC.COMBINED_FILE_SERVICE_KEY:
            
            # no file-domain restriction, so hit the full mappings tables
            ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( search_tag_service_id )
            
            ( current_table, pending_table ) = ( current_mappings_table_name, pending_mappings_table_name )
            
        else:
            
            # use the per-file-service mappings cache tables
            ( cache_files_table_name, cache_current_mappings_table_name, cache_pending_mappings_table_name, ac_cache_table_name ) = GenerateSpecificMappingsCacheTableNames( file_service_id, search_tag_service_id )
            
            ( current_table, pending_table ) = ( cache_current_mappings_table_name, cache_pending_mappings_table_name )
            
        
        current_selects.append( 'SELECT hash_id FROM ' + current_table + predicate_sql )
        pending_selects.append( 'SELECT hash_id FROM ' + pending_table + predicate_sql )
        
    
    hash_ids = set()
    
    if include_current_tags:
        
        for select in current_selects:
            
            hash_ids.update( ( id for ( id, ) in self._c.execute( select ) ) )
            
        
    
    if include_pending_tags:
        
        for select in pending_selects:
            
            hash_ids.update( ( id for ( id, ) in self._c.execute( select ) ) )
            
        
    
    return hash_ids
|
||||
def _GetHashIdsFromQuery( self, search_context ):
|
||||
|
||||
self._controller.ResetIdleTimer()
|
||||
|
@ -4235,6 +4301,26 @@ class DB( HydrusDB.HydrusDB ):
|
|||
query_hash_ids.intersection_update( good_tag_count_hash_ids )
|
||||
|
||||
|
||||
#
|
||||
|
||||
if 'min_tag_as_number' in simple_preds:
|
||||
|
||||
( namespace, num ) = simple_preds[ 'min_tag_as_number' ]
|
||||
|
||||
good_hash_ids = self._GetHashIdsThatHaveTagAsNum( file_service_key, tag_service_key, namespace, num, '>', include_current_tags, include_pending_tags )
|
||||
|
||||
query_hash_ids.intersection_update( good_hash_ids )
|
||||
|
||||
|
||||
if 'max_tag_as_number' in simple_preds:
|
||||
|
||||
( namespace, num ) = simple_preds[ 'max_tag_as_number' ]
|
||||
|
||||
good_hash_ids = self._GetHashIdsThatHaveTagAsNum( file_service_key, tag_service_key, namespace, num, '<', include_current_tags, include_pending_tags )
|
||||
|
||||
query_hash_ids.intersection_update( good_hash_ids )
|
||||
|
||||
|
||||
#
|
||||
|
||||
limit = system_predicates.GetLimit()
|
||||
|
@ -4251,6 +4337,61 @@ class DB( HydrusDB.HydrusDB ):
|
|||
return query_hash_ids
|
||||
|
||||
|
||||
def _GetHashIdsFromSubtagIds( self, file_service_key, tag_service_key, subtag_ids, include_current_tags, include_pending_tags ):
    """Return the set of hash_ids that carry at least one tag whose
    subtag_id is in subtag_ids, regardless of namespace, restricted to the
    given file and tag service domains."""
    
    file_service_id = self._GetServiceId( file_service_key )
    
    # the combined tag service means 'search every tag service'
    if tag_service_key == CC.COMBINED_TAG_SERVICE_KEY:
        
        search_tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
        
    else:
        
        search_tag_service_ids = [ self._GetServiceId( tag_service_key ) ]
        
    
    # the filter clause is identical for every select, so build it once
    predicate_sql = ' NATURAL JOIN tags WHERE subtag_id IN ' + HydrusData.SplayListForDB( subtag_ids ) + ';'
    
    current_selects = []
    pending_selects = []
    
    for search_tag_service_id in search_tag_service_ids:
        
        if file_service_key == CC.COMBINED_FILE_SERVICE_KEY:
            
            # no file-domain restriction, so hit the full mappings tables
            ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( search_tag_service_id )
            
            ( current_table, pending_table ) = ( current_mappings_table_name, pending_mappings_table_name )
            
        else:
            
            # use the per-file-service mappings cache tables
            ( cache_files_table_name, cache_current_mappings_table_name, cache_pending_mappings_table_name, ac_cache_table_name ) = GenerateSpecificMappingsCacheTableNames( file_service_id, search_tag_service_id )
            
            ( current_table, pending_table ) = ( cache_current_mappings_table_name, cache_pending_mappings_table_name )
            
        
        current_selects.append( 'SELECT hash_id FROM ' + current_table + predicate_sql )
        pending_selects.append( 'SELECT hash_id FROM ' + pending_table + predicate_sql )
        
    
    hash_ids = set()
    
    if include_current_tags:
        
        for select in current_selects:
            
            hash_ids.update( ( id for ( id, ) in self._c.execute( select ) ) )
            
        
    
    if include_pending_tags:
        
        for select in pending_selects:
            
            hash_ids.update( ( id for ( id, ) in self._c.execute( select ) ) )
            
        
    
    return hash_ids
|
||||
def _GetHashIdsFromTag( self, file_service_key, tag_service_key, tag, include_current_tags, include_pending_tags ):
|
||||
|
||||
siblings_manager = self._controller.GetManager( 'tag_siblings' )
|
||||
|
@ -4401,87 +4542,20 @@ class DB( HydrusDB.HydrusDB ):
|
|||
|
||||
|
||||
|
||||
file_service_id = self._GetServiceId( file_service_key )
|
||||
|
||||
if tag_service_key == CC.COMBINED_TAG_SERVICE_KEY:
|
||||
|
||||
search_tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
|
||||
|
||||
else:
|
||||
|
||||
search_tag_service_ids = [ self._GetServiceId( tag_service_key ) ]
|
||||
|
||||
|
||||
current_selects = []
|
||||
pending_selects = []
|
||||
|
||||
( namespace_wildcard, subtag_wildcard ) = HydrusTags.SplitTag( wildcard )
|
||||
|
||||
possible_subtag_ids = GetSubtagIdsFromWildcard( subtag_wildcard )
|
||||
|
||||
if namespace_wildcard != '':
|
||||
|
||||
possible_namespace_ids = GetNamespaceIdsFromWildcard( namespace_wildcard )
|
||||
possible_subtag_ids = GetSubtagIdsFromWildcard( subtag_wildcard )
|
||||
|
||||
for search_tag_service_id in search_tag_service_ids:
|
||||
|
||||
if file_service_key == CC.COMBINED_FILE_SERVICE_KEY:
|
||||
|
||||
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( search_tag_service_id )
|
||||
|
||||
current_selects.append( 'SELECT hash_id FROM ' + current_mappings_table_name + ' NATURAL JOIN tags WHERE namespace_id IN ' + HydrusData.SplayListForDB( possible_namespace_ids ) + ' AND subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
pending_selects.append( 'SELECT hash_id FROM ' + pending_mappings_table_name + ' NATURAL JOIN tags WHERE namespace_id IN ' + HydrusData.SplayListForDB( possible_namespace_ids ) + ' AND subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
|
||||
else:
|
||||
|
||||
( cache_files_table_name, cache_current_mappings_table_name, cache_pending_mappings_table_name, ac_cache_table_name ) = GenerateSpecificMappingsCacheTableNames( file_service_id, search_tag_service_id )
|
||||
|
||||
current_selects.append( 'SELECT hash_id FROM ' + cache_current_mappings_table_name + ' NATURAL JOIN tags WHERE namespace_id IN ' + HydrusData.SplayListForDB( possible_namespace_ids ) + ' AND subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
pending_selects.append( 'SELECT hash_id FROM ' + cache_pending_mappings_table_name + ' NATURAL JOIN tags WHERE namespace_id IN ' + HydrusData.SplayListForDB( possible_namespace_ids ) + ' AND subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
|
||||
|
||||
return self._GetHashIdsFromNamespaceIdsSubtagIds( file_service_key, tag_service_key, possible_namespace_ids, possible_subtag_ids, include_current_tags, include_pending_tags )
|
||||
|
||||
else:
|
||||
|
||||
possible_subtag_ids = GetSubtagIdsFromWildcard( subtag_wildcard )
|
||||
return self._GetHashIdsFromSubtagIds( file_service_key, tag_service_key, possible_subtag_ids, include_current_tags, include_pending_tags )
|
||||
|
||||
for search_tag_service_id in search_tag_service_ids:
|
||||
|
||||
if file_service_key == CC.COMBINED_FILE_SERVICE_KEY:
|
||||
|
||||
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( search_tag_service_id )
|
||||
|
||||
current_selects.append( 'SELECT hash_id FROM ' + current_mappings_table_name + ' NATURAL JOIN tags WHERE subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
pending_selects.append( 'SELECT hash_id FROM ' + pending_mappings_table_name + ' NATURAL JOIN tags WHERE subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
|
||||
else:
|
||||
|
||||
( cache_files_table_name, cache_current_mappings_table_name, cache_pending_mappings_table_name, ac_cache_table_name ) = GenerateSpecificMappingsCacheTableNames( file_service_id, search_tag_service_id )
|
||||
|
||||
current_selects.append( 'SELECT hash_id FROM ' + cache_current_mappings_table_name + ' NATURAL JOIN tags WHERE subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
pending_selects.append( 'SELECT hash_id FROM ' + cache_pending_mappings_table_name + ' NATURAL JOIN tags WHERE subtag_id IN ' + HydrusData.SplayListForDB( possible_subtag_ids ) + ';' )
|
||||
|
||||
|
||||
|
||||
|
||||
hash_ids = set()
|
||||
|
||||
if include_current_tags:
|
||||
|
||||
for current_select in current_selects:
|
||||
|
||||
hash_ids.update( ( id for ( id, ) in self._c.execute( current_select ) ) )
|
||||
|
||||
|
||||
|
||||
if include_pending_tags:
|
||||
|
||||
for pending_select in pending_selects:
|
||||
|
||||
hash_ids.update( ( id for ( id, ) in self._c.execute( pending_select ) ) )
|
||||
|
||||
|
||||
|
||||
return hash_ids
|
||||
|
||||
|
||||
def _GetHashIdsTagCounts( self, tag_service_key, include_current, include_pending, hash_ids = None ):
|
||||
|
@ -4607,6 +4681,24 @@ class DB( HydrusDB.HydrusDB ):
|
|||
return nonzero_tag_hash_ids
|
||||
|
||||
|
||||
def _GetHashIdsThatHaveTagAsNum( self, file_service_key, tag_service_key, namespace, num, operator, include_current_tags, include_pending_tags ):
    """Return hash_ids whose files have a tag whose subtag, read as an
    integer, compares against num with the given operator ('<' or '>').
    
    An empty namespace matches tags in any namespace; otherwise only tags in
    that exact namespace are considered.
    """
    
    # operator is supplied by internal callers only ('<' or '>'), so splicing
    # it into the sql is safe. num is user-derived, however, so bind it as a
    # query parameter rather than string-concatenating str( num ).
    possible_subtag_ids = self._STS( self._c.execute( 'SELECT subtag_id FROM integer_subtags WHERE integer_subtag ' + operator + ' ?;', ( num, ) ) )
    
    if namespace == '':
        
        return self._GetHashIdsFromSubtagIds( file_service_key, tag_service_key, possible_subtag_ids, include_current_tags, include_pending_tags )
        
    else:
        
        namespace_id = self._GetNamespaceId( namespace )
        
        possible_namespace_ids = { namespace_id }
        
        return self._GetHashIdsFromNamespaceIdsSubtagIds( file_service_key, tag_service_key, possible_namespace_ids, possible_subtag_ids, include_current_tags, include_pending_tags )
|
||||
def _GetHashIdsToHashes( self, hash_ids ):
|
||||
|
||||
# this is actually a bit faster than saying "hash_id IN ( bigass_list )"
|
||||
|
@ -5665,6 +5757,20 @@ class DB( HydrusDB.HydrusDB ):
|
|||
|
||||
self._c.execute( 'REPLACE INTO subtags_fts4 ( docid, subtag ) VALUES ( ?, ? );', ( subtag_id, subtag_searchable ) )
|
||||
|
||||
try:
|
||||
|
||||
integer_subtag = int( subtag )
|
||||
|
||||
if CanCacheInteger( integer_subtag ):
|
||||
|
||||
self._c.execute( 'INSERT OR IGNORE INTO integer_subtags ( subtag_id, integer_subtag ) VALUES ( ?, ? );', ( subtag_id, integer_subtag ) )
|
||||
|
||||
|
||||
except ValueError:
|
||||
|
||||
pass
|
||||
|
||||
|
||||
else:
|
||||
|
||||
( subtag_id, ) = result
|
||||
|
@ -9295,6 +9401,63 @@ class DB( HydrusDB.HydrusDB ):
|
|||
|
||||
|
||||
|
||||
if version == 260:
|
||||
|
||||
self._controller.pub( 'splash_set_status_text', 'generating some new tag search data' )
|
||||
|
||||
self._c.execute( 'CREATE TABLE external_caches.integer_subtags ( subtag_id INTEGER PRIMARY KEY, integer_subtag INTEGER );' )
|
||||
|
||||
existing_subtag_data = self._c.execute( 'SELECT subtag_id, subtag FROM subtags;' ).fetchall()
|
||||
|
||||
inserts = []
|
||||
|
||||
for ( subtag_id, subtag ) in existing_subtag_data:
|
||||
|
||||
try:
|
||||
|
||||
integer_subtag = int( subtag )
|
||||
|
||||
if CanCacheInteger( integer_subtag ):
|
||||
|
||||
inserts.append( ( subtag_id, integer_subtag ) )
|
||||
|
||||
|
||||
except ValueError:
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
||||
self._c.executemany( 'INSERT OR IGNORE INTO integer_subtags ( subtag_id, integer_subtag ) VALUES ( ?, ? );', inserts )
|
||||
|
||||
self._CreateIndex( 'external_caches.integer_subtags', [ 'integer_subtag' ] )
|
||||
|
||||
#
|
||||
|
||||
do_the_message = False
|
||||
|
||||
subscriptions = self._GetJSONDumpNamed( HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION )
|
||||
|
||||
for subscription in subscriptions:
|
||||
|
||||
g_i = subscription._gallery_identifier
|
||||
|
||||
if g_i.GetSiteType() == HC.SITE_TYPE_TUMBLR:
|
||||
|
||||
do_the_message = True
|
||||
|
||||
break
|
||||
|
||||
|
||||
|
||||
if do_the_message:
|
||||
|
||||
message = 'The tumblr downloader can now produce \'raw\' urls for images that have >1280px width. It is possible some of your tumblr subscriptions\' urls are resizes, so at some point you may want to reset their url caches. I recommend you not do it yet--wait for the upcoming downloader overhaul, which will provide other benefits such as associating the \'post\' url with the image, rather than the ugly API url.'
|
||||
|
||||
self.pub_initial_message( message )
|
||||
|
||||
|
||||
|
||||
self._controller.pub( 'splash_set_title_text', 'updated db to v' + str( version + 1 ) )
|
||||
|
||||
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
|
||||
|
|
|
@ -814,6 +814,8 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
self._dictionary[ 'integers' ][ 'similar_files_duplicate_pairs_search_distance' ] = 0
|
||||
|
||||
self._dictionary[ 'integers' ][ 'default_new_page_goes' ] = CC.NEW_PAGE_GOES_FAR_RIGHT
|
||||
|
||||
#
|
||||
|
||||
self._dictionary[ 'keys' ] = {}
|
||||
|
|
|
@ -1779,6 +1779,29 @@ class GalleryTumblr( Gallery ):
|
|||
|
||||
def _ParseGalleryPage( self, data, url_base ):
|
||||
|
||||
def ConvertRegularToRawURL( regular_url ):
    """Convert a tumblr sized-image url to its '_raw' full-size equivalent.
    
    convert this:
    http://68.media.tumblr.com/5af0d991f26ef9fdad5a0c743fb1eca2/tumblr_opl012ZBOu1tiyj7vo1_500.jpg
    to this:
    http://68.media.tumblr.com/5af0d991f26ef9fdad5a0c743fb1eca2/tumblr_opl012ZBOu1tiyj7vo1_raw.jpg
    the 500 part can be a bunch of stuff, including letters
    """
    
    url_components = regular_url.split( '_' )
    
    last_component = url_components[ -1 ]
    
    # rsplit on the final dot rather than split: the size gubbins could
    # conceivably contain a dot itself, which would break a plain split
    ( number_gubbins, file_ext ) = last_component.rsplit( '.', 1 )
    
    raw_last_component = 'raw.' + file_ext
    
    url_components[ -1 ] = raw_last_component
    
    raw_url = '_'.join( url_components )
    
    return raw_url
|
||||
definitely_no_more_pages = False
|
||||
|
||||
processed_raw_json = data.split( 'var tumblr_api_read = ' )[1][:-2] # -1 takes a js ';' off the end
|
||||
|
@ -1791,6 +1814,13 @@ class GalleryTumblr( Gallery ):
|
|||
|
||||
for post in json_object[ 'posts' ]:
|
||||
|
||||
# 2012-06-20 15:59:00 GMT
|
||||
date = post[ 'date-gmt' ]
|
||||
|
||||
date_struct = time.strptime( date, '%Y-%m-%d %H:%M:%S %Z' )
|
||||
|
||||
raw_url_available = date_struct.tm_year > 2012
|
||||
|
||||
if 'tags' in post: tags = post[ 'tags' ]
|
||||
else: tags = []
|
||||
|
||||
|
@ -1804,11 +1834,19 @@ class GalleryTumblr( Gallery ):
|
|||
|
||||
url = post[ 'photo-url-1280' ]
|
||||
|
||||
if raw_url_available:
|
||||
|
||||
url = ConvertRegularToRawURL( url )
|
||||
|
||||
|
||||
SetExtraURLInfo( url, tags )
|
||||
|
||||
urls.append( url )
|
||||
|
||||
except: pass
|
||||
except:
|
||||
|
||||
pass
|
||||
|
||||
|
||||
else:
|
||||
|
||||
|
@ -1818,11 +1856,19 @@ class GalleryTumblr( Gallery ):
|
|||
|
||||
url = photo[ 'photo-url-1280' ]
|
||||
|
||||
if raw_url_available:
|
||||
|
||||
url = ConvertRegularToRawURL( url )
|
||||
|
||||
|
||||
SetExtraURLInfo( url, tags )
|
||||
|
||||
urls.append( url )
|
||||
|
||||
except: pass
|
||||
except:
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -1545,6 +1545,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
|
|||
|
||||
ClientGUIMenus.AppendMenuItem( self, debug, 'make some popups', 'Throw some varied popups at the message manager, just to check it is working.', self._DebugMakeSomePopups )
|
||||
ClientGUIMenus.AppendMenuItem( self, debug, 'make a popup in five seconds', 'Throw a delayed popup at the message manager, giving you time to minimise or otherwise alter the client before it arrives.', wx.CallLater, 5000, HydrusData.ShowText, 'This is a delayed popup message.' )
|
||||
ClientGUIMenus.AppendMenuCheckItem( self, debug, 'callto report mode', 'Report whenever the thread pool is given a task.', HG.callto_report_mode, self._SwitchBoolean, 'callto_report_mode' )
|
||||
ClientGUIMenus.AppendMenuCheckItem( self, debug, 'db report mode', 'Have the db report query information, where supported.', HG.db_report_mode, self._SwitchBoolean, 'db_report_mode' )
|
||||
ClientGUIMenus.AppendMenuCheckItem( self, debug, 'db profile mode', 'Run detailed \'profiles\' on every database query and dump this information to the log (this is very useful for hydrus dev to have, if something is running slow for you!).', HG.db_profile_mode, self._SwitchBoolean, 'db_profile_mode' )
|
||||
ClientGUIMenus.AppendMenuCheckItem( self, debug, 'gui report mode', 'Have the gui report inside information, where supported.', HG.gui_report_mode, self._SwitchBoolean, 'gui_report_mode' )
|
||||
|
@ -2000,15 +2001,41 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
|
|||
|
||||
if self._next_new_page_index is None:
|
||||
|
||||
self._notebook.AddPage( page, page_name, select = True )
|
||||
new_page_goes = self._new_options.GetInteger( 'default_new_page_goes' )
|
||||
|
||||
current_index = self._notebook.GetSelection()
|
||||
|
||||
if current_index == wx.NOT_FOUND:
|
||||
|
||||
new_page_goes = CC.NEW_PAGE_GOES_FAR_LEFT
|
||||
|
||||
|
||||
if new_page_goes == CC.NEW_PAGE_GOES_FAR_LEFT:
|
||||
|
||||
insertion_index = 0
|
||||
|
||||
elif new_page_goes == CC.NEW_PAGE_GOES_LEFT_OF_CURRENT:
|
||||
|
||||
insertion_index = current_index
|
||||
|
||||
elif new_page_goes == CC.NEW_PAGE_GOES_RIGHT_OF_CURRENT:
|
||||
|
||||
insertion_index = current_index + 1
|
||||
|
||||
elif new_page_goes == CC.NEW_PAGE_GOES_FAR_RIGHT:
|
||||
|
||||
insertion_index = self._notebook.GetPageCount()
|
||||
|
||||
|
||||
else:
|
||||
|
||||
self._notebook.InsertPage( self._next_new_page_index, page, page_name, select = True )
|
||||
insertion_index = self._next_new_page_index
|
||||
|
||||
self._next_new_page_index = None
|
||||
|
||||
|
||||
self._notebook.InsertPage( insertion_index, page, page_name, select = True )
|
||||
|
||||
wx.CallAfter( page.SetSearchFocus )
|
||||
|
||||
|
||||
|
@ -2587,7 +2614,11 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
|
|||
|
||||
def _SwitchBoolean( self, name ):
|
||||
|
||||
if name == 'db_report_mode':
|
||||
if name == 'callto_report_mode':
|
||||
|
||||
HG.callto_report_mode = not HG.callto_report_mode
|
||||
|
||||
elif name == 'db_report_mode':
|
||||
|
||||
HG.db_report_mode = not HG.db_report_mode
|
||||
|
||||
|
@ -3225,7 +3256,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
|
|||
|
||||
( predicate_type, value, inclusive ) = predicate.GetInfo()
|
||||
|
||||
if value is None and predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_SIZE, HC.PREDICATE_TYPE_SYSTEM_DIMENSIONS, HC.PREDICATE_TYPE_SYSTEM_AGE, HC.PREDICATE_TYPE_SYSTEM_HASH, HC.PREDICATE_TYPE_SYSTEM_DURATION, HC.PREDICATE_TYPE_SYSTEM_NUM_WORDS, HC.PREDICATE_TYPE_SYSTEM_MIME, HC.PREDICATE_TYPE_SYSTEM_RATING, HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ]:
|
||||
if value is None and predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_SIZE, HC.PREDICATE_TYPE_SYSTEM_DIMENSIONS, HC.PREDICATE_TYPE_SYSTEM_AGE, HC.PREDICATE_TYPE_SYSTEM_HASH, HC.PREDICATE_TYPE_SYSTEM_DURATION, HC.PREDICATE_TYPE_SYSTEM_NUM_WORDS, HC.PREDICATE_TYPE_SYSTEM_MIME, HC.PREDICATE_TYPE_SYSTEM_RATING, HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ]:
|
||||
|
||||
with ClientGUIDialogs.DialogInputFileSystemPredicates( self, predicate_type ) as dlg:
|
||||
|
||||
|
|
|
@ -416,11 +416,24 @@ class Animation( wx.Window ):
|
|||
|
||||
if self._animation_bar is not None:
|
||||
|
||||
etype = event.GetEventType()
|
||||
|
||||
if not ( event.ShiftDown() or event.CmdDown() or event.AltDown() ):
|
||||
|
||||
if etype == wx.wxEVT_LEFT_DOWN:
|
||||
if event.LeftDClick():
|
||||
|
||||
hash = self._media.GetHash()
|
||||
mime = self._media.GetMime()
|
||||
|
||||
client_files_manager = HG.client_controller.GetClientFilesManager()
|
||||
|
||||
path = client_files_manager.GetFilePath( hash, mime )
|
||||
|
||||
HydrusPaths.LaunchFile( path )
|
||||
|
||||
self.Pause()
|
||||
|
||||
return
|
||||
|
||||
elif event.LeftDown():
|
||||
|
||||
self.PausePlay()
|
||||
|
||||
|
@ -1545,9 +1558,9 @@ class Canvas( wx.Window ):
|
|||
|
||||
if len( HG.client_controller.GetServicesManager().GetServices( HC.RATINGS_SERVICES ) ) > 0:
|
||||
|
||||
if self._current_media is not None:
|
||||
with ClientGUIDialogsManage.DialogManageRatings( self, ( self._current_media, ) ) as dlg:
|
||||
|
||||
with ClientGUIDialogsManage.DialogManageRatings( self, ( self._current_media, ) ) as dlg: dlg.ShowModal()
|
||||
dlg.ShowModal()
|
||||
|
||||
|
||||
|
||||
|
@ -1563,7 +1576,7 @@ class Canvas( wx.Window ):
|
|||
|
||||
self._manage_tags_panel.SetFocus()
|
||||
|
||||
elif self._current_media is not None:
|
||||
else:
|
||||
|
||||
# take any focus away from hover window, which will mess up window order when it hides due to the new frame
|
||||
self.SetFocus()
|
||||
|
@ -1581,6 +1594,25 @@ class Canvas( wx.Window ):
|
|||
|
||||
|
||||
|
||||
def _ManageURLs( self ):
|
||||
|
||||
if self._current_media is None:
|
||||
|
||||
return
|
||||
|
||||
|
||||
title = 'manage known urls'
|
||||
|
||||
with ClientGUITopLevelWindows.DialogManage( self, title ) as dlg:
|
||||
|
||||
panel = ClientGUIScrolledPanelsManagement.ManageURLsPanel( dlg, self._current_media )
|
||||
|
||||
dlg.SetPanel( panel )
|
||||
|
||||
dlg.ShowModal()
|
||||
|
||||
|
||||
|
||||
def _MouseIsOverFlash( self ):
|
||||
|
||||
if self._current_media is not None and self._current_media.GetMime() == HC.APPLICATION_FLASH:
|
||||
|
@ -2426,45 +2458,44 @@ class CanvasPanel( Canvas ):
|
|||
|
||||
ClientGUIMenus.AppendSeparator( menu )
|
||||
|
||||
manage_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'tags', 'Manage this file\'s tags.', self._ManageTags )
|
||||
|
||||
if i_can_post_ratings:
|
||||
|
||||
manage_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'tags', 'Manage tags for the selected files.', self._ManageTags )
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'ratings', 'Manage ratings for the selected files.', self._ManageRatings )
|
||||
|
||||
ClientGUIMenus.AppendMenu( menu, manage_menu, 'manage' )
|
||||
|
||||
else:
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'manage tags', 'Manage tags for the selected files.', self._ManageTags )
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'ratings', 'Manage this file\'s ratings.', self._ManageRatings )
|
||||
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'known urls', 'Manage this file\'s known URLs.', self._ManageURLs )
|
||||
|
||||
ClientGUIMenus.AppendMenu( menu, manage_menu, 'manage' )
|
||||
|
||||
ClientGUIMenus.AppendSeparator( menu )
|
||||
|
||||
if self._current_media.HasInbox():
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'archive', 'Archive the selected files.', self._Archive )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'archive', 'Archive this file.', self._Archive )
|
||||
|
||||
|
||||
if self._current_media.HasArchive():
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'inbox', 'Send the selected files back to the inbox.', self._Inbox )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'inbox', 'Send this files back to the inbox.', self._Inbox )
|
||||
|
||||
|
||||
if CC.LOCAL_FILE_SERVICE_KEY in locations_manager.GetCurrent():
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'delete', 'Delete the selected files.', self._Delete, CC.LOCAL_FILE_SERVICE_KEY )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'delete', 'Delete this file.', self._Delete, CC.LOCAL_FILE_SERVICE_KEY )
|
||||
|
||||
elif CC.TRASH_SERVICE_KEY in locations_manager.GetCurrent():
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'delete completely', 'Physically delete the selected files from disk.', self._Delete, CC.TRASH_SERVICE_KEY )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'undelete', 'Take the selected files out of the trash.', self._Undelete )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'delete completely', 'Physically delete this file from disk.', self._Delete, CC.TRASH_SERVICE_KEY )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'undelete', 'Take this file out of the trash.', self._Undelete )
|
||||
|
||||
|
||||
ClientGUIMenus.AppendSeparator( menu )
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'open externally', 'Open the file in your OS\'s default program.', self._OpenExternally )
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'open externally', 'Open this file in your OS\'s default program.', self._OpenExternally )
|
||||
|
||||
urls = self._current_media.GetLocationsManager().GetURLs()
|
||||
|
||||
|
@ -2495,23 +2526,23 @@ class CanvasPanel( Canvas ):
|
|||
|
||||
copy_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_menu, 'file', 'Copy the file to your clipboard.', self._CopyFileToClipboard )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_menu, 'file', 'Copy this file to your clipboard.', self._CopyFileToClipboard )
|
||||
|
||||
copy_hash_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'sha256 (hydrus default)', 'Open the file\'s SHA256 hash.', self._CopyHashToClipboard, 'sha256' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'md5', 'Open the file\'s MD5 hash.', self._CopyHashToClipboard, 'md5' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'sha1', 'Open the file\'s SHA1 hash.', self._CopyHashToClipboard, 'sha1' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'sha512', 'Open the file\'s SHA512 hash.', self._CopyHashToClipboard, 'sha512' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'sha256 (hydrus default)', 'Open this file\'s SHA256 hash.', self._CopyHashToClipboard, 'sha256' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'md5', 'Open this file\'s MD5 hash.', self._CopyHashToClipboard, 'md5' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'sha1', 'Open this file\'s SHA1 hash.', self._CopyHashToClipboard, 'sha1' )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_hash_menu, 'sha512', 'Open this file\'s SHA512 hash.', self._CopyHashToClipboard, 'sha512' )
|
||||
|
||||
ClientGUIMenus.AppendMenu( copy_menu, copy_hash_menu, 'hash' )
|
||||
|
||||
if self._current_media.GetMime() in HC.IMAGES and self._current_media.GetDuration() is None:
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_menu, 'image', 'Copy the file to your clipboard as a bmp.', self._CopyBMPToClipboard )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_menu, 'image', 'Copy this file to your clipboard as a bmp.', self._CopyBMPToClipboard )
|
||||
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_menu, 'path', 'Copy the file\'s path to your clipboard.', self._CopyPathToClipboard )
|
||||
ClientGUIMenus.AppendMenuItem( self, copy_menu, 'path', 'Copy this file\'s path to your clipboard.', self._CopyPathToClipboard )
|
||||
|
||||
ClientGUIMenus.AppendMenu( share_menu, copy_menu, 'copy' )
|
||||
|
||||
|
@ -4718,19 +4749,18 @@ class CanvasMediaListBrowser( CanvasMediaListNavigable ):
|
|||
ClientGUIMenus.AppendSeparator( menu )
|
||||
|
||||
|
||||
manage_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'tags', 'Manage this file\'s tags.', self._ManageTags )
|
||||
|
||||
if i_can_post_ratings:
|
||||
|
||||
manage_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'tags', 'Manage this file\'s tags.', self._ManageTags )
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'ratings', 'Manage this file\'s ratings.', self._ManageRatings )
|
||||
|
||||
ClientGUIMenus.AppendMenu( menu, manage_menu, 'manage' )
|
||||
|
||||
else:
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, 'manage tags', 'Manage this file\'s tags.', self._ManageTags )
|
||||
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'known urls', 'Manage this file\'s known urls.', self._ManageURLs )
|
||||
|
||||
ClientGUIMenus.AppendMenu( menu, manage_menu, 'manage' )
|
||||
|
||||
ClientGUIMenus.AppendSeparator( menu )
|
||||
|
||||
|
@ -5095,13 +5125,25 @@ class MediaContainer( wx.Window ):
|
|||
|
||||
if direction == 1:
|
||||
|
||||
if current_frame_index == num_frames - 1: current_frame_index = 0
|
||||
else: current_frame_index += 1
|
||||
if current_frame_index == num_frames - 1:
|
||||
|
||||
current_frame_index = 0
|
||||
|
||||
else:
|
||||
|
||||
current_frame_index += 1
|
||||
|
||||
|
||||
else:
|
||||
|
||||
if current_frame_index == 0: current_frame_index = num_frames - 1
|
||||
else: current_frame_index -= 1
|
||||
if current_frame_index == 0:
|
||||
|
||||
current_frame_index = num_frames - 1
|
||||
|
||||
else:
|
||||
|
||||
current_frame_index -= 1
|
||||
|
||||
|
||||
|
||||
self._media_window.GotoFrame( current_frame_index )
|
||||
|
|
|
@ -3809,7 +3809,14 @@ class ThreadToGUIUpdater( object ):
|
|||
|
||||
with self._lock:
|
||||
|
||||
self._func( *self._args, **self._kwargs )
|
||||
try:
|
||||
|
||||
self._func( *self._args, **self._kwargs )
|
||||
|
||||
except HydrusExceptions.ShutdownException:
|
||||
|
||||
pass
|
||||
|
||||
|
||||
self._dirty_count = 0
|
||||
|
||||
|
@ -3898,51 +3905,7 @@ class TimeDeltaButton( wx.Button ):
|
|||
|
||||
else:
|
||||
|
||||
if self._show_days:
|
||||
|
||||
days = value / 86400
|
||||
|
||||
if days > 0:
|
||||
|
||||
text_components.append( HydrusData.ConvertIntToPrettyString( days ) + ' days' )
|
||||
|
||||
|
||||
value %= 86400
|
||||
|
||||
|
||||
if self._show_hours:
|
||||
|
||||
hours = value / 3600
|
||||
|
||||
if hours > 0:
|
||||
|
||||
text_components.append( HydrusData.ConvertIntToPrettyString( hours ) + ' hours' )
|
||||
|
||||
|
||||
value %= 3600
|
||||
|
||||
|
||||
if self._show_minutes:
|
||||
|
||||
minutes = value / 60
|
||||
|
||||
if minutes > 0:
|
||||
|
||||
text_components.append( HydrusData.ConvertIntToPrettyString( minutes ) + ' minutes' )
|
||||
|
||||
|
||||
value %= 60
|
||||
|
||||
|
||||
if self._show_seconds:
|
||||
|
||||
if value > 0 or len( text_components ) == 0:
|
||||
|
||||
text_components.append( HydrusData.ConvertIntToPrettyString( value ) + ' seconds' )
|
||||
|
||||
|
||||
|
||||
text = ' '.join( text_components )
|
||||
text = HydrusData.ConvertTimeDeltaToPrettyString( value )
|
||||
|
||||
|
||||
self.SetLabelText( text )
|
||||
|
|
|
@ -160,7 +160,7 @@ class BandwidthRulesCtrl( ClientGUICommon.StaticBox ):
|
|||
|
||||
self._bandwidth_type.Bind( wx.EVT_CHOICE, self.EventBandwidth )
|
||||
|
||||
self._time_delta = ClientGUICommon.TimeDeltaButton( self, min = 3600, days = True, hours = True, monthly_allowed = True )
|
||||
self._time_delta = ClientGUICommon.TimeDeltaButton( self, min = 1, days = True, hours = True, minutes = True, seconds = True, monthly_allowed = True )
|
||||
|
||||
self._max_allowed = wx.SpinCtrl( self, min = 1, max = 1024 * 1024 * 1024 )
|
||||
|
||||
|
|
|
@ -587,6 +587,10 @@ class DialogInputFileSystemPredicates( Dialog ):
|
|||
|
||||
pred_classes.append( ClientGUIPredicates.PanelPredicateSystemSize )
|
||||
|
||||
elif predicate_type == HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER:
|
||||
|
||||
pred_classes.append( ClientGUIPredicates.PanelPredicateSystemTagAsNumber )
|
||||
|
||||
elif predicate_type == HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS:
|
||||
|
||||
pred_classes.append( ClientGUIPredicates.PanelPredicateSystemDuplicateRelationships )
|
||||
|
|
|
@ -2267,7 +2267,7 @@ class ManagementPanelImporterThreadWatcher( ManagementPanelImporter ):
|
|||
self._thread_times_to_check.SetValue( times_to_check )
|
||||
self._thread_times_to_check.Bind( wx.EVT_SPINCTRL, self.EventTimesToCheck )
|
||||
|
||||
self._thread_check_period = ClientGUICommon.TimeDeltaButton( self._options_panel, min = 30, hours = True, minutes = True, seconds = True )
|
||||
self._thread_check_period = ClientGUICommon.TimeDeltaButton( self._options_panel, min = 30, days = True, hours = True, minutes = True, seconds = True )
|
||||
self._thread_check_period.SetValue( check_period )
|
||||
self._thread_check_period.Bind( ClientGUICommon.EVT_TIME_DELTA, self.EventCheckPeriod )
|
||||
|
||||
|
|
|
@ -879,6 +879,27 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
|
|||
|
||||
|
||||
|
||||
def _ManageURLs( self ):
|
||||
|
||||
if self._focussed_media is None:
|
||||
|
||||
return
|
||||
|
||||
|
||||
title = 'manage known urls'
|
||||
|
||||
with ClientGUITopLevelWindows.DialogManage( self, title ) as dlg:
|
||||
|
||||
panel = ClientGUIScrolledPanelsManagement.ManageURLsPanel( dlg, self._focussed_media.GetDisplayMedia() )
|
||||
|
||||
dlg.SetPanel( panel )
|
||||
|
||||
dlg.ShowModal()
|
||||
|
||||
|
||||
self.SetFocus()
|
||||
|
||||
|
||||
def _ModifyUploaders( self, file_service_key ):
|
||||
|
||||
wx.MessageBox( 'this does not work yet!' )
|
||||
|
@ -3026,28 +3047,18 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
#
|
||||
|
||||
manage_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, manage_tags_phrase, 'Manage tags for the selected files.', self._ManageTags )
|
||||
|
||||
if i_can_post_ratings:
|
||||
|
||||
manage_menu = wx.Menu()
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, manage_tags_phrase, 'Manage tags for the selected files.', self._ManageTags )
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, manage_ratings_phrase, 'Manage ratings for the selected files.', self._ManageRatings )
|
||||
|
||||
ClientGUIMenus.AppendMenu( menu, manage_menu, 'manage' )
|
||||
|
||||
else:
|
||||
|
||||
if multiple_selected:
|
||||
|
||||
phrase = 'manage files\' tags'
|
||||
|
||||
else:
|
||||
|
||||
phrase = 'manage file\'s tags'
|
||||
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, menu, phrase, 'Manage tags for the selected files.', self._ManageTags )
|
||||
|
||||
|
||||
ClientGUIMenus.AppendMenuItem( self, manage_menu, 'file\'s known urls', 'Manage urls for the focused file.', self._ManageURLs )
|
||||
|
||||
ClientGUIMenus.AppendMenu( menu, manage_menu, 'manage' )
|
||||
|
||||
#
|
||||
|
||||
|
|
|
@ -727,6 +727,15 @@ class ReviewServicePanel( wx.Panel ):
|
|||
|
||||
#
|
||||
|
||||
new_options = HG.client_controller.GetNewOptions()
|
||||
|
||||
if not new_options.GetBoolean( 'advanced_mode' ):
|
||||
|
||||
self._sync_now_button.Hide()
|
||||
self._export_updates_button.Hide()
|
||||
self._reset_button.Hide()
|
||||
|
||||
|
||||
hbox = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
hbox.AddF( self._sync_now_button, CC.FLAGS_LONE_BUTTON )
|
||||
|
|
|
@ -123,7 +123,6 @@ class PanelPredicateSystemDuplicateRelationships( PanelPredicateSystem ):
|
|||
return info
|
||||
|
||||
|
||||
|
||||
class PanelPredicateSystemDuration( PanelPredicateSystem ):
|
||||
|
||||
PREDICATE_TYPE = HC.PREDICATE_TYPE_SYSTEM_DURATION
|
||||
|
@ -791,6 +790,49 @@ class PanelPredicateSystemSize( PanelPredicateSystem ):
|
|||
return info
|
||||
|
||||
|
||||
class PanelPredicateSystemTagAsNumber( PanelPredicateSystem ):
|
||||
|
||||
PREDICATE_TYPE = HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER
|
||||
|
||||
def __init__( self, parent ):
|
||||
|
||||
PanelPredicateSystem.__init__( self, parent )
|
||||
|
||||
self._namespace = wx.TextCtrl( self )
|
||||
|
||||
choices = [ '<', u'\u2248', '>' ]
|
||||
|
||||
self._sign = wx.RadioBox( self, choices = choices, style = wx.RA_SPECIFY_COLS )
|
||||
|
||||
self._num = wx.SpinCtrl( self, min = -99999999, max = 99999999 )
|
||||
|
||||
#
|
||||
|
||||
self._namespace.SetValue( 'page' )
|
||||
self._sign.SetStringSelection( '>' )
|
||||
self._num.SetValue( 0 )
|
||||
|
||||
#
|
||||
|
||||
hbox = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
hbox.AddF( ClientGUICommon.BetterStaticText( self, 'system:tag as number' ), CC.FLAGS_VCENTER )
|
||||
hbox.AddF( self._namespace, CC.FLAGS_VCENTER )
|
||||
hbox.AddF( self._sign, CC.FLAGS_VCENTER )
|
||||
hbox.AddF( self._num, CC.FLAGS_VCENTER )
|
||||
|
||||
self.SetSizer( hbox )
|
||||
|
||||
wx.CallAfter( self._num.SetFocus )
|
||||
|
||||
|
||||
def GetInfo( self ):
|
||||
|
||||
info = ( self._namespace.GetValue(), self._sign.GetStringSelection(), self._num.GetValue() )
|
||||
|
||||
return info
|
||||
|
||||
|
||||
class PanelPredicateSystemWidth( PanelPredicateSystem ):
|
||||
|
||||
PREDICATE_TYPE = HC.PREDICATE_TYPE_SYSTEM_WIDTH
|
||||
|
|
|
@ -34,6 +34,7 @@ import itertools
|
|||
import os
|
||||
import random
|
||||
import traceback
|
||||
import urlparse
|
||||
import wx
|
||||
|
||||
class ManageAccountTypesPanel( ClientGUIScrolledPanels.ManagePanel ):
|
||||
|
@ -1804,7 +1805,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
self._thread_times_to_check = wx.SpinCtrl( thread_checker, min = 0, max = 65536 )
|
||||
self._thread_times_to_check.SetToolTipString( 'how many times the thread checker will check' )
|
||||
|
||||
self._thread_check_period = ClientGUICommon.TimeDeltaButton( thread_checker, min = 30, hours = True, minutes = True, seconds = True )
|
||||
self._thread_check_period = ClientGUICommon.TimeDeltaButton( thread_checker, min = 30, days = True, hours = True, minutes = True, seconds = True )
|
||||
self._thread_check_period.SetToolTipString( 'how long the checker will wait between checks' )
|
||||
|
||||
#
|
||||
|
@ -2351,6 +2352,13 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
|
||||
self._default_gui_session = wx.Choice( self )
|
||||
|
||||
self._default_new_page_goes = ClientGUICommon.BetterChoice( self )
|
||||
|
||||
for value in [ CC.NEW_PAGE_GOES_FAR_LEFT, CC.NEW_PAGE_GOES_LEFT_OF_CURRENT, CC.NEW_PAGE_GOES_RIGHT_OF_CURRENT, CC.NEW_PAGE_GOES_FAR_RIGHT ]:
|
||||
|
||||
self._default_new_page_goes.Append( CC.new_page_goes_string_lookup[ value ], value )
|
||||
|
||||
|
||||
self._confirm_client_exit = wx.CheckBox( self )
|
||||
self._confirm_trash = wx.CheckBox( self )
|
||||
self._confirm_archive = wx.CheckBox( self )
|
||||
|
@ -2394,6 +2402,8 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
try: self._default_gui_session.SetStringSelection( HC.options[ 'default_gui_session' ] )
|
||||
except: self._default_gui_session.SetSelection( 0 )
|
||||
|
||||
self._default_new_page_goes.SelectClientData( self._new_options.GetInteger( 'default_new_page_goes' ) )
|
||||
|
||||
self._confirm_client_exit.SetValue( HC.options[ 'confirm_client_exit' ] )
|
||||
|
||||
self._confirm_trash.SetValue( HC.options[ 'confirm_trash' ] )
|
||||
|
@ -2430,6 +2440,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
|
||||
rows.append( ( 'Main gui title: ', self._main_gui_title ) )
|
||||
rows.append( ( 'Default session on startup: ', self._default_gui_session ) )
|
||||
rows.append( ( 'By default, new pages: ', self._default_new_page_goes ) )
|
||||
rows.append( ( 'Confirm client exit: ', self._confirm_client_exit ) )
|
||||
rows.append( ( 'Confirm sending files to trash: ', self._confirm_trash ) )
|
||||
rows.append( ( 'Confirm sending more than one file to archive or inbox: ', self._confirm_archive ) )
|
||||
|
@ -2516,6 +2527,8 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
|
||||
self._new_options.SetString( 'main_gui_title', title )
|
||||
|
||||
self._new_options.SetInteger( 'default_new_page_goes', self._default_new_page_goes.GetChoice() )
|
||||
|
||||
HG.client_controller.pub( 'main_gui_title', title )
|
||||
|
||||
self._new_options.SetBoolean( 'show_thumbnail_title_banner', self._show_thumbnail_title_banner.GetValue() )
|
||||
|
@ -5414,6 +5427,8 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
|
||||
def _AddTags( self, tags, only_add = False, only_remove = False, forced_reason = None ):
|
||||
|
||||
tags = HydrusTags.CleanTags( tags )
|
||||
|
||||
if not self._i_am_local_tag_service and self._service.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_OVERRULE ):
|
||||
|
||||
forced_reason = 'admin'
|
||||
|
@ -5887,6 +5902,169 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
|||
|
||||
|
||||
|
||||
class ManageURLsPanel( ClientGUIScrolledPanels.ManagePanel ):
|
||||
|
||||
def __init__( self, parent, media ):
|
||||
|
||||
ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
|
||||
|
||||
self._media = media
|
||||
|
||||
self._urls_listbox = wx.ListBox( self, style = wx.LB_SORT | wx.LB_SINGLE )
|
||||
self._urls_listbox.Bind( wx.EVT_LISTBOX_DCLICK, self.EventListDoubleClick )
|
||||
|
||||
ideal_size = ClientData.ConvertTextToPixels( self._urls_listbox, ( 120, 10 ) )
|
||||
|
||||
self._urls_listbox.SetBestFittingSize( ideal_size )
|
||||
|
||||
self._url_input = wx.TextCtrl( self, style = wx.TE_PROCESS_ENTER )
|
||||
self._url_input.Bind( wx.EVT_CHAR_HOOK, self.EventInputCharHook )
|
||||
|
||||
self._urls_to_add = set()
|
||||
self._urls_to_remove = set()
|
||||
|
||||
#
|
||||
|
||||
locations_manager = self._media.GetLocationsManager()
|
||||
|
||||
self._original_urls = set( locations_manager.GetURLs() )
|
||||
|
||||
for url in self._original_urls:
|
||||
|
||||
self._urls_listbox.Append( url, url )
|
||||
|
||||
|
||||
self._current_urls = set( self._original_urls )
|
||||
|
||||
#
|
||||
|
||||
vbox = wx.BoxSizer( wx.VERTICAL )
|
||||
|
||||
vbox.AddF( self._urls_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
vbox.AddF( self._url_input, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
|
||||
self.SetSizer( vbox )
|
||||
|
||||
wx.CallAfter( self._url_input.SetFocus )
|
||||
|
||||
|
||||
def _EnterURL( self, url ):
|
||||
|
||||
if url in self._current_urls:
|
||||
|
||||
for index in range( self._urls_listbox.GetCount() ):
|
||||
|
||||
existing_url = self._urls_listbox.GetClientData( index )
|
||||
|
||||
if existing_url == url:
|
||||
|
||||
self._RemoveURL( index )
|
||||
|
||||
return
|
||||
|
||||
|
||||
|
||||
else:
|
||||
|
||||
self._urls_listbox.Append( url, url )
|
||||
|
||||
self._current_urls.add( url )
|
||||
|
||||
if url not in self._original_urls:
|
||||
|
||||
self._urls_to_add.add( url )
|
||||
|
||||
|
||||
|
||||
|
||||
def _RemoveURL( self, index ):
|
||||
|
||||
url = self._urls_listbox.GetClientData( index )
|
||||
|
||||
self._urls_listbox.Delete( index )
|
||||
|
||||
self._current_urls.discard( url )
|
||||
|
||||
self._urls_to_add.discard( url )
|
||||
|
||||
if url in self._original_urls:
|
||||
|
||||
self._urls_to_remove.add( url )
|
||||
|
||||
|
||||
|
||||
def EventListDoubleClick( self, event ):
|
||||
|
||||
selection = self._urls_listbox.GetSelection()
|
||||
|
||||
if selection != wx.NOT_FOUND:
|
||||
|
||||
url = self._urls_listbox.GetClientData( selection )
|
||||
|
||||
self._RemoveURL( selection )
|
||||
|
||||
self._url_input.SetValue( url )
|
||||
|
||||
|
||||
|
||||
def EventInputCharHook( self, event ):
|
||||
|
||||
( modifier, key ) = ClientData.ConvertKeyEventToSimpleTuple( event )
|
||||
|
||||
if key in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ):
|
||||
|
||||
url = self._url_input.GetValue()
|
||||
|
||||
if url == '':
|
||||
|
||||
self.GetParent().DoOK()
|
||||
|
||||
else:
|
||||
|
||||
parse_result = urlparse.urlparse( url )
|
||||
|
||||
if parse_result.scheme == '':
|
||||
|
||||
wx.MessageBox( 'Could not parse that URL! Please make sure you include http:// or https://.' )
|
||||
|
||||
return
|
||||
|
||||
|
||||
self._EnterURL( url )
|
||||
|
||||
self._url_input.SetValue( '' )
|
||||
|
||||
|
||||
else:
|
||||
|
||||
event.Skip()
|
||||
|
||||
|
||||
|
||||
def CommitChanges( self ):
|
||||
|
||||
hash = self._media.GetHash()
|
||||
|
||||
content_updates = []
|
||||
|
||||
if len( self._urls_to_add ) > 0:
|
||||
|
||||
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( hash, self._urls_to_add ) ) )
|
||||
|
||||
|
||||
if len( self._urls_to_remove ) > 0:
|
||||
|
||||
content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_DELETE, ( hash, self._urls_to_remove ) ) )
|
||||
|
||||
|
||||
if len( content_updates ) > 0:
|
||||
|
||||
service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : content_updates }
|
||||
|
||||
HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
|
||||
|
||||
|
||||
|
||||
class RepairFileSystemPanel( ClientGUIScrolledPanels.ManagePanel ):
|
||||
|
||||
def __init__( self, parent, missing_locations ):
|
||||
|
|
|
@ -388,6 +388,11 @@ class DialogThatTakesScrollablePanel( DialogThatResizes ):
|
|||
raise NotImplementedError()
|
||||
|
||||
|
||||
def DoOK( self ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def EventChildSizeChanged( self, event ):
|
||||
|
||||
if self._panel is not None:
|
||||
|
@ -418,7 +423,7 @@ class DialogThatTakesScrollablePanel( DialogThatResizes ):
|
|||
|
||||
if command == 'ok':
|
||||
|
||||
self.EventOK( None )
|
||||
self.DoOK()
|
||||
|
||||
else:
|
||||
|
||||
|
@ -429,7 +434,7 @@ class DialogThatTakesScrollablePanel( DialogThatResizes ):
|
|||
|
||||
def EventOK( self, event ):
|
||||
|
||||
raise NotImplementedError()
|
||||
self.DoOK()
|
||||
|
||||
|
||||
def SetPanel( self, panel ):
|
||||
|
@ -477,7 +482,7 @@ class DialogNullipotent( DialogThatTakesScrollablePanelClose ):
|
|||
DialogThatTakesScrollablePanelClose.__init__( self, parent, title )
|
||||
|
||||
|
||||
def EventOK( self, event ):
|
||||
def DoOK( self ):
|
||||
|
||||
SaveTLWSizeAndPosition( self, self._frame_key )
|
||||
|
||||
|
@ -513,7 +518,7 @@ class DialogEdit( DialogThatTakesScrollablePanelApplyCancel ):
|
|||
DialogThatTakesScrollablePanelApplyCancel.__init__( self, parent, title )
|
||||
|
||||
|
||||
def EventOK( self, event ):
|
||||
def DoOK( self ):
|
||||
|
||||
try:
|
||||
|
||||
|
@ -531,7 +536,7 @@ class DialogEdit( DialogThatTakesScrollablePanelApplyCancel ):
|
|||
|
||||
class DialogManage( DialogThatTakesScrollablePanelApplyCancel ):
|
||||
|
||||
def EventOK( self, event ):
|
||||
def DoOK( self ):
|
||||
|
||||
try:
|
||||
|
||||
|
|
|
@ -12,16 +12,14 @@ local_booru_css = FileResource( os.path.join( HC.STATIC_DIR, 'local_booru_style.
|
|||
|
||||
class HydrusResourceBooru( HydrusServerResources.HydrusResource ):
|
||||
|
||||
def _recordDataUsage( self, request ):
|
||||
def _reportDataUsed( self, request, num_bytes ):
|
||||
|
||||
path = request.path[1:] # /account -> account
|
||||
self._service.ReportDataUsed( num_bytes )
|
||||
|
||||
if request.method == 'GET': method = HC.GET
|
||||
else: method = HC.POST
|
||||
|
||||
def _reportRequestUsed( self, request ):
|
||||
|
||||
num_bytes = request.hydrus_request_data_usage
|
||||
|
||||
self._service.RequestMade( num_bytes )
|
||||
self._service.ReportRequestUsed()
|
||||
|
||||
|
||||
def _checkService( self, request ):
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import collections
|
||||
import cStringIO
|
||||
import HydrusConstants as HC
|
||||
import HydrusExceptions
|
||||
import HydrusNetwork
|
||||
|
@ -8,13 +9,16 @@ import HydrusSerialisable
|
|||
import errno
|
||||
import httplib
|
||||
import os
|
||||
import random
|
||||
import requests
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
import urllib3
|
||||
from urllib3.exceptions import InsecureRequestWarning
|
||||
import socket
|
||||
import socks
|
||||
import ssl
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import urllib
|
||||
import urlparse
|
||||
import yaml
|
||||
|
@ -22,7 +26,7 @@ import HydrusData
|
|||
import itertools
|
||||
import HydrusGlobals as HG
|
||||
|
||||
requests.packages.urllib3.disable_warnings( InsecureRequestWarning )
|
||||
urllib3.disable_warnings( InsecureRequestWarning )
|
||||
|
||||
def AddHydrusCredentialsToHeaders( credentials, request_headers ):
|
||||
|
||||
|
@ -1062,6 +1066,57 @@ class BandwidthManager( HydrusSerialisable.SerialisableBase ):
|
|||
return result
|
||||
|
||||
|
||||
def _GetSerialisableInfo( self ):
|
||||
|
||||
serialisable_global_tracker = self._global_bandwidth_tracker.GetSerialisableTuple()
|
||||
serialisable_global_rules = self._global_bandwidth_rules.GetSerialisableTuple()
|
||||
|
||||
all_serialisable_trackers = [ ( domain, tracker.GetSerialisableTuple() ) for ( domain, tracker ) in self._domains_to_bandwidth_trackers ]
|
||||
all_serialisable_rules = [ ( domain, rules.GetSerialisableTuple() ) for ( domain, rules ) in self._domains_to_bandwidth_rules ]
|
||||
|
||||
return ( serialisable_global_tracker, serialisable_global_rules, all_serialisable_trackers, all_serialisable_rules )
|
||||
|
||||
|
||||
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
|
||||
|
||||
( serialisable_global_tracker, serialisable_global_rules, all_serialisable_trackers, all_serialisable_rules ) = serialisable_info
|
||||
|
||||
self._global_bandwidth_tracker = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_global_tracker )
|
||||
self._global_bandwidth_rules = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_global_rules )
|
||||
|
||||
for ( domain, serialisable_tracker ) in all_serialisable_trackers:
|
||||
|
||||
self._domains_to_bandwidth_trackers[ domain ] = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_tracker )
|
||||
|
||||
|
||||
for ( domain, serialisable_rules ) in all_serialisable_rules:
|
||||
|
||||
self._domains_to_bandwidth_rules[ domain ] = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_rules )
|
||||
|
||||
|
||||
|
||||
def CanStartGlobally( self ):
|
||||
|
||||
return self.CanStartURL( None )
|
||||
|
||||
|
||||
def CanStartURL( self, url ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
for ( bandwidth_tracker, bandwidth_rules ) in self._GetApplicableTrackersAndRules( url ):
|
||||
|
||||
if not bandwidth_rules.CanStart( bandwidth_tracker ):
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
|
||||
def GetEstimateInfo( self, domain = None ):
|
||||
|
||||
with self._lock:
|
||||
|
@ -1086,29 +1141,34 @@ class BandwidthManager( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
def OK( self, url = None ):
|
||||
def ReportDataUsedGlobally( self, num_bytes ):
|
||||
|
||||
self.ReportDataUsedURL( None, num_bytes )
|
||||
|
||||
|
||||
def ReportDataUsedURL( self, url, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
for ( bandwidth_tracker, bandwidth_rules ) in self._GetApplicableTrackersAndRules( url ):
|
||||
|
||||
if not bandwidth_rules.OK( bandwidth_tracker ):
|
||||
|
||||
return False
|
||||
|
||||
bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
def RequestMade( self, url, num_bytes ):
|
||||
def ReportRequestUsedGlobally( self ):
|
||||
|
||||
self.ReportRequestUsedURL( None )
|
||||
|
||||
|
||||
def ReportRequestUsedURL( self, url ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
for ( bandwidth_tracker, bandwidth_rules ) in self._GetApplicableTrackersAndRules( url ):
|
||||
|
||||
bandwidth_tracker.RequestMade( num_bytes )
|
||||
bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
|
||||
|
||||
|
@ -1132,6 +1192,8 @@ HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIAL
|
|||
|
||||
class NetworkEngine( object ):
|
||||
|
||||
MAX_JOBS = 10 # turn this into an option
|
||||
|
||||
def __init__( self, controller ):
|
||||
|
||||
self._controller = controller
|
||||
|
@ -1140,19 +1202,22 @@ class NetworkEngine( object ):
|
|||
|
||||
self._new_work_to_do = threading.Event()
|
||||
|
||||
self._new_network_jobs = []
|
||||
self._throttled_jobs = []
|
||||
self._jobs_bandwidth_throttled = []
|
||||
self._jobs_login_throttled = []
|
||||
self._current_login_process = None
|
||||
self._jobs_ready_to_start = []
|
||||
self._jobs_downloading = []
|
||||
|
||||
self._local_shutdown = False
|
||||
|
||||
# start main loop
|
||||
self._controller.CallToThread( self.MainLoop )
|
||||
|
||||
|
||||
def AddJob( self, job ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._new_network_jobs.append( job )
|
||||
self._jobs_bandwidth_throttled.append( job )
|
||||
|
||||
|
||||
self._new_work_to_do.set()
|
||||
|
@ -1162,41 +1227,135 @@ class NetworkEngine( object ):
|
|||
|
||||
while not ( self._local_shutdown or self._controller.ModelIsShutdown() ):
|
||||
|
||||
def ProcessBandwidthJob( job ):
|
||||
|
||||
if job.IsDone():
|
||||
|
||||
return False
|
||||
|
||||
elif job.IsAsleep():
|
||||
|
||||
return True
|
||||
|
||||
elif not job.BandwidthOK():
|
||||
|
||||
job.SetStatus( u'waiting on bandwidth\u2026' )
|
||||
|
||||
job.Sleep( 5 )
|
||||
|
||||
return True
|
||||
|
||||
else:
|
||||
|
||||
self._jobs_login_throttled.append( job )
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
def ProcessLoginJob( job ):
|
||||
|
||||
if job.IsDone():
|
||||
|
||||
return False
|
||||
|
||||
elif job.IsAsleep():
|
||||
|
||||
return True
|
||||
|
||||
elif job.NeedsLogin():
|
||||
|
||||
if job.CanLogin():
|
||||
|
||||
if self._current_login_process is None:
|
||||
|
||||
login_process = job.GenerateLoginProcess()
|
||||
|
||||
self._controller.CallToThread( login_process.Start )
|
||||
|
||||
self._current_login_process = login_process
|
||||
|
||||
job.SetStatus( u'logging in\u2026' )
|
||||
|
||||
else:
|
||||
|
||||
job.SetStatus( u'waiting on login\u2026' )
|
||||
|
||||
job.Sleep( 5 )
|
||||
|
||||
|
||||
else:
|
||||
|
||||
job.SetStatus( 'unable to login!' )
|
||||
|
||||
job.Sleep( 15 )
|
||||
|
||||
|
||||
return True
|
||||
|
||||
else:
|
||||
|
||||
self._jobs_ready_to_start.append( job )
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
def ProcessCurrentLoginJob():
|
||||
|
||||
if self._current_login_process is not None:
|
||||
|
||||
if self._current_login_process.IsDone():
|
||||
|
||||
self._current_login_process = None
|
||||
|
||||
|
||||
|
||||
|
||||
def ProcessReadyJob( job ):
|
||||
|
||||
if job.IsDone():
|
||||
|
||||
return False
|
||||
|
||||
elif len( self._jobs_downloading ) < self.MAX_JOBS:
|
||||
|
||||
self._controller.CallToThread( job.Start )
|
||||
|
||||
self._jobs_downloading.append( job )
|
||||
|
||||
return False
|
||||
|
||||
else:
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
def ProcessDownloadingJob( job ):
|
||||
|
||||
if job.IsDone():
|
||||
|
||||
return False
|
||||
|
||||
else:
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._throttled_jobs.extend( self._new_network_jobs )
|
||||
self._jobs_bandwidth_throttled = filter( ProcessBandwidthJob, self._jobs_bandwidth_throttled )
|
||||
|
||||
self._new_network_jobs = []
|
||||
self._jobs_login_throttled = filter( ProcessLoginJob, self._jobs_login_throttled )
|
||||
|
||||
|
||||
#
|
||||
|
||||
ready_to_start = []
|
||||
throttled_jobs = self._throttled_jobs
|
||||
|
||||
self._throttled_jobs = []
|
||||
|
||||
for job in throttled_jobs:
|
||||
ProcessCurrentLoginJob()
|
||||
|
||||
if job.ReadyToWork():
|
||||
|
||||
ready_to_start.append( job )
|
||||
|
||||
elif not job.IsCancelled():
|
||||
|
||||
self._throttled_jobs.append( job )
|
||||
|
||||
self._jobs_ready_to_start = filter( ProcessReadyJob, self._jobs_ready_to_start )
|
||||
|
||||
|
||||
#
|
||||
|
||||
for job in ready_to_start:
|
||||
self._jobs_downloading = filter( ProcessDownloadingJob, self._jobs_downloading )
|
||||
|
||||
self._controller.CallToThread( job.Start )
|
||||
|
||||
|
||||
# have this hold on to jobs until they are done, so the user can look at all the current ones in a review panel somewhere
|
||||
# this also lets us max out the num active connections at an optional value
|
||||
|
||||
self._new_work_to_do.wait( 1 )
|
||||
|
||||
|
@ -1213,27 +1372,58 @@ class NetworkJob( object ):
|
|||
|
||||
def __init__( self, method, url, body = None, referral_url = None, temp_path = None ):
|
||||
|
||||
self._lock = threading.Lock()
|
||||
|
||||
self._method = method
|
||||
self._url = url
|
||||
self._body = body
|
||||
self._referral_url = referral_url
|
||||
self._temp_path = temp_path
|
||||
|
||||
self._response = None
|
||||
self._bandwidth_tracker = HydrusNetworking.BandwidthTracker()
|
||||
|
||||
self._speed_tracker = HydrusNetworking.TransferSpeedTracker()
|
||||
self._wake_time = 0
|
||||
|
||||
self._stream_io = cStringIO.StringIO()
|
||||
|
||||
self._time_ready_to_work = 0
|
||||
self._has_error = False
|
||||
# a way to hold error traceback and a way to fetch it
|
||||
self._error_exception = None
|
||||
self._error_text = None
|
||||
|
||||
self._is_done = False
|
||||
self._is_cancelled = False
|
||||
self._bandwidth_override = False
|
||||
|
||||
self._text = 'initialising'
|
||||
self._value_range = ( None, None )
|
||||
self._status_code = None
|
||||
|
||||
self._lock = threading.Lock()
|
||||
self._status_text = u'initialising\u2026'
|
||||
self._num_bytes_read = 0
|
||||
self._num_bytes_to_read = None
|
||||
|
||||
|
||||
def _BandwidthOK( self ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _CanLogin( self ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _GenerateLoginProcess( self ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _GetSession( self ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _ImmediateBandwidthOK( self ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _IsCancelled( self ):
|
||||
|
@ -1251,180 +1441,433 @@ class NetworkJob( object ):
|
|||
return False
|
||||
|
||||
|
||||
def _GetSession( self ):
|
||||
def _NeedsLogin( self ):
|
||||
|
||||
pass # fetch the regular session from the sessionmanager
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _ReadResponse( self, f = None ):
|
||||
def _ReadResponse( self, response, stream_dest ):
|
||||
|
||||
# get the content-length, if any, to use for range
|
||||
|
||||
bytes_read = 0
|
||||
|
||||
for chunk in self._response.iter_content( chunk_size = 8192 ):
|
||||
if 'content-length' in response.headers:
|
||||
|
||||
if self._IsCancelled():
|
||||
self._num_bytes_to_read = int( response.headers[ 'content-length' ] )
|
||||
|
||||
|
||||
try:
|
||||
|
||||
for chunk in response.iter_content( chunk_size = 8192 ):
|
||||
|
||||
return
|
||||
if self._IsCancelled():
|
||||
|
||||
return
|
||||
|
||||
|
||||
stream_dest.write( chunk )
|
||||
|
||||
chunk_length = len( chunk )
|
||||
|
||||
self._num_bytes_read += chunk_length
|
||||
|
||||
self._ReportDataUsed( chunk_length )
|
||||
self._WaitOnImmediateBandwidth()
|
||||
|
||||
|
||||
if f is not None:
|
||||
finally:
|
||||
|
||||
num_bytes_used = self._num_bytes_read
|
||||
|
||||
if self._body is not None:
|
||||
|
||||
f.write( chunk )
|
||||
num_bytes_used += len( self._body )
|
||||
|
||||
|
||||
chunk_length = len( chunk )
|
||||
|
||||
bytes_read += chunk_length
|
||||
|
||||
self._speed_tracker.DataTransferred( chunk_length )
|
||||
|
||||
# update the status
|
||||
|
||||
|
||||
num_bytes_used = bytes_read
|
||||
|
||||
def _ReportDataUsed( self, num_bytes ):
|
||||
|
||||
if self._body is not None:
|
||||
|
||||
num_bytes_used += len( self._body )
|
||||
|
||||
self._bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
self._ReportBandwidth( num_bytes_used )
|
||||
|
||||
def _ReportRequestUsed( self ):
|
||||
|
||||
self._bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
|
||||
def _SetCancelled( self ):
|
||||
|
||||
self._is_cancelled = True
|
||||
|
||||
self._SetDone()
|
||||
|
||||
|
||||
def _SetError( self, e, error ):
|
||||
|
||||
self._has_error = True
|
||||
self._error_exception = e
|
||||
self._error_text = error
|
||||
|
||||
self._SetDone()
|
||||
|
||||
|
||||
def _SetDone( self ):
|
||||
|
||||
self._is_done = True
|
||||
|
||||
|
||||
def _ReportBandwidth( self, num_bytes ):
|
||||
def _WaitOnImmediateBandwidth( self ):
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
bandwidth_manager.RequestMade( self._url, num_bytes )
|
||||
while not self._ImmediateBandwidthOK() and not self._IsCancelled():
|
||||
|
||||
time.sleep( 0.5 )
|
||||
|
||||
|
||||
|
||||
def BandwidthOverride( self ):
|
||||
def BandwidthOK( self ):
|
||||
|
||||
self._bandwidth_override = True
|
||||
with self._lock:
|
||||
|
||||
if self._bandwidth_override:
|
||||
|
||||
return True
|
||||
|
||||
else:
|
||||
|
||||
return self._BandwidthOK()
|
||||
|
||||
|
||||
|
||||
|
||||
def Cancel( self ):
|
||||
|
||||
self._is_cancelled = True
|
||||
with self._lock:
|
||||
|
||||
self._status_text = 'cancelled!'
|
||||
|
||||
self._SetCancelled()
|
||||
|
||||
|
||||
|
||||
def GenerateLoginProcess( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return self._GenerateLoginProcess()
|
||||
|
||||
|
||||
|
||||
def GetContent( self ):
|
||||
|
||||
return self._response.content
|
||||
with self._lock:
|
||||
|
||||
self._stream_io.seek( 0 )
|
||||
|
||||
return self._stream_io.read()
|
||||
|
||||
|
||||
|
||||
def GetJSON( self ):
|
||||
def GetErrorException( self ):
|
||||
|
||||
return self._response.json
|
||||
with self._lock:
|
||||
|
||||
return self._error_exception
|
||||
|
||||
|
||||
|
||||
def GetErrorText( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return self._error_text
|
||||
|
||||
|
||||
|
||||
def GetStatus( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return ( self._text, self._speed_tracker, self._value_range )
|
||||
return ( self._status_text, self._bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 1 ), self._num_bytes_read, self._num_bytes_to_read )
|
||||
|
||||
|
||||
|
||||
def HasError( self ):
|
||||
|
||||
return self._has_error
|
||||
with self._lock:
|
||||
|
||||
return self._has_error
|
||||
|
||||
|
||||
|
||||
def IsAsleep( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return HydrusData.TimeHasPassed( self._wake_time )
|
||||
|
||||
|
||||
|
||||
def IsCancelled( self ):
|
||||
|
||||
return self._IsCancelled()
|
||||
with self._lock:
|
||||
|
||||
return self._IsCancelled()
|
||||
|
||||
|
||||
|
||||
def IsDone( self ):
|
||||
|
||||
return self._is_done
|
||||
with self._lock:
|
||||
|
||||
return self._is_done
|
||||
|
||||
|
||||
|
||||
def ReadyToWork( self ):
|
||||
|
||||
if not HydrusData.TimeHasPassed( self._time_ready_to_work ):
|
||||
|
||||
return False
|
||||
|
||||
|
||||
if not self._bandwidth_override:
|
||||
|
||||
pass
|
||||
|
||||
# make sure bandwidth domain is ok
|
||||
# report to status if not with how long to expect to wait
|
||||
# set ready to work ahead an appropriate time
|
||||
|
||||
|
||||
# try
|
||||
# make sure login domain is ok
|
||||
# this can do the actual login here. a little delay is fine for this job, and it is good it is here where the engine works serially
|
||||
# except
|
||||
# abandon and set status and set ready to work ahead a bit
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def SetStatus( self, text, value, range ):
|
||||
def NeedsLogin( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._text = text
|
||||
self._value_range = ( value, range )
|
||||
self._NeedsLogin()
|
||||
|
||||
|
||||
|
||||
def OverrideBandwidth( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_override = True
|
||||
|
||||
|
||||
|
||||
def SetStatus( self, text ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._status_text = text
|
||||
|
||||
|
||||
|
||||
def Sleep( self, seconds ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._wake_time = HydrusData.GetNow() + seconds
|
||||
|
||||
|
||||
|
||||
def Start( self ):
|
||||
|
||||
# set status throughout this
|
||||
|
||||
session = self._GetSession()
|
||||
|
||||
headers = {}
|
||||
|
||||
if self._referral_url is not None:
|
||||
try:
|
||||
|
||||
headers = { 'referer' : self._referral_url }
|
||||
|
||||
|
||||
self._response = session.request( self._method, self._url, headers = headers, stream = True )
|
||||
|
||||
# check the response here using requestscheckresponse above
|
||||
|
||||
# if no error:
|
||||
|
||||
# deal with reading errors here gracefully, whatever form they occur in
|
||||
|
||||
if self._temp_path is None:
|
||||
|
||||
self._ReadResponse()
|
||||
|
||||
else:
|
||||
|
||||
with open( self._temp_path, 'rb' ) as f:
|
||||
with self._lock:
|
||||
|
||||
self._ReadResponse( f )
|
||||
self._ReportRequestUsed()
|
||||
|
||||
session = self._GetSession()
|
||||
|
||||
method = self._method
|
||||
url = self._url
|
||||
data = self._body
|
||||
|
||||
headers = {}
|
||||
|
||||
if self._referral_url is not None:
|
||||
|
||||
headers = { 'referer' : self._referral_url }
|
||||
|
||||
|
||||
self._status_text = u'sending request\u2026'
|
||||
|
||||
|
||||
response = session.request( method, url, data = data, headers = headers, stream = True )
|
||||
|
||||
with self._lock:
|
||||
|
||||
if self._body is not None:
|
||||
|
||||
self._ReportDataUsed( len( self._body ) )
|
||||
|
||||
|
||||
self._status_code = response.status_code
|
||||
|
||||
|
||||
if response.ok:
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._status_text = u'downloading\u2026'
|
||||
|
||||
|
||||
if self._temp_path is None:
|
||||
|
||||
self._ReadResponse( response, self._stream_io )
|
||||
|
||||
else:
|
||||
|
||||
with open( self._temp_path, 'rb' ) as f:
|
||||
|
||||
self._ReadResponse( response, f )
|
||||
|
||||
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._status_text = 'done!'
|
||||
|
||||
|
||||
else:
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._status_text = '404 - Not Found' # ConvertStatusCodeIntoEnglish( response.status_code )
|
||||
|
||||
|
||||
self._ReadResponse( response, self._stream_io )
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._stream_io.seek( 0 )
|
||||
|
||||
data = self._stream_io.read()
|
||||
|
||||
( e, error_text ) = ( HydrusExceptions.NotFoundException( 'wew' ), 'Bunch of html that was returned or whatever.' ) # ConvertStatusCodeAndDataIntoExceptionInfo( response.status_code, data )
|
||||
|
||||
self._SetError( e, error_text )
|
||||
|
||||
|
||||
|
||||
except Exception as e:
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._status_text = 'unexpected error!'
|
||||
|
||||
trace = traceback.format_exc()
|
||||
|
||||
self._SetError( e, trace )
|
||||
|
||||
|
||||
finally:
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._SetDone()
|
||||
|
||||
|
||||
|
||||
|
||||
class HydrusNetworkJob( NetworkJob ):
|
||||
|
||||
def __init__( self, service_key, method, url, body = None, referral_url = None, temp_path = None ):
|
||||
class NetworkJobWeb( NetworkJob ):
|
||||
|
||||
def _BandwidthOK( self ):
|
||||
|
||||
NetworkJob.__init__( self, method, url, body, referral_url, temp_path )
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
return bandwidth_manager.CanStartURL( self._url )
|
||||
|
||||
|
||||
def _CanLogin( self ):
|
||||
|
||||
# ask login engine if it is possible to login at this time (i.e. if our login details seem to be valid--a bad login will invalidate the form/details until the user can re-verify, hence stopping all bad requests from spamming a changed login form)
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def _GenerateLoginProcess( self ):
|
||||
|
||||
pass
|
||||
|
||||
# talk to login engine, figure out an object to handle this that will follow the script and report status back to the network engine
|
||||
|
||||
|
||||
def _GetSession( self ):
|
||||
|
||||
pass # fetch the regular session from the sessionmanager
|
||||
|
||||
|
||||
def _ImmediateBandwidthOK( self ):
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
return bandwidth_manager.CanContinueURL( self._url )
|
||||
|
||||
|
||||
def _NeedsLogin( self ):
|
||||
|
||||
# consult login engine, ask if I need login (it will consult its records and the current session)
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def _ReportDataUsed( self, num_bytes ):
|
||||
|
||||
NetworkJob._ReportDataUsed( self, num_bytes )
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
bandwidth_manager.ReportDataUsedURL( self._url, num_bytes )
|
||||
|
||||
|
||||
def _ReportRequestUsed( self ):
|
||||
|
||||
NetworkJob._ReportRequestUsed( self )
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
bandwidth_manager.ReportRequestUsedURL( self._url )
|
||||
|
||||
|
||||
class NetworkJobWebLogin( NetworkJobWeb ):
|
||||
|
||||
def _BandwidthOK( self ):
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _ImmediateBandwidthOK( self ):
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _NeedsLogin( self ):
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class NetworkJobHydrus( NetworkJob ):
|
||||
|
||||
def __init__( self, service_key, method, url, body = None, temp_path = None ):
|
||||
|
||||
NetworkJob.__init__( self, method, url, body, temp_path = temp_path )
|
||||
|
||||
self._service_key = service_key
|
||||
|
||||
|
||||
def _ReportBandwidth( self, num_bytes ):
|
||||
def _BandwidthOK( self ):
|
||||
|
||||
pass # fetch my service, report requestmade
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
if not bandwidth_manager.CanStartGlobally():
|
||||
|
||||
return False
|
||||
|
||||
else:
|
||||
|
||||
service = HG.client_controller.GetServicesManager().GetService( self._service_key )
|
||||
|
||||
return service.BandwidthOK()
|
||||
|
||||
|
||||
|
||||
def _CanLogin( self ):
|
||||
|
||||
# ask service if account is valid
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def _GenerateLoginProcess( self ):
|
||||
|
||||
pass
|
||||
|
||||
# talk to service, figure out a login-process compatible object to handle the session gen
|
||||
|
||||
|
||||
def _GetSession( self ):
|
||||
|
@ -1433,3 +1876,62 @@ class HydrusNetworkJob( NetworkJob ):
|
|||
# this will ultimately be a job for the login engine step, earlier
|
||||
|
||||
|
||||
def _ImmediateBandwidthOK( self ):
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
service = HG.client_controller.GetServicesManager().GetService( self._service_key )
|
||||
|
||||
return bandwidth_manager.CanContinueGlobally() and service.CanContinue()
|
||||
|
||||
|
||||
def _NeedsLogin( self ):
|
||||
|
||||
# consult service, ask if I need a session key
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def _ReportDataUsed( self, num_bytes ):
|
||||
|
||||
NetworkJob._ReportDataUsed( self, num_bytes )
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
bandwidth_manager.ReportDataUsedGlobally( num_bytes )
|
||||
|
||||
service = HG.client_controller.GetServicesManager().GetService( self._service_key )
|
||||
|
||||
return service.ReportDataUsed( num_bytes )
|
||||
|
||||
|
||||
def _ReportRequestUsed( self ):
|
||||
|
||||
NetworkJob._ReportRequestUsed( self )
|
||||
|
||||
bandwidth_manager = HG.client_controller.GetBandwidthManager()
|
||||
|
||||
bandwidth_manager.ReportRequestUsedGlobally()
|
||||
|
||||
service = HG.client_controller.GetServicesManager().GetService( self._service_key )
|
||||
|
||||
return service.ReportRequestUsed()
|
||||
|
||||
|
||||
class NetworkJobHydrusLogin( NetworkJobHydrus ):
|
||||
|
||||
def _BandwidthOK( self ):
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _ImmediateBandwidthOK( self ):
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _NeedsLogin( self ):
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
|
|
@ -452,7 +452,18 @@ class RasterContainerVideo( RasterContainer ):
|
|||
frame = self._frames[ index ]
|
||||
|
||||
|
||||
self.GetReadyForFrame( index + 1 )
|
||||
num_frames = self.GetNumFrames()
|
||||
|
||||
if index == num_frames - 1:
|
||||
|
||||
next_index = 0
|
||||
|
||||
else:
|
||||
|
||||
next_index = index + 1
|
||||
|
||||
|
||||
self.GetReadyForFrame( next_index )
|
||||
|
||||
return frame
|
||||
|
||||
|
@ -467,6 +478,13 @@ class RasterContainerVideo( RasterContainer ):
|
|||
|
||||
num_frames = self.GetNumFrames()
|
||||
|
||||
frame_exists = 0 <= next_index_to_expect and next_index_to_expect <= ( num_frames - 1 )
|
||||
|
||||
if not frame_exists:
|
||||
|
||||
return
|
||||
|
||||
|
||||
if num_frames > self._num_frames_backwards + 1 + self._num_frames_forwards:
|
||||
|
||||
index_out_of_buffer = self._IndexOutOfRange( next_index_to_expect, self._buffer_start_index, self._buffer_end_index )
|
||||
|
|
|
@ -550,6 +550,19 @@ class FileSystemPredicates( object ):
|
|||
elif operator == '>': self._common_info[ 'min_num_tags' ] = num_tags
|
||||
|
||||
|
||||
if predicate_type == HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER:
|
||||
|
||||
( namespace, operator, num ) = value
|
||||
|
||||
if operator == '<': self._common_info[ 'max_tag_as_number' ] = ( namespace, num )
|
||||
elif operator == '>': self._common_info[ 'min_tag_as_number' ] = ( namespace, num )
|
||||
elif operator == u'\u2248':
|
||||
|
||||
self._common_info[ 'min_tag_as_number' ] = ( namespace, int( num * 0.85 ) )
|
||||
self._common_info[ 'max_tag_as_number' ] = ( namespace, int( num * 1.15 ) )
|
||||
|
||||
|
||||
|
||||
if predicate_type == HC.PREDICATE_TYPE_SYSTEM_WIDTH:
|
||||
|
||||
( operator, width ) = value
|
||||
|
@ -1168,6 +1181,41 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
|
|||
base += service.GetName()
|
||||
|
||||
|
||||
elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER:
|
||||
|
||||
if self._value is None:
|
||||
|
||||
base = 'tag as number'
|
||||
|
||||
else:
|
||||
|
||||
( namespace, operator, num ) = self._value
|
||||
|
||||
if namespace == '':
|
||||
|
||||
n_text = 'tag'
|
||||
|
||||
else:
|
||||
|
||||
n_text = namespace
|
||||
|
||||
|
||||
if operator == u'\u2248':
|
||||
|
||||
o_text = ' about '
|
||||
|
||||
elif operator == '<':
|
||||
|
||||
o_text = ' less than '
|
||||
|
||||
elif operator == '>':
|
||||
|
||||
o_text = ' more than '
|
||||
|
||||
|
||||
base = n_text + o_text + HydrusData.ConvertIntToPrettyString( num )
|
||||
|
||||
|
||||
elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS:
|
||||
|
||||
base = 'num duplicate relationships'
|
||||
|
|
|
@ -266,7 +266,7 @@ class ServiceLocalBooru( Service ):
|
|||
|
||||
def _GetFunctionalStatus( self ):
|
||||
|
||||
if not self._bandwidth_rules.OK( self._bandwidth_tracker ):
|
||||
if not self._bandwidth_rules.CanStart( self._bandwidth_tracker ):
|
||||
|
||||
return ( False, 'bandwidth exceeded' )
|
||||
|
||||
|
@ -302,7 +302,7 @@ class ServiceLocalBooru( Service ):
|
|||
|
||||
with self._lock:
|
||||
|
||||
return self._bandwidth_rules.OK( self._bandwidth_tracker )
|
||||
return self._bandwidth_rules.CanStart( self._bandwidth_tracker )
|
||||
|
||||
|
||||
|
||||
|
@ -322,11 +322,19 @@ class ServiceLocalBooru( Service ):
|
|||
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_tracker.RequestMade( num_bytes )
|
||||
self._bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
|
||||
|
||||
|
@ -464,7 +472,7 @@ class ServiceRemote( Service ):
|
|||
return ( False, self._no_requests_reason + ' - next request ' + HydrusData.ConvertTimestampToPrettyPending( self._no_requests_until ) )
|
||||
|
||||
|
||||
if not self._bandwidth_rules.OK( self._bandwidth_tracker ):
|
||||
if not self._bandwidth_rules.CanStart( self._bandwidth_tracker ):
|
||||
|
||||
return ( False, 'bandwidth exceeded' )
|
||||
|
||||
|
@ -496,13 +504,36 @@ class ServiceRemote( Service ):
|
|||
self._bandwidth_rules = dictionary[ 'bandwidth_rules' ]
|
||||
|
||||
|
||||
def _RecordBandwidth( self, method, command, num_bytes ):
|
||||
def _ReportDataUsed( self, num_bytes ):
|
||||
|
||||
self._bandwidth_tracker.RequestMade( num_bytes )
|
||||
self._bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
def _ReportRequestUsed( self ):
|
||||
|
||||
self._bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
def BandwidthOK( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return self._bandwidth_rules.CanStart( self._bandwidth_tracker )
|
||||
|
||||
|
||||
|
||||
def ImmediateBandwidthOK( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return self._bandwidth_rules.CanContinue( self._bandwidth_tracker )
|
||||
|
||||
|
||||
|
||||
def GetBandwidthCurrentMonthSummary( self ):
|
||||
|
||||
with self._lock:
|
||||
|
@ -610,16 +641,22 @@ class ServiceRestricted( ServiceRemote ):
|
|||
self._next_account_sync = dictionary[ 'next_account_sync' ]
|
||||
|
||||
|
||||
def _RecordBandwidth( self, method, command, num_bytes ):
|
||||
def _ReportDataUsed( self, num_bytes ):
|
||||
|
||||
ServiceRemote._RecordBandwidth( self, method, command, num_bytes )
|
||||
ServiceRemote._ReportDataUsed( self, num_bytes )
|
||||
|
||||
if ( method, command ) != ( HC.GET, 'account' ):
|
||||
|
||||
self._account.RequestMade( num_bytes )
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
self._account.ReportDataUsed( num_bytes )
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
def _ReportRequestUsed( self ):
|
||||
|
||||
ServiceRemote._ReportRequestUsed( self )
|
||||
|
||||
self._account.ReportRequestUsed()
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
def GetAccount( self ):
|
||||
|
@ -653,6 +690,22 @@ class ServiceRestricted( ServiceRemote ):
|
|||
return self._account.IsDirty()
|
||||
|
||||
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._ReportDataUsed( num_bytes )
|
||||
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._ReportRequestUsed()
|
||||
|
||||
|
||||
|
||||
def Request( self, method, command, request_args = None, request_headers = None, report_hooks = None, temp_path = None, return_cookies = False, return_data_used = False ):
|
||||
|
||||
if request_args is None: request_args = {}
|
||||
|
@ -661,6 +714,11 @@ class ServiceRestricted( ServiceRemote ):
|
|||
|
||||
try:
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._ReportRequestUsed()
|
||||
|
||||
|
||||
credentials = self.GetCredentials()
|
||||
|
||||
if command in ( 'access_key', '' ):
|
||||
|
@ -729,12 +787,12 @@ class ServiceRestricted( ServiceRemote ):
|
|||
|
||||
elif method == HC.POST:
|
||||
|
||||
data_used = len( body )
|
||||
data_used = len( body ) + size_of_response
|
||||
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._RecordBandwidth( method, command, data_used )
|
||||
self._ReportDataUsed( data_used )
|
||||
|
||||
|
||||
if return_data_used:
|
||||
|
@ -835,8 +893,9 @@ class ServiceRestricted( ServiceRemote ):
|
|||
|
||||
self._account = response[ 'account' ]
|
||||
|
||||
# because the account is one behind! mostly do this just to sync up nicely with the service bandwidth tracker
|
||||
self._account.RequestMade( data_used )
|
||||
# because the account was one behind when it was serialised! mostly do this just to sync up nicely with the service bandwidth tracker
|
||||
self._account.ReportDataUsed( data_used )
|
||||
self._account.ReportRequestUsed()
|
||||
|
||||
if force:
|
||||
|
||||
|
@ -1522,22 +1581,22 @@ class ServiceIPFS( ServiceRemote ):
|
|||
|
||||
def on_wx_select_tree( job_key, url_tree ):
|
||||
|
||||
import ClientGUIDialogs
|
||||
import ClientGUIDialogs
|
||||
|
||||
with ClientGUIDialogs.DialogSelectFromURLTree( None, url_tree ) as dlg:
|
||||
|
||||
with ClientGUIDialogs.DialogSelectFromURLTree( None, url_tree ) as dlg:
|
||||
if dlg.ShowModal() == wx.ID_OK:
|
||||
|
||||
if dlg.ShowModal() == wx.ID_OK:
|
||||
urls = dlg.GetURLs()
|
||||
|
||||
if len( urls ) > 0:
|
||||
|
||||
urls = dlg.GetURLs()
|
||||
|
||||
if len( urls ) > 0:
|
||||
|
||||
HG.client_controller.CallToThread( ClientDownloading.THREADDownloadURLs, job_key, urls, multihash )
|
||||
|
||||
HG.client_controller.CallToThread( ClientDownloading.THREADDownloadURLs, job_key, urls, multihash )
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def off_wx():
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ options = {}
|
|||
# Misc
|
||||
|
||||
NETWORK_VERSION = 18
|
||||
SOFTWARE_VERSION = 260
|
||||
SOFTWARE_VERSION = 261
|
||||
|
||||
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
|
||||
|
||||
|
@ -596,8 +596,9 @@ PREDICATE_TYPE_SYSTEM_FILE_SERVICE = 23
|
|||
PREDICATE_TYPE_SYSTEM_NUM_PIXELS = 24
|
||||
PREDICATE_TYPE_SYSTEM_DIMENSIONS = 25
|
||||
PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS = 26
|
||||
PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER = 27
|
||||
|
||||
SYSTEM_PREDICATES = [ PREDICATE_TYPE_SYSTEM_EVERYTHING, PREDICATE_TYPE_SYSTEM_INBOX, PREDICATE_TYPE_SYSTEM_ARCHIVE, PREDICATE_TYPE_SYSTEM_UNTAGGED, PREDICATE_TYPE_SYSTEM_NUM_TAGS, PREDICATE_TYPE_SYSTEM_LIMIT, PREDICATE_TYPE_SYSTEM_SIZE, PREDICATE_TYPE_SYSTEM_AGE, PREDICATE_TYPE_SYSTEM_HASH, PREDICATE_TYPE_SYSTEM_WIDTH, PREDICATE_TYPE_SYSTEM_HEIGHT, PREDICATE_TYPE_SYSTEM_RATIO, PREDICATE_TYPE_SYSTEM_DURATION, PREDICATE_TYPE_SYSTEM_MIME, PREDICATE_TYPE_SYSTEM_RATING, PREDICATE_TYPE_SYSTEM_SIMILAR_TO, PREDICATE_TYPE_SYSTEM_LOCAL, PREDICATE_TYPE_SYSTEM_NOT_LOCAL, PREDICATE_TYPE_SYSTEM_NUM_WORDS, PREDICATE_TYPE_SYSTEM_FILE_SERVICE, PREDICATE_TYPE_SYSTEM_NUM_PIXELS, PREDICATE_TYPE_SYSTEM_DIMENSIONS, PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ]
|
||||
SYSTEM_PREDICATES = [ PREDICATE_TYPE_SYSTEM_EVERYTHING, PREDICATE_TYPE_SYSTEM_INBOX, PREDICATE_TYPE_SYSTEM_ARCHIVE, PREDICATE_TYPE_SYSTEM_UNTAGGED, PREDICATE_TYPE_SYSTEM_NUM_TAGS, PREDICATE_TYPE_SYSTEM_LIMIT, PREDICATE_TYPE_SYSTEM_SIZE, PREDICATE_TYPE_SYSTEM_AGE, PREDICATE_TYPE_SYSTEM_HASH, PREDICATE_TYPE_SYSTEM_WIDTH, PREDICATE_TYPE_SYSTEM_HEIGHT, PREDICATE_TYPE_SYSTEM_RATIO, PREDICATE_TYPE_SYSTEM_DURATION, PREDICATE_TYPE_SYSTEM_MIME, PREDICATE_TYPE_SYSTEM_RATING, PREDICATE_TYPE_SYSTEM_SIMILAR_TO, PREDICATE_TYPE_SYSTEM_LOCAL, PREDICATE_TYPE_SYSTEM_NOT_LOCAL, PREDICATE_TYPE_SYSTEM_NUM_WORDS, PREDICATE_TYPE_SYSTEM_FILE_SERVICE, PREDICATE_TYPE_SYSTEM_NUM_PIXELS, PREDICATE_TYPE_SYSTEM_DIMENSIONS, PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER, PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ]
|
||||
|
||||
SITE_TYPE_DEVIANT_ART = 0
|
||||
SITE_TYPE_GIPHY = 1
|
||||
|
|
|
@ -69,7 +69,11 @@ class HydrusController( object ):
|
|||
|
||||
|
||||
|
||||
if len( self._call_to_threads ) < 10:
|
||||
# all the threads in the pool are currently busy
|
||||
|
||||
calling_from_the_thread_pool = threading.current_thread() in self._call_to_threads
|
||||
|
||||
if calling_from_the_thread_pool or len( self._call_to_threads ) < 10:
|
||||
|
||||
call_to_thread = HydrusThreading.THREADCallToThread( self )
|
||||
|
||||
|
@ -136,6 +140,23 @@ class HydrusController( object ):
|
|||
|
||||
def CallToThread( self, callable, *args, **kwargs ):
|
||||
|
||||
if HG.callto_report_mode:
|
||||
|
||||
what_to_report = [ callable ]
|
||||
|
||||
if len( args ) > 0:
|
||||
|
||||
what_to_report.append( args )
|
||||
|
||||
|
||||
if len( kwargs ) > 0:
|
||||
|
||||
what_to_report.append( kwargs )
|
||||
|
||||
|
||||
HydrusData.ShowText( tuple( what_to_report ) )
|
||||
|
||||
|
||||
call_to_thread = self._GetCallToThread()
|
||||
|
||||
call_to_thread.put( callable, *args, **kwargs )
|
||||
|
@ -296,7 +317,12 @@ class HydrusController( object ):
|
|||
return self._Read( action, *args, **kwargs )
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
pass
|
||||
|
||||
|
|
|
@ -227,31 +227,45 @@ def ConvertTimeDeltaToPrettyString( seconds ):
|
|||
|
||||
if seconds is None:
|
||||
|
||||
return 'month'
|
||||
return 'per month'
|
||||
|
||||
|
||||
if seconds > 60:
|
||||
|
||||
seconds = int( seconds )
|
||||
|
||||
if seconds > 86400:
|
||||
if seconds >= 86400:
|
||||
|
||||
days = seconds / 86400
|
||||
hours = ( seconds % 86400 ) / 3600
|
||||
|
||||
result = '%d' % days + ' days'
|
||||
if days == 1:
|
||||
|
||||
result = '1 day'
|
||||
|
||||
else:
|
||||
|
||||
result = '%d' % days + ' days'
|
||||
|
||||
|
||||
if hours > 0:
|
||||
|
||||
result += ' %d' % hours + ' hours'
|
||||
|
||||
|
||||
elif seconds > 3600:
|
||||
elif seconds >= 3600:
|
||||
|
||||
hours = seconds / 3600
|
||||
minutes = ( seconds % 3600 ) / 60
|
||||
|
||||
result = '%d' % hours + ' hours'
|
||||
if hours == 1:
|
||||
|
||||
result = '1 hour'
|
||||
|
||||
else:
|
||||
|
||||
result = '%d' % hours + ' hours'
|
||||
|
||||
|
||||
if minutes > 0:
|
||||
|
||||
|
@ -275,6 +289,10 @@ def ConvertTimeDeltaToPrettyString( seconds ):
|
|||
|
||||
result = '%.1f' % seconds + ' seconds'
|
||||
|
||||
elif seconds == 1:
|
||||
|
||||
result = '1 second'
|
||||
|
||||
elif seconds > 0.1:
|
||||
|
||||
result = '%d' % ( seconds * 1000 ) + ' milliseconds'
|
||||
|
@ -1324,9 +1342,13 @@ class Account( HydrusYAMLBase ):
|
|||
|
||||
def MakeStale( self ): self._info[ 'fresh_timestamp' ] = 0
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
self._info[ 'used_bytes' ] += num_bytes
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
self._info[ 'used_requests' ] += 1
|
||||
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ test_controller = None
|
|||
view_shutdown = False
|
||||
model_shutdown = False
|
||||
|
||||
callto_report_mode = False
|
||||
db_report_mode = False
|
||||
db_profile_mode = False
|
||||
gui_report_mode = False
|
||||
|
|
|
@ -578,11 +578,21 @@ class Account( object ):
|
|||
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_tracker.RequestMade( num_bytes )
|
||||
self._bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
@ -735,7 +745,7 @@ class AccountType( object ):
|
|||
|
||||
def BandwidthOK( self, bandwidth_tracker ):
|
||||
|
||||
return self._bandwidth_rules.OK( bandwidth_tracker )
|
||||
return self._bandwidth_rules.CanStart( bandwidth_tracker )
|
||||
|
||||
|
||||
def HasPermission( self, content_type, permission ):
|
||||
|
@ -2053,11 +2063,21 @@ class ServerService( object ):
|
|||
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_tracker.RequestMade( num_bytes )
|
||||
self._bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
@ -2137,7 +2157,7 @@ class ServerServiceRestricted( ServerService ):
|
|||
|
||||
with self._lock:
|
||||
|
||||
return self._bandwidth_rules.OK( self._bandwidth_tracker )
|
||||
return self._bandwidth_rules.CanStart( self._bandwidth_tracker )
|
||||
|
||||
|
||||
|
||||
|
@ -2283,15 +2303,25 @@ class ServerServiceAdmin( ServerServiceRestricted ):
|
|||
|
||||
with self._lock:
|
||||
|
||||
return self._server_bandwidth_rules.OK( self._server_bandwidth_tracker )
|
||||
return self._server_bandwidth_rules.CanStart( self._server_bandwidth_tracker )
|
||||
|
||||
|
||||
|
||||
def ServerRequestMade( self, num_bytes ):
|
||||
def ServerReportDataUsed( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._server_bandwidth_tracker.RequestMade( num_bytes )
|
||||
self._server_bandwidth_tracker.ReportDataUsed( num_bytes )
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
|
||||
def ServerReportRequestUsed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._server_bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
|
|
@ -105,7 +105,51 @@ class BandwidthRules( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
def GetUsageStringsAndGaugeTuples( self, bandwidth_tracker, threshold = 3600 ):
|
||||
def CanContinue( self, bandwidth_tracker, threshold = 60 ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
for ( bandwidth_type, time_delta, max_allowed ) in self._rules:
|
||||
|
||||
# Do not block an ongoing jpg download because the current month is 100.03% used
|
||||
if time_delta is None or time_delta > threshold:
|
||||
|
||||
continue
|
||||
|
||||
|
||||
if bandwidth_tracker.GetUsage( bandwidth_type, time_delta ) > max_allowed:
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
def CanStart( self, bandwidth_tracker, threshold = 60 ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
for ( bandwidth_type, time_delta, max_allowed ) in self._rules:
|
||||
|
||||
# Do not prohibit a new job from starting just because the current download speed is 210/200KB/s
|
||||
if time_delta is not None and time_delta < threshold:
|
||||
|
||||
continue
|
||||
|
||||
|
||||
if bandwidth_tracker.GetUsage( bandwidth_type, time_delta ) > max_allowed:
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
def GetUsageStringsAndGaugeTuples( self, bandwidth_tracker, threshold = 600 ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
|
@ -113,7 +157,7 @@ class BandwidthRules( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
for ( bandwidth_type, time_delta, max_allowed ) in self._rules:
|
||||
|
||||
time_is_less_than_threshold = time_delta is not None and time_delta < threshold
|
||||
time_is_less_than_threshold = time_delta is not None and time_delta <= threshold
|
||||
|
||||
if time_is_less_than_threshold or max_allowed == 0:
|
||||
|
||||
|
@ -156,27 +200,6 @@ class BandwidthRules( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
def OK( self, bandwidth_tracker ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
for ( bandwidth_type, time_delta, max_allowed ) in self._rules:
|
||||
|
||||
if max_allowed == 0:
|
||||
|
||||
return False
|
||||
|
||||
|
||||
if bandwidth_tracker.GetUsage( bandwidth_type, time_delta ) > max_allowed:
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_BANDWIDTH_RULES ] = BandwidthRules
|
||||
|
||||
class BandwidthTracker( HydrusSerialisable.SerialisableBase ):
|
||||
|
@ -254,28 +277,7 @@ class BandwidthTracker( HydrusSerialisable.SerialisableBase ):
|
|||
return month_time
|
||||
|
||||
|
||||
def _GetTimes( self, dt ):
|
||||
|
||||
# collapse each time portion to the latest timestamp it covers
|
||||
|
||||
( year, month, day, hour, minute ) = ( dt.year, dt.month, dt.day, dt.hour, dt.minute )
|
||||
|
||||
month_dt = datetime.datetime( year, month, 1 )
|
||||
day_dt = datetime.datetime( year, month, day )
|
||||
hour_dt = datetime.datetime( year, month, day, hour )
|
||||
minute_dt = datetime.datetime( year, month, day, hour, minute )
|
||||
|
||||
month_time = calendar.timegm( month_dt.timetuple() )
|
||||
day_time = calendar.timegm( day_dt.timetuple() )
|
||||
hour_time = calendar.timegm( hour_dt.timetuple() )
|
||||
minute_time = calendar.timegm( minute_dt.timetuple() )
|
||||
|
||||
second_time = calendar.timegm( dt.timetuple() )
|
||||
|
||||
return ( month_time, day_time, hour_time, minute_time, second_time )
|
||||
|
||||
|
||||
def _GetUsage( self, bandwidth_type, time_delta ):
|
||||
def _GetRawUsage( self, bandwidth_type, time_delta ):
|
||||
|
||||
if time_delta is None:
|
||||
|
||||
|
@ -340,15 +342,73 @@ class BandwidthTracker( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
# we need the 'window' because this tracks backets from the first timestamp and we want to include if since lands anywhere in the bracket
|
||||
# we need the 'window' because this tracks brackets from the first timestamp and we want to include if 'since' lands anywhere in the bracket
|
||||
# e.g. if it is 1200 and we want the past 1,000, we also need the bracket starting at 0, which will include 200-999
|
||||
|
||||
time_delta += window
|
||||
|
||||
since = HydrusData.GetNow() - time_delta
|
||||
|
||||
return sum( ( value for ( key, value ) in counter.items() if key >= since ) )
|
||||
|
||||
|
||||
def _GetTimes( self, dt ):
|
||||
|
||||
# collapse each time portion to the latest timestamp it covers
|
||||
|
||||
( year, month, day, hour, minute ) = ( dt.year, dt.month, dt.day, dt.hour, dt.minute )
|
||||
|
||||
month_dt = datetime.datetime( year, month, 1 )
|
||||
day_dt = datetime.datetime( year, month, day )
|
||||
hour_dt = datetime.datetime( year, month, day, hour )
|
||||
minute_dt = datetime.datetime( year, month, day, hour, minute )
|
||||
|
||||
month_time = calendar.timegm( month_dt.timetuple() )
|
||||
day_time = calendar.timegm( day_dt.timetuple() )
|
||||
hour_time = calendar.timegm( hour_dt.timetuple() )
|
||||
minute_time = calendar.timegm( minute_dt.timetuple() )
|
||||
|
||||
second_time = calendar.timegm( dt.timetuple() )
|
||||
|
||||
return ( month_time, day_time, hour_time, minute_time, second_time )
|
||||
|
||||
|
||||
def _GetUsage( self, bandwidth_type, time_delta ):
|
||||
|
||||
if time_delta is not None and bandwidth_type == HC.BANDWIDTH_TYPE_DATA and time_delta <= 5:
|
||||
|
||||
usage = self._GetWeightedApproximateUsage( bandwidth_type, time_delta )
|
||||
|
||||
else:
|
||||
|
||||
usage = self._GetRawUsage( bandwidth_type, time_delta )
|
||||
|
||||
|
||||
self._MaintainCache()
|
||||
|
||||
return sum( ( value for ( key, value ) in counter.items() if key >= since ) )
|
||||
return usage
|
||||
|
||||
|
||||
def _GetWeightedApproximateUsage( self, bandwidth_type, time_delta ):
|
||||
|
||||
LONG_DELTA = time_delta * 15
|
||||
SHORT_DELTA = time_delta * 3
|
||||
|
||||
SHORT_WEIGHT = 3
|
||||
|
||||
usage_long = self._GetRawUsage( bandwidth_type, LONG_DELTA )
|
||||
usage_short = self._GetRawUsage( bandwidth_type, SHORT_DELTA )
|
||||
|
||||
total_weighted_usage = usage_long + ( usage_short * SHORT_WEIGHT )
|
||||
|
||||
total_weight = LONG_DELTA + ( SHORT_DELTA * SHORT_WEIGHT )
|
||||
|
||||
# since this is in bytes, an int for the final answer is fine and proper
|
||||
usage = int( total_weighted_usage / total_weight )
|
||||
|
||||
# usage per sec would be this / time_delta
|
||||
|
||||
return usage
|
||||
|
||||
|
||||
def _MaintainCache( self ):
|
||||
|
@ -400,11 +460,16 @@ class BandwidthTracker( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
with self._lock:
|
||||
|
||||
if time_delta == 0:
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
return self._GetUsage( bandwidth_type, time_delta )
|
||||
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
|
@ -413,18 +478,35 @@ class BandwidthTracker( HydrusSerialisable.SerialisableBase ):
|
|||
( month_time, day_time, hour_time, minute_time, second_time ) = self._GetTimes( dt )
|
||||
|
||||
self._months_bytes[ month_time ] += num_bytes
|
||||
self._months_requests[ month_time ] += 1
|
||||
|
||||
self._days_bytes[ day_time ] += num_bytes
|
||||
self._days_requests[ day_time ] += 1
|
||||
|
||||
self._hours_bytes[ hour_time ] += num_bytes
|
||||
self._hours_requests[ hour_time ] += 1
|
||||
|
||||
self._minutes_bytes[ minute_time ] += num_bytes
|
||||
self._minutes_requests[ minute_time ] += 1
|
||||
|
||||
self._seconds_bytes[ second_time ] += num_bytes
|
||||
|
||||
self._MaintainCache()
|
||||
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
dt = datetime.datetime.utcnow()
|
||||
|
||||
( month_time, day_time, hour_time, minute_time, second_time ) = self._GetTimes( dt )
|
||||
|
||||
self._months_requests[ month_time ] += 1
|
||||
|
||||
self._days_requests[ day_time ] += 1
|
||||
|
||||
self._hours_requests[ hour_time ] += 1
|
||||
|
||||
self._minutes_requests[ minute_time ] += 1
|
||||
|
||||
self._seconds_requests[ second_time ] += 1
|
||||
|
||||
self._MaintainCache()
|
||||
|
@ -432,83 +514,3 @@ class BandwidthTracker( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_BANDWIDTH_TRACKER ] = BandwidthTracker
|
||||
|
||||
class TransferSpeedTracker( object ):
|
||||
|
||||
CLEAN_PERIOD = 30
|
||||
|
||||
LONG_DELTA = 15
|
||||
SHORT_DELTA = 3
|
||||
|
||||
SHORT_WEIGHT = 3
|
||||
|
||||
def __init__( self ):
|
||||
|
||||
self._lock = threading.Lock()
|
||||
|
||||
self._current_speed = 0
|
||||
|
||||
self._current_speed_timestamp = 0
|
||||
self._current_speed_dirty = False
|
||||
|
||||
self._timestamps_to_amounts = collections.Counter()
|
||||
|
||||
self._next_clean_time = HydrusData.GetNow() + self.CLEAN_PERIOD
|
||||
|
||||
|
||||
def _CleanHistory( self ):
|
||||
|
||||
if HydrusData.TimeHasPassed( self._next_clean_time ):
|
||||
|
||||
now = HydrusData.GetNow()
|
||||
|
||||
invalid_indices = [ timestamp for timestamp in self._timestamps_to_amounts.keys() if timestamp < now - self.LONG_DELTA ]
|
||||
|
||||
for timestamp in invalid_indices:
|
||||
|
||||
del self._timestamps_to_amounts[ timestamp ]
|
||||
|
||||
|
||||
self._next_clean_time = HydrusData.GetNow() + self.CLEAN_PERIOD
|
||||
|
||||
|
||||
|
||||
def DataTransferred( self, num_bytes ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._CleanHistory()
|
||||
|
||||
now = HydrusData.GetNow()
|
||||
|
||||
self._timestamps_to_amounts[ now ] += num_bytes
|
||||
|
||||
self._current_speed_dirty = True
|
||||
|
||||
|
||||
|
||||
def GetCurrentSpeed( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._CleanHistory()
|
||||
|
||||
now = HydrusData.GetNow()
|
||||
|
||||
if self._current_speed_dirty or self._current_speed_timestamp != now:
|
||||
|
||||
total_bytes = ( self._timestamps_to_amounts[ timestamp ] for timestamp in range( now - self.LONG_DELTA, now ) )
|
||||
total_bytes += ( self._timestamps_to_amounts[ timestamp ] * self.SHORT_WEIGHT for timestamp in range( now - self.SHORT_DELTA, now ) )
|
||||
|
||||
total_weight = self.LONG_DELTA + ( self.SHORT_DELTA * self.SHORT_WEIGHT )
|
||||
|
||||
self._current_speed = total_bytes // total_weight # since this is in bytes, an int is fine and proper
|
||||
|
||||
self._current_speed_timestamp = now
|
||||
self._current_speed_dirty = False
|
||||
|
||||
|
||||
return self._current_speed
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -19,7 +19,6 @@ class HydrusRequest( Request ):
|
|||
self.is_hydrus_client = True
|
||||
self.hydrus_args = None
|
||||
self.hydrus_response_context = None
|
||||
self.hydrus_request_data_usage = 0
|
||||
|
||||
|
||||
def finish( self ):
|
||||
|
|
|
@ -384,11 +384,13 @@ class HydrusResource( Resource ):
|
|||
raise HydrusExceptions.ForbiddenException( 'Did not recognise Content-Type header!' )
|
||||
|
||||
|
||||
total_bytes_read = 0
|
||||
|
||||
if mime == HC.APPLICATION_JSON:
|
||||
|
||||
json_string = request.content.read()
|
||||
|
||||
request.hydrus_request_data_usage += len( json_string )
|
||||
total_bytes_read += len( json_string )
|
||||
|
||||
hydrus_args = HydrusNetwork.ParseBodyString( json_string )
|
||||
|
||||
|
@ -404,13 +406,15 @@ class HydrusResource( Resource ):
|
|||
|
||||
f.write( block )
|
||||
|
||||
request.hydrus_request_data_usage += len( block )
|
||||
total_bytes_read += len( block )
|
||||
|
||||
|
||||
|
||||
hydrus_args = ParseFileArguments( temp_path )
|
||||
|
||||
|
||||
self._reportDataUsed( request, total_bytes_read )
|
||||
|
||||
|
||||
request.hydrus_args = hydrus_args
|
||||
|
||||
|
@ -496,9 +500,8 @@ class HydrusResource( Resource ):
|
|||
request.setHeader( 'Content-Length', str( content_length ) )
|
||||
|
||||
|
||||
request.hydrus_request_data_usage += content_length
|
||||
|
||||
self._recordDataUsage( request )
|
||||
self._reportDataUsed( request, content_length )
|
||||
self._reportRequestUsed( request )
|
||||
|
||||
if do_finish:
|
||||
|
||||
|
@ -632,13 +635,18 @@ class HydrusResource( Resource ):
|
|||
return access_key
|
||||
|
||||
|
||||
def _recordDataUsage( self, request ):
|
||||
def _reportDataUsed( self, request, num_bytes ):
|
||||
|
||||
num_bytes = request.hydrus_request_data_usage
|
||||
self._service.ReportDataUsed( num_bytes )
|
||||
|
||||
self._service.RequestMade( num_bytes )
|
||||
HG.controller.ReportDataUsed( num_bytes )
|
||||
|
||||
HG.controller.RequestMade( num_bytes )
|
||||
|
||||
def _reportRequestUsed( self, request ):
|
||||
|
||||
self._service.ReportRequestUsed()
|
||||
|
||||
HG.controller.ReportRequestUsed()
|
||||
|
||||
|
||||
def _threadDoGETJob( self, request ):
|
||||
|
|
|
@ -363,9 +363,14 @@ class Controller( HydrusController.HydrusController ):
|
|||
self.CallToThread( self.ProcessPubSub )
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
self._admin_service.ServerRequestMade( num_bytes )
|
||||
self._admin_service.ServerReportDataUsed( num_bytes )
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
self._admin_service.ServerReportRequestUsed()
|
||||
|
||||
|
||||
def Run( self ):
|
||||
|
|
|
@ -2099,6 +2099,8 @@ class DB( HydrusDB.HydrusDB ):
|
|||
min_weight_permitted = None
|
||||
max_weight_permitted = None
|
||||
|
||||
max_total_weight = None
|
||||
|
||||
petition_pairs = list( tag_ids_to_hash_ids.items() )
|
||||
|
||||
random.shuffle( petition_pairs )
|
||||
|
@ -2116,21 +2118,29 @@ class DB( HydrusDB.HydrusDB ):
|
|||
min_weight_permitted = 1
|
||||
max_weight_permitted = 1
|
||||
|
||||
max_total_weight = 20000
|
||||
|
||||
elif content_weight < 10:
|
||||
|
||||
min_weight_permitted = 2
|
||||
max_weight_permitted = 9
|
||||
|
||||
max_total_weight = 5000
|
||||
|
||||
elif content_weight < 50:
|
||||
|
||||
min_weight_permitted = 10
|
||||
max_weight_permitted = 49
|
||||
|
||||
max_total_weight = 2000
|
||||
|
||||
else:
|
||||
|
||||
min_weight_permitted = 50
|
||||
max_weight_permitted = None
|
||||
|
||||
max_total_weight = 500
|
||||
|
||||
|
||||
else:
|
||||
|
||||
|
@ -2159,7 +2169,7 @@ class DB( HydrusDB.HydrusDB ):
|
|||
total_num_petitions += 1
|
||||
total_weight += content_weight
|
||||
|
||||
if total_num_petitions > 20 and total_weight > 1000:
|
||||
if total_num_petitions > 20 and total_weight > 10000:
|
||||
|
||||
break
|
||||
|
||||
|
|
|
@ -160,17 +160,27 @@ class HydrusResourceRestricted( HydrusServerResources.HydrusResource ):
|
|||
return request
|
||||
|
||||
|
||||
def _recordDataUsage( self, request ):
|
||||
def _reportDataUsed( self, request, num_bytes ):
|
||||
|
||||
HydrusServerResources.HydrusResource._recordDataUsage( self, request )
|
||||
|
||||
num_bytes = request.hydrus_request_data_usage
|
||||
HydrusServerResources.HydrusResource._reportDataUsed( self, request, num_bytes )
|
||||
|
||||
account = request.hydrus_account
|
||||
|
||||
if account is not None:
|
||||
|
||||
account.RequestMade( num_bytes )
|
||||
account.ReportDataUsed( num_bytes )
|
||||
|
||||
|
||||
|
||||
def _reportRequestUsed( self, request ):
|
||||
|
||||
HydrusServerResources.HydrusResource._reportRequestUsed( self, request )
|
||||
|
||||
account = request.hydrus_account
|
||||
|
||||
if account is not None:
|
||||
|
||||
account.ReportRequestUsed()
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -594,7 +594,7 @@ class TestClientDB( unittest.TestCase ):
|
|||
predicates.append( ClientSearch.Predicate( HC.PREDICATE_TYPE_SYSTEM_EVERYTHING, min_current_count = 1 ) )
|
||||
predicates.append( ClientSearch.Predicate( HC.PREDICATE_TYPE_SYSTEM_INBOX, min_current_count = 1 ) )
|
||||
predicates.append( ClientSearch.Predicate( HC.PREDICATE_TYPE_SYSTEM_ARCHIVE, min_current_count = 0 ) )
|
||||
predicates.extend( [ ClientSearch.Predicate( predicate_type ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_UNTAGGED, HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_SIZE, HC.PREDICATE_TYPE_SYSTEM_AGE, HC.PREDICATE_TYPE_SYSTEM_HASH, HC.PREDICATE_TYPE_SYSTEM_DIMENSIONS, HC.PREDICATE_TYPE_SYSTEM_DURATION, HC.PREDICATE_TYPE_SYSTEM_NUM_WORDS, HC.PREDICATE_TYPE_SYSTEM_MIME, HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ] ] )
|
||||
predicates.extend( [ ClientSearch.Predicate( predicate_type ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_UNTAGGED, HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_SIZE, HC.PREDICATE_TYPE_SYSTEM_AGE, HC.PREDICATE_TYPE_SYSTEM_HASH, HC.PREDICATE_TYPE_SYSTEM_DIMENSIONS, HC.PREDICATE_TYPE_SYSTEM_DURATION, HC.PREDICATE_TYPE_SYSTEM_NUM_WORDS, HC.PREDICATE_TYPE_SYSTEM_MIME, HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO, HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE, HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER, HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS ] ] )
|
||||
|
||||
self.assertEqual( result, predicates )
|
||||
|
||||
|
|
|
@ -0,0 +1,106 @@
|
|||
import collections
|
||||
import HydrusConstants as HC
|
||||
import os
|
||||
import TestConstants
|
||||
import time
|
||||
import unittest
|
||||
import HydrusData
|
||||
import HydrusGlobals as HG
|
||||
import HydrusNetworking
|
||||
|
||||
class TestBandwidthManagement( unittest.TestCase ):
|
||||
|
||||
def test_bandwidth_tracker( self ):
|
||||
|
||||
bandwidth_tracker = HydrusNetworking.BandwidthTracker()
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 0 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 0 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 1 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 1 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 2 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 2 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 6 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 6 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 3600 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 3600 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, None ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, None ), 0 )
|
||||
self.assertAlmostEquals
|
||||
#
|
||||
|
||||
bandwidth_tracker.ReportDataUsed( 1024 )
|
||||
bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 0 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 0 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 1 ), 170 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 1 ), 1 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 2 ), 85 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 2 ), 1 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 6 ), 1024 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 6 ), 1 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 3600 ), 1024 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 3600 ), 1 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, None ), 1024 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, None ), 1 )
|
||||
|
||||
#
|
||||
|
||||
time.sleep( 5 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 0 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 0 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 1 ), 42 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 1 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 2 ), 85 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 2 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 6 ), 1024 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 6 ), 1 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 3600 ), 1024 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 3600 ), 1 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, None ), 1024 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, None ), 1 )
|
||||
|
||||
#
|
||||
|
||||
bandwidth_tracker.ReportDataUsed( 32 )
|
||||
bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
bandwidth_tracker.ReportDataUsed( 32 )
|
||||
bandwidth_tracker.ReportRequestUsed()
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 0 ), 0 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 0 ), 0 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 1 ), 53 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 1 ), 2 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 2 ), 90 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 2 ), 2 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 6 ), 1088 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 6 ), 3 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, 3600 ), 1088 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, 3600 ), 3 )
|
||||
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_DATA, None ), 1088 )
|
||||
self.assertEqual( bandwidth_tracker.GetUsage( HC.BANDWIDTH_TYPE_REQUESTS, None ), 3 )
|
||||
|
||||
|
13
test.py
13
test.py
|
@ -25,6 +25,7 @@ from include import TestDB
|
|||
from include import TestFunctions
|
||||
from include import TestClientImageHandling
|
||||
from include import TestHydrusNATPunch
|
||||
from include import TestHydrusNetworking
|
||||
from include import TestHydrusSerialisable
|
||||
from include import TestHydrusServer
|
||||
from include import TestHydrusSessions
|
||||
|
@ -262,7 +263,12 @@ class Controller( object ):
|
|||
return self._reads[ name ]
|
||||
|
||||
|
||||
def RequestMade( self, num_bytes ):
|
||||
def ReportDataUsed( self, num_bytes ):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def ReportRequestUsed( self ):
|
||||
|
||||
pass
|
||||
|
||||
|
@ -286,7 +292,10 @@ class Controller( object ):
|
|||
suites.append( unittest.TestLoader().loadTestsFromModule( TestHydrusSessions ) )
|
||||
suites.append( unittest.TestLoader().loadTestsFromModule( TestHydrusTags ) )
|
||||
if run_all or only_run == 'db': suites.append( unittest.TestLoader().loadTestsFromModule( TestDB ) )
|
||||
if run_all or only_run == 'downloading': suites.append( unittest.TestLoader().loadTestsFromModule( TestClientDownloading ) )
|
||||
if run_all or only_run == 'downloading':
|
||||
|
||||
suites.append( unittest.TestLoader().loadTestsFromModule( TestClientDownloading ) )
|
||||
suites.append( unittest.TestLoader().loadTestsFromModule( TestHydrusNetworking ) )
|
||||
if run_all or only_run == 'gui':
|
||||
suites.append( unittest.TestLoader().loadTestsFromModule( TestDialogs ) )
|
||||
suites.append( unittest.TestLoader().loadTestsFromModule( TestClientListBoxes ) )
|
||||
|
|
Loading…
Reference in New Issue