Version 110

This commit is contained in:
Hydrus 2014-04-09 15:18:58 -05:00
parent 130da0a904
commit 375611ce4b
19 changed files with 524 additions and 185 deletions

View File

@ -8,6 +8,24 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 110</h3></li>
<ul>
<li>fixed a variable overwriting issue in the subscription daemon error reporting code that was causing error spam</li>
<li>fixed more actual and potential instances of this error elsewhere</li>
<li>fixed a bug in the import file error system for non-local files</li>
<li>fixed url parsing for urls lacking 'http://' or 'https://'</li>
<li>fixed hentai foundry image parsing for two different cases</li>
<li>fixed client gui-initiated db backup to also back up the client_updates folder</li>
<li>a/c dropdown now shows current and pending counts separately!</li>
<li>fixed display of uncaught database exceptions</li>
<li>new version of sqlite for windows, might speed some things up a bit</li>
<li>upgraded a bunch of other libraries, look forward to mysterious new bugs!</li>
<li>I _think_ some transparency support is improved</li>
<li>moved messaging forward</li>
<li>dropped processed_mappings experiment</li>
<li>removed a bloated mapping index, hopefully speeding a couple things up</li>
<li>fixed an erroneous error message in hydrus network session manager</li>
</ul>
<li><h3>version 109</h3></li>
<ul>
<li>started processed_mappings table. for now, it mirrors normal mappings table</li>

View File

@ -314,24 +314,42 @@ def CatchExceptionClient( etype, value, tb ):
try:
trace_list = traceback.format_tb( tb )
if etype == HydrusExceptions.DBException:
( text, caller_traceback, db_traceback ) = value
info = {}
info[ 'text' ] = text
info[ 'caller_traceback' ] = caller_traceback
info[ 'db_traceback' ] = db_traceback
message = HC.Message( HC.MESSAGE_TYPE_DB_ERROR, info )
else:
trace_list = traceback.format_tb( tb )
trace = ''.join( trace_list )
message = HC.Message( HC.MESSAGE_TYPE_ERROR, { 'error' : ( etype, value, trace ) } )
trace = ''.join( trace_list )
HC.pubsub.pub( 'message', message )
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_ERROR, { 'error' : ( etype, value, trace ) } ) )
except:
message = 'Encountered an error I could not parse:'
text = 'Encountered an error I could not parse:'
message += os.linesep
text += os.linesep
message += HC.u( ( etype, value, tb ) )
text += HC.u( ( etype, value, tb ) )
try: message += traceback.format_exc()
try: text += traceback.format_exc()
except: pass
HC.ShowText( message )
HC.ShowText( text )
def GenerateCollectByChoices( sort_by_choices ):
@ -759,11 +777,11 @@ def ShowExceptionClient( e ):
if isinstance( e, HydrusExceptions.DBException ):
( caller_traceback, db_traceback ) = e.GetTracebacks()
( text, caller_traceback, db_traceback ) = e.args
info = {}
info[ 'text' ] = HC.u( e )
info[ 'text' ] = text
info[ 'caller_traceback' ] = caller_traceback
info[ 'db_traceback' ] = db_traceback

View File

@ -355,9 +355,9 @@ The database will be locked while the backup occurs, which may lock up your gui
connection.connect()
connection.close()
message = 'Something was already bound to port ' + HC.u( port )
text = 'Something was already bound to port ' + HC.u( port )
wx.CallLater( 1, HC.ShowText, message )
wx.CallLater( 1, HC.ShowText, text )
except:
@ -374,9 +374,9 @@ The database will be locked while the backup occurs, which may lock up your gui
except:
message = 'Tried to bind port ' + HC.u( port ) + ' but it failed'
text = 'Tried to bind port ' + HC.u( port ) + ' but it failed'
wx.CallLater( 1, HC.ShowText, message )
wx.CallLater( 1, HC.ShowText, text )

View File

@ -1430,6 +1430,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
shutil.copytree( HC.CLIENT_FILES_DIR, path + os.path.sep + 'client_files' )
shutil.copytree( HC.CLIENT_THUMBNAILS_DIR, path + os.path.sep + 'client_thumbnails' )
shutil.copytree( HC.CLIENT_UPDATES_DIR, path + os.path.sep + 'client_updates' )
HC.ShowText( 'Backup done!' )
@ -1721,31 +1722,41 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
cache_results.extend( [ ( namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ] )
results = []
ids = set()
if include_current: results += [ ( namespace_id, tag_id, current_count ) for ( namespace_id, tag_id, current_count, pending_count ) in cache_results ]
if include_pending: results += [ ( namespace_id, tag_id, pending_count ) for ( namespace_id, tag_id, current_count, pending_count ) in cache_results ]
current_ids_to_count = collections.Counter()
pending_ids_to_count = collections.Counter()
tags_to_count = collections.Counter()
[ tags_to_count.update( { ( namespace_id, tag_id ) : num_tags } ) for ( namespace_id, tag_id, num_tags ) in results ]
if collapse and not there_was_a_namespace:
for ( namespace_id, tag_id, current_count, pending_count ) in cache_results:
unnamespaced_tag_ids = { tag_id for ( namespace_id, tag_id, num_tags ) in results }
ids.add( ( namespace_id, tag_id ) )
[ tags_to_count.update( { ( 1, tag_id ) : num_tags } ) for ( namespace_id, tag_id, num_tags ) in results if namespace_id != 1 and tag_id in unnamespaced_tag_ids ]
current_ids_to_count[ ( namespace_id, tag_id ) ] += current_count
pending_ids_to_count[ ( namespace_id, tag_id ) ] += pending_count
if namespace_id != 1 and collapse and not there_was_a_namespace:
current_ids_to_count[ ( 1, tag_id ) ] += current_count
pending_ids_to_count[ ( 1, tag_id ) ] += pending_count
tag_info = [ ( self._GetNamespaceTag( c, namespace_id, tag_id ), num_tags ) for ( ( namespace_id, tag_id ), num_tags ) in tags_to_count.items() if num_tags > 0 ]
ids_to_do = set()
tags = { tag for ( tag, num_tags ) in tag_info }
if include_current: ids_to_do.update( ( id for ( id, count ) in current_ids_to_count.items() if count > 0 ) )
if include_pending: ids_to_do.update( ( id for ( id, count ) in pending_ids_to_count.items() if count > 0 ) )
ids_to_tags = { ( namespace_id, tag_id ) : self._GetNamespaceTag( c, namespace_id, tag_id ) for ( namespace_id, tag_id ) in ids_to_do }
tag_info = [ ( ids_to_tags[ id ], current_ids_to_count[ id ], pending_ids_to_count[ id ] ) for id in ids_to_do ]
tags_to_do = { tag for ( tag, current_count, pending_count ) in tag_info }
tag_censorship_manager = HC.app.GetManager( 'tag_censorship' )
filtered_tags = tag_censorship_manager.FilterTags( tag_service_identifier, tags )
filtered_tags = tag_censorship_manager.FilterTags( tag_service_identifier, tags_to_do )
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', tag ), num_tags ) for ( tag, num_tags ) in tag_info if tag in filtered_tags ]
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', tag ), { HC.CURRENT : current_count, HC.PENDING : pending_count } ) for ( tag, current_count, pending_count ) in tag_info if tag in filtered_tags ]
matches = CC.AutocompleteMatchesPredicates( tag_service_identifier, predicates, collapse = collapse )
@ -2066,16 +2077,16 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
predicates = []
if service_type in ( HC.COMBINED_FILE, HC.COMBINED_TAG ): predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ), None ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_HASH ] ] )
if service_type in ( HC.COMBINED_FILE, HC.COMBINED_TAG ): predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ) ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_HASH ] ] )
elif service_type in ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ):
service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_FILES } )
num_everything = service_info[ HC.SERVICE_INFO_NUM_FILES ]
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), num_everything ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), { HC.CURRENT : num_everything } ) )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ), None ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_HASH ] ] )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ) ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_HASH ] ] )
elif service_type in ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ):
@ -2105,21 +2116,21 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
num_archive = num_local - num_inbox
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), num_everything ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), { HC.CURRENT : num_everything } ) )
if num_inbox > 0:
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_INBOX, None ), num_inbox ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_ARCHIVE, None ), num_archive ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_INBOX, None ), { HC.CURRENT : num_inbox } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_ARCHIVE, None ), { HC.CURRENT : num_archive } ) )
if service_type == HC.FILE_REPOSITORY:
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_LOCAL, None ), num_local ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_NOT_LOCAL, None ), num_not_local ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_LOCAL, None ), { HC.CURRENT : num_local } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_NOT_LOCAL, None ), { HC.CURRENT : num_not_local } ) )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ), None ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_SIZE, HC.SYSTEM_PREDICATE_TYPE_AGE, HC.SYSTEM_PREDICATE_TYPE_HASH, HC.SYSTEM_PREDICATE_TYPE_WIDTH, HC.SYSTEM_PREDICATE_TYPE_HEIGHT, HC.SYSTEM_PREDICATE_TYPE_RATIO, HC.SYSTEM_PREDICATE_TYPE_DURATION, HC.SYSTEM_PREDICATE_TYPE_NUM_WORDS, HC.SYSTEM_PREDICATE_TYPE_MIME, HC.SYSTEM_PREDICATE_TYPE_RATING, HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, HC.SYSTEM_PREDICATE_TYPE_FILE_SERVICE ] ] )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ) ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_SIZE, HC.SYSTEM_PREDICATE_TYPE_AGE, HC.SYSTEM_PREDICATE_TYPE_HASH, HC.SYSTEM_PREDICATE_TYPE_WIDTH, HC.SYSTEM_PREDICATE_TYPE_HEIGHT, HC.SYSTEM_PREDICATE_TYPE_RATIO, HC.SYSTEM_PREDICATE_TYPE_DURATION, HC.SYSTEM_PREDICATE_TYPE_NUM_WORDS, HC.SYSTEM_PREDICATE_TYPE_MIME, HC.SYSTEM_PREDICATE_TYPE_RATING, HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, HC.SYSTEM_PREDICATE_TYPE_FILE_SERVICE ] ] )
return predicates
@ -3341,7 +3352,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
for ( namespace_id, tag_id, hash_ids ) in advanced_mappings_ids:
c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, namespace_id, tag_id ) )
c.execute( 'DELETE FROM processed_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, namespace_id, tag_id ) )
c.execute( 'DELETE FROM service_info WHERE service_id = ?;', ( service_id, ) )
@ -3700,7 +3710,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence DESC;' ) ]
c.execute( 'DELETE FROM mappings WHERE service_id = ?;', ( self._combined_tag_service_id, ) )
c.execute( 'DELETE FROM processed_mappings WHERE service_id = ?;', ( self._combined_tag_service_id, ) )
first_round = True
@ -3719,8 +3728,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
first_round = False
c.execute( 'INSERT INTO processed_mappings SELECT * FROM mappings WHERE service_id = ?;', ( self._combined_tag_service_id, ) )
c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = ?;', ( self._combined_tag_service_id, ) )
file_service_identifiers = self._GetServiceIdentifiers( c, ( HC.FILE_REPOSITORY, HC.LOCAL_FILE, HC.COMBINED_FILE ) )
@ -3802,7 +3809,13 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if dump_type == YAML_DUMP_ID_SUBSCRIPTION: data[ 'advanced_tag_options' ] = data[ 'advanced_tag_options' ].items()
c.execute( 'INSERT INTO yaml_dumps ( dump_type, dump_name, dump ) VALUES ( ?, ?, ? );', ( dump_type, dump_name, data ) )
try: c.execute( 'INSERT INTO yaml_dumps ( dump_type, dump_name, dump ) VALUES ( ?, ?, ? );', ( dump_type, dump_name, data ) )
except:
print( ( dump_type, dump_name, data ) )
raise
def _UpdateAutocompleteTagCacheFromFiles( self, c, file_service_id, hash_ids, direction ):
@ -3842,12 +3855,10 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
deletable_hash_ids = existing_hash_ids.intersection( appropriate_hash_ids )
c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( deletable_hash_ids ) + ' AND status = ?;', ( service_id, namespace_id, tag_id, old_status ) )
c.execute( 'DELETE FROM processed_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( deletable_hash_ids ) + ' AND status = ?;', ( service_id, namespace_id, tag_id, old_status ) )
num_old_deleted = self._GetRowCount( c )
c.execute( 'UPDATE mappings SET status = ? WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( appropriate_hash_ids ) + ' AND status = ?;', ( new_status, service_id, namespace_id, tag_id, old_status ) )
c.execute( 'UPDATE processed_mappings SET status = ? WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( appropriate_hash_ids ) + ' AND status = ?;', ( new_status, service_id, namespace_id, tag_id, old_status ) )
num_old_made_new = self._GetRowCount( c )
@ -3916,7 +3927,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
deletable_hash_ids.update( search_hash_ids )
c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( deletable_hash_ids ) + ';', ( self._combined_tag_service_id, namespace_id, tag_id ) )
c.execute( 'DELETE FROM processed_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( deletable_hash_ids ) + ';', ( self._combined_tag_service_id, namespace_id, tag_id ) )
UpdateAutocompleteTagCacheFromCombinedCurrentTags( namespace_id, tag_id, deletable_hash_ids, -1 )
@ -3929,7 +3939,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
deletable_hash_ids = set( hash_ids ).difference( existing_other_precedence_hash_ids )
c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( deletable_hash_ids ) + ' AND status = ?;', ( self._combined_tag_service_id, namespace_id, tag_id, HC.PENDING ) )
c.execute( 'DELETE FROM processed_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( deletable_hash_ids ) + ' AND status = ?;', ( self._combined_tag_service_id, namespace_id, tag_id, HC.PENDING ) )
UpdateAutocompleteTagCacheFromCombinedPendingTags( namespace_id, tag_id, deletable_hash_ids, -1 )
@ -3943,7 +3952,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
new_hash_ids = set( hash_ids ).difference( arguing_higher_precedence_hash_ids ).difference( existing_combined_hash_ids )
c.executemany( 'INSERT OR IGNORE INTO mappings VALUES ( ?, ?, ?, ?, ? );', [ ( self._combined_tag_service_id, namespace_id, tag_id, hash_id, HC.CURRENT ) for hash_id in new_hash_ids ] )
c.executemany( 'INSERT OR IGNORE INTO processed_mappings VALUES ( ?, ?, ?, ?, ? );', [ ( self._combined_tag_service_id, namespace_id, tag_id, hash_id, HC.CURRENT ) for hash_id in new_hash_ids ] )
UpdateAutocompleteTagCacheFromCombinedCurrentTags( namespace_id, tag_id, new_hash_ids, 1 )
@ -3955,7 +3963,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
new_hash_ids = set( hash_ids ).difference( existing_combined_hash_ids )
c.executemany( 'INSERT OR IGNORE INTO mappings VALUES ( ?, ?, ?, ?, ? );', [ ( self._combined_tag_service_id, namespace_id, tag_id, hash_id, HC.PENDING ) for hash_id in new_hash_ids ] )
c.executemany( 'INSERT OR IGNORE INTO processed_mappings VALUES ( ?, ?, ?, ?, ? );', [ ( self._combined_tag_service_id, namespace_id, tag_id, hash_id, HC.PENDING ) for hash_id in new_hash_ids ] )
UpdateAutocompleteTagCacheFromCombinedPendingTags( namespace_id, tag_id, new_hash_ids, 1 )
@ -3963,7 +3970,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
def DeletePending( namespace_id, tag_id, hash_ids ):
c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' AND status = ?;', ( service_id, namespace_id, tag_id, HC.PENDING ) )
c.execute( 'DELETE FROM processed_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' AND status = ?;', ( service_id, namespace_id, tag_id, HC.PENDING ) )
num_deleted = self._GetRowCount( c )
@ -3991,7 +3997,6 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
new_hash_ids = set( hash_ids ).difference( existing_hash_ids )
c.executemany( 'INSERT OR IGNORE INTO mappings VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, namespace_id, tag_id, hash_id, status ) for hash_id in new_hash_ids ] )
c.executemany( 'INSERT OR IGNORE INTO processed_mappings VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, namespace_id, tag_id, hash_id, status ) for hash_id in new_hash_ids ] )
num_rows_added = self._GetRowCount( c )
@ -4445,9 +4450,9 @@ class DB( ServiceDB ):
except:
message = 'Database commit error:' + os.linesep + traceback.format_exc()
text = 'Database commit error:' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
c.execute( 'ROLLBACK' )
@ -4604,7 +4609,6 @@ class DB( ServiceDB ):
c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' )
c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' )
c.execute( 'CREATE INDEX mappings_service_id_status_index ON mappings ( service_id, status );' )
c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
c.execute( 'CREATE TABLE mapping_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, reason_id ) );' )
c.execute( 'CREATE INDEX mapping_petitions_hash_id_index ON mapping_petitions ( hash_id );' )
@ -4646,13 +4650,6 @@ class DB( ServiceDB ):
c.execute( 'CREATE TABLE perceptual_hashes ( hash_id INTEGER PRIMARY KEY, phash BLOB_BYTES );' )
c.execute( 'CREATE TABLE processed_mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, status INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, status ) );' )
c.execute( 'CREATE INDEX processed_mappings_hash_id_index ON processed_mappings ( hash_id );' )
c.execute( 'CREATE INDEX processed_mappings_service_id_tag_id_index ON processed_mappings ( service_id, tag_id );' )
c.execute( 'CREATE INDEX processed_mappings_service_id_hash_id_index ON processed_mappings ( service_id, hash_id );' )
c.execute( 'CREATE INDEX processed_mappings_service_id_status_index ON processed_mappings ( service_id, status );' )
c.execute( 'CREATE INDEX processed_mappings_status_index ON processed_mappings ( status );' )
c.execute( 'CREATE TABLE ratings_filter ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, min REAL, max REAL, PRIMARY KEY( service_id, hash_id ) );' )
c.execute( 'CREATE TABLE reasons ( reason_id INTEGER PRIMARY KEY, reason TEXT );' )
@ -4926,6 +4923,15 @@ class DB( ServiceDB ):
if version == 109:
c.execute( 'DELETE FROM yaml_dumps WHERE dump_type = ?;', ( YAML_DUMP_ID_GUI_SESSION, ) )
c.execute( 'DROP TABLE processed_mappings;' )
c.execute( 'DROP INDEX mappings_status_index;' )
c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
HC.is_db_updated = True
@ -7029,6 +7035,7 @@ class DB( ServiceDB ):
shutil.copytree( path + os.path.sep + 'client_files', HC.CLIENT_FILES_DIR )
shutil.copytree( path + os.path.sep + 'client_thumbnails', HC.CLIENT_THUMBNAILS_DIR )
shutil.copytree( path + os.path.sep + 'client_updates', HC.CLIENT_UPDATES_DIR )
def Shutdown( self ): self._local_shutdown = True
@ -7232,9 +7239,9 @@ def DAEMONCheckImportFolders():
details[ 'failed_imported_paths' ].add( path )
message = 'Import folder failed to import ' + path + ':' + os.linesep + traceback.format_exc()
text = 'Import folder failed to import ' + path + ':' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
should_action = False
@ -7313,9 +7320,9 @@ def DAEMONDownloadFiles():
except:
message = 'Error downloading file:' + os.linesep + traceback.format_exc()
text = 'Error downloading file:' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
@ -7412,15 +7419,15 @@ def DAEMONResizeThumbnails():
except IOError as e:
message = 'Thumbnail rendering error:' + os.linesep + traceback.format_exc()
text = 'Thumbnail read error:' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
except Exception as e:
message = 'Thumbnail rendering error:' + os.linesep + traceback.format_exc()
text = 'Thumbnail rendering error:' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
if i % 10 == 0: time.sleep( 2 )
@ -7470,9 +7477,9 @@ def DAEMONSynchroniseAccounts():
name = service_identifier.GetName()
message = 'Failed to refresh account for ' + name + ':' + os.linesep + os.linesep + HC.u( e )
text = 'Failed to refresh account for ' + name + ':' + os.linesep + os.linesep + HC.u( e )
HC.ShowText( message )
HC.ShowText( text )
@ -7588,9 +7595,9 @@ def DAEMONSynchroniseMessages():
except Exception as e:
message = 'Failed to check ' + name + ':' + os.linesep + os.linesep + traceback.format_exc()
text = 'Failed to check ' + name + ':' + os.linesep + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
@ -7638,9 +7645,9 @@ def DAEMONSynchroniseMessages():
except:
message = 'Sending a message failed: ' + os.linesep + traceback.format_exc()
text = 'Sending a message failed: ' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
status = 'failed'
@ -7897,9 +7904,9 @@ def DAEMONSynchroniseRepositories():
message.Close()
message = 'Failed to update ' + name + ':' + os.linesep + os.linesep + HC.u( e )
text = 'Failed to update ' + name + ':' + os.linesep + os.linesep + HC.u( e )
HC.ShowText( message )
HC.ShowText( text )
time.sleep( 3 )
@ -8145,9 +8152,9 @@ def DAEMONSynchroniseSubscriptions():
except Exception as e:
message = 'While trying to execute a subscription, the url ' + url + ' caused this problem:' + os.linesep + traceback.format_exc()
text = 'While trying to execute a subscription, the url ' + url + ' caused this problem:' + os.linesep + traceback.format_exc()
HC.ShowText( message )
HC.ShowText( text )
i += 1

View File

@ -1105,7 +1105,10 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
new_page.SetSearchFocus()
except: pass
except Exception as e:
HC.ShowException( e )
if HC.PLATFORM_OSX: self._ClosePage( 0 )
@ -1951,7 +1954,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def NewSimilarTo( self, file_service_identifier, hash ): self._NewPageQuery( file_service_identifier, initial_predicates = [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, ( hash, 5 ) ), None ) ] )
def NewSimilarTo( self, file_service_identifier, hash ): self._NewPageQuery( file_service_identifier, initial_predicates = [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, ( hash, 5 ) ) ) ] )
def NotifyNewOptions( self ):

View File

@ -659,28 +659,38 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
else: tags_managers.append( m.GetTagsManager() )
lists_of_tags = []
lists_of_current_tags = [ list( tags_manager.GetCurrent( self._tag_service_identifier ) ) for tags_manager in tags_managers ]
lists_of_pending_tags = [ list( tags_manager.GetPending( self._tag_service_identifier ) ) for tags_manager in tags_managers ]
if self._include_current: lists_of_tags += [ list( tags_manager.GetCurrent( self._tag_service_identifier ) ) for tags_manager in tags_managers ]
if self._include_pending: lists_of_tags += [ list( tags_manager.GetPending( self._tag_service_identifier ) ) for tags_manager in tags_managers ]
current_tags_flat_iterable = itertools.chain.from_iterable( lists_of_current_tags )
pending_tags_flat_iterable = itertools.chain.from_iterable( lists_of_pending_tags )
all_tags_flat_iterable = itertools.chain.from_iterable( lists_of_tags )
current_tags_flat = [ tag for tag in current_tags_flat_iterable if HC.SearchEntryMatchesTag( half_complete_tag, tag ) ]
pending_tags_flat = [ tag for tag in pending_tags_flat_iterable if HC.SearchEntryMatchesTag( half_complete_tag, tag ) ]
all_tags_flat = [ tag for tag in all_tags_flat_iterable if HC.SearchEntryMatchesTag( half_complete_tag, tag ) ]
if self._current_namespace != '':
current_tags_flat = [ tag for tag in current_tags_flat if tag.startswith( self._current_namespace + ':' ) ]
pending_tags_flat = [ tag for tag in pending_tags_flat if tag.startswith( self._current_namespace + ':' ) ]
if self._current_namespace != '': all_tags_flat = [ tag for tag in all_tags_flat if tag.startswith( self._current_namespace + ':' ) ]
current_tags_to_count = collections.Counter( current_tags_flat )
pending_tags_to_count = collections.Counter( pending_tags_flat )
tags_to_count = collections.Counter( all_tags_flat )
tags_to_do = set()
results = CC.AutocompleteMatchesPredicates( self._tag_service_identifier, [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, tag ), count ) for ( tag, count ) in tags_to_count.items() ] )
if self._include_current: tags_to_do.update( current_tags_to_count.keys() )
if self._include_pending: tags_to_do.update( pending_tags_to_count.keys() )
results = CC.AutocompleteMatchesPredicates( self._tag_service_identifier, [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, tag ), { HC.CURRENT : current_tags_to_count[ tag ], HC.PENDING : pending_tags_to_count[ tag ] } ) for tag in tags_to_do ] )
matches = results.GetMatches( half_complete_tag )
if self._current_namespace != '': matches.insert( 0, HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( operator, namespace ), None ) )
if self._current_namespace != '': matches.insert( 0, HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( operator, namespace ) ) )
entry_predicate = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, search_text ), None )
entry_predicate = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, search_text ) )
try:
@ -835,13 +845,13 @@ class AutoCompleteDropdownTagsWrite( AutoCompleteDropdownTags ):
top_predicates = []
top_predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', search_text ), 0 ) )
top_predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', search_text ) ) )
siblings_manager = HC.app.GetManager( 'tag_siblings' )
sibling = siblings_manager.GetSibling( search_text )
if sibling is not None: top_predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', sibling ), 0 ) )
if sibling is not None: top_predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', sibling ) ) )
for predicate in top_predicates:
@ -874,7 +884,7 @@ class AutoCompleteDropdownTagsWrite( AutoCompleteDropdownTags ):
raw_parents = parents_manager.GetParents( self._tag_service_identifier, tag )
parents = [ HC.Predicate( HC.PREDICATE_TYPE_PARENT, raw_parent, None ) for raw_parent in raw_parents ]
parents = [ HC.Predicate( HC.PREDICATE_TYPE_PARENT, raw_parent ) for raw_parent in raw_parents ]
@ -4329,7 +4339,7 @@ class TagsBoxCPP( TagsBoxCounts ):
def _Activate( self, s, term ):
predicate = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', term ), None )
predicate = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', term ) )
HC.pubsub.pub( 'add_predicate', self._page_key, predicate )

View File

@ -1949,7 +1949,7 @@ class DialogInputFileSystemPredicate( Dialog ):
elif self._type == HC.SYSTEM_PREDICATE_TYPE_FILE_SERVICE: info = ( self._sign.GetClientData( self._sign.GetSelection() ), self._current_pending.GetClientData( self._current_pending.GetSelection() ), self._file_service_identifier.GetClientData( self._file_service_identifier.GetSelection() ) )
self._predicate = HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( self._type, info ), None )
self._predicate = HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( self._type, info ) )
self.EndModal( wx.ID_OK )
@ -2381,9 +2381,9 @@ class DialogInputLocalFiles( Dialog ):
except:
message = 'Tried to read a key, but did not understand it.'
text = 'Tried to read a key, but did not understand it.'
HC.ShowText( message )
HC.ShowText( text )

View File

@ -2102,7 +2102,7 @@ class ManagementPanelQuery( ManagementPanel ):
else: return
elif system_predicate_type == HC.SYSTEM_PREDICATE_TYPE_UNTAGGED: predicate = HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, ( '=', 0 ) ), None )
elif system_predicate_type == HC.SYSTEM_PREDICATE_TYPE_UNTAGGED: predicate = HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, ( '=', 0 ) ) )
if self._current_predicates_box.HasPredicate( predicate ): self._current_predicates_box.RemovePredicate( predicate )

View File

@ -1283,9 +1283,9 @@ class DraftPanel( wx.Panel ):
except:
message = 'The hydrus client could not connect to your message depot, so the message could not be sent!'
text = 'The hydrus client could not connect to your message depot, so the message could not be sent!'
HC.ShowText( message )
HC.ShowText( text )
return
@ -1422,4 +1422,64 @@ class MessagePanel( wx.Panel ):
self.GetParent().FitInside()
# here starts the message reboot code
class IMFrame( ClientGUICommon.Frame ):
    """Prototype instant-messaging window showing a conversation between two accounts.

    NOTE(review): draft/placeholder code — MeLabel, ThemLabel, ConvoBox,
    ConvoTextInput, context_key, `callable` and GetNameBlah are not defined
    anywhere visible, so this class cannot run as-is; confirm these helpers
    exist before wiring it up. The `context` parameter is currently unused.
    """
    
    def __init__( self, parent, me_account, them_account, context ):
        
        def InitialiseControls():
            
            # labels for the two participants, plus the conversation view and text entry
            self._me_label = MeLabel( self, me_account ) # maybe these two should be the same, and infer me/them status itself
            self._them_label = ThemLabel( self, them_account )
            
            self._convo_box = ConvoBox( self, context_key ) # something like this
            
            self._text_input = ConvoTextInput( self, callable ) # callable should be private method of this, or similar!
            
        
        def PopulateControls():
            
            # could introduce last convo here, or whatever.
            
            pass
            
        
        def ArrangeControls():
            
            # header row: '<me> talking to <them>'
            hbox = wx.BoxSizer( wx.HORIZONTAL )
            
            hbox.AddF( self._me_label, FLAGS_MIXED )
            hbox.AddF( wx.StaticText( self, label = ' talking to ' ), FLAGS_MIXED )
            hbox.AddF( self._them_label, FLAGS_MIXED )
            
            # conversation takes all spare space; text input pinned to the bottom
            vbox = wx.BoxSizer( wx.VERTICAL )
            
            vbox.AddF( hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
            vbox.AddF( self._convo_box, FLAGS_EXPAND_BOTH_WAYS )
            vbox.AddF( self._text_input, FLAGS_EXPAND_PERPENDICULAR )
            
            self.SetSizer( vbox )
            
            self.SetInitialSize( ( 400, 600 ) ) # this should be remembered, stuck in options
            
        
        me_name = me_account.GetNameBlah()
        them_name = them_account.GetNameBlah()
        
        ClientGUICommon.Frame.__init__( self, parent, title = me_name + ' talking to ' + them_name )
        
        InitialiseControls()
        
        PopulateControls()
        
        ArrangeControls()
        
        self.Show( True )
        
    
    def TextInputCallable( self, text ):
        """Receive submitted text from the input control.

        NOTE(review): not implemented yet — presumably this will hand `text`
        to the conversation context; confirm against the messaging design.
        """
        
        pass
        # send it to the context, which will report it
        
    

View File

@ -48,7 +48,7 @@ TEMP_DIR = BASE_DIR + os.path.sep + 'temp'
# Misc
NETWORK_VERSION = 13
SOFTWARE_VERSION = 109
SOFTWARE_VERSION = 110
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -1773,13 +1773,13 @@ class JobDatabase():
if isinstance( self._result, HydrusExceptions.DBException ):
( gumpf, db_traceback ) = self._result.GetTracebacks()
( text, gumpf, db_traceback ) = self._result.args
trace_list = traceback.format_stack()
caller_traceback = 'Stack Trace (most recent call last):' + os.linesep + os.linesep + os.linesep.join( trace_list )
raise HydrusExceptions.DBException( u( self._result ), caller_traceback, db_traceback )
raise HydrusExceptions.DBException( text, caller_traceback, db_traceback )
else: raise self._result
@ -1977,11 +1977,12 @@ class Predicate( HydrusYAMLBase ):
yaml_tag = u'!Predicate'
def __init__( self, predicate_type, value, count ):
def __init__( self, predicate_type, value, counts = {} ):
self._predicate_type = predicate_type
self._value = value
self._count = count
self._counts = collections.Counter()
self._counts.update( counts )
def __eq__( self, other ): return self.__hash__() == other.__hash__()
@ -1990,15 +1991,19 @@ class Predicate( HydrusYAMLBase ):
def __ne__( self, other ): return self.__hash__() != other.__hash__()
def __repr__( self ): return 'Predicate: ' + u( ( self._predicate_type, self._value, self._count ) )
def __repr__( self ): return 'Predicate: ' + u( ( self._predicate_type, self._value, self._counts ) )
def AddToCount( self, count ): self._count += count
def AddToCount( self, type, count ): self._counts[ type ] += count
def GetCopy( self ): return Predicate( self._predicate_type, self._value, self._count )
def GetCopy( self ): return Predicate( self._predicate_type, self._value, self._counts )
def GetCountlessCopy( self ): return Predicate( self._predicate_type, self._value, None )
def GetCount( self ): return self._count
def GetCount( self, type = None ):
if type is None: return sum( self._counts.values() )
else: return self._counts[ type ]
def GetInfo( self ): return ( self._predicate_type, self._value )
@ -2006,6 +2011,16 @@ class Predicate( HydrusYAMLBase ):
def GetUnicode( self, with_count = True ):
count_text = u''
if with_count:
count_text
if self._counts[ CURRENT ] > 0: count_text += u' (' + ConvertIntToPrettyString( self._counts[ CURRENT ] ) + u')'
if self._counts[ PENDING ] > 0: count_text += u' (+' + ConvertIntToPrettyString( self._counts[ PENDING ] ) + u')'
if self._predicate_type == PREDICATE_TYPE_SYSTEM:
( system_predicate_type, info ) = self._value
@ -2139,7 +2154,7 @@ class Predicate( HydrusYAMLBase ):
if with_count and self._count is not None: base += u' (' + ConvertIntToPrettyString( self._count ) + u')'
base += count_text
elif self._predicate_type == PREDICATE_TYPE_TAG:
@ -2150,7 +2165,7 @@ class Predicate( HydrusYAMLBase ):
base += tag
if self._count is not None: base += u' (' + ConvertIntToPrettyString( self._count ) + u')'
base += count_text
siblings_manager = app.GetManager( 'tag_siblings' )
@ -2166,7 +2181,7 @@ class Predicate( HydrusYAMLBase ):
base += tag
if with_count and self._count is not None: base += u' (' + ConvertIntToPrettyString( self._count ) + u')'
base += count_text
elif self._predicate_type == PREDICATE_TYPE_NAMESPACE:

View File

@ -576,14 +576,16 @@ class DownloaderHentaiFoundry( Downloader ):
# the .jpg bit is what we really need, but whatever
try:
index = html.index( 'http://pictures.hentai-foundry.com//' )
index = html.index( 'pictures.hentai-foundry.com' )
stuff = html[ index : index + 100 ]
image_url = html[ index : index + 256 ]
try: ( image_url, gumpf ) = stuff.split( '"', 1 )
except: ( image_url, gumpf ) = stuff.split( '&#039;', 1 )
if '"' in image_url: ( image_url, gumpf ) = image_url.split( '"', 1 )
if '&#039;' in image_url: ( image_url, gumpf ) = image_url.split( '&#039;', 1 )
except: raise Exception( 'Could not parse image url!' )
image_url = 'http://' + image_url
except Exception as e: raise Exception( 'Could not parse image url!' + os.linesep + HC.u( e ) )
soup = bs4.BeautifulSoup( html )
@ -1055,7 +1057,8 @@ class ImportArgsGenerator():
self._job_key.SetVariable( 'result', 'failed' )
HC.ShowText( 'Problem importing ' + name + '!' )
if 'name' in locals(): HC.ShowText( 'Problem importing ' + name + '!' )
HC.ShowException( e )
time.sleep( 2 )
@ -1803,11 +1806,11 @@ def Parse4chanPostScreen( html ):
print( repr( soup ) )
message = 'You are banned from this board! html written to log.'
text = 'You are banned from this board! html written to log.'
HC.ShowText( message )
HC.ShowText( text )
return ( 'big error', message )
return ( 'big error', text )
else:
@ -1820,11 +1823,11 @@ def Parse4chanPostScreen( html ):
try: print( repr( soup ) )
except: pass
message = 'Unknown problem; html written to log.'
text = 'Unknown problem; html written to log.'
HC.ShowText( message )
HC.ShowText( text )
return ( 'error', message )
return ( 'error', text )
problem = HC.u( problem_tag )

View File

@ -1,16 +1,5 @@
class CantRenderWithCVException( Exception ): pass
class DBException( Exception ):
def __init__( self, text, caller_traceback, db_traceback ):
Exception.__init__( self, text )
self._caller_traceback = caller_traceback
self._db_traceback = db_traceback
def GetTracebacks( self ): return ( self._caller_traceback, self._db_traceback )
class DBException( Exception ): pass
class DBAccessException( Exception ): pass
class FileException( Exception ): pass
class ForbiddenException( Exception ): pass

View File

@ -110,6 +110,11 @@ def GeneratePerceptualHash( path ):
# convert to 32 x 32 greyscale
if thumbnail.mode == 'P':
thumbnail = thumbnail.convert( 'RGBA' ) # problem with some P images converting to L without RGBA step in between
if thumbnail.mode == 'RGBA':
# this is some code i picked up somewhere

View File

@ -1,6 +1,11 @@
import HydrusConstants as HC
import HydrusExceptions
import os
import socket
import threading
from twisted.internet import reactor, defer
from twisted.internet.threads import deferToThread
from twisted.python import log
if HC.PLATFORM_WINDOWS: import win32com.client

View File

@ -96,6 +96,11 @@ def ParseURL( url ):
try:
starts_http = url.startswith( 'http://' )
starts_https = url.startswith( 'https://' )
if not starts_http and not starts_https: url = 'http://' + url
parse_result = urlparse.urlparse( url )
scheme = parse_result.scheme

View File

@ -647,7 +647,7 @@ class TagParentsManager():
for parent in parents:
parent_predicate = HC.Predicate( HC.PREDICATE_TYPE_PARENT, parent, None )
parent_predicate = HC.Predicate( HC.PREDICATE_TYPE_PARENT, parent )
results.append( parent_predicate )
@ -826,7 +826,7 @@ class TagSiblingsManager():
( old_operator, old_tag ) = old_predicate.GetValue()
new_predicate = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( old_operator, new_tag ), 0 )
new_predicate = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( old_operator, new_tag ) )
tags_to_predicates[ new_tag ] = new_predicate
@ -835,9 +835,11 @@ class TagSiblingsManager():
new_predicate = tags_to_predicates[ new_tag ]
count = old_predicate.GetCount()
current_count = old_predicate.GetCount( HC.CURRENT )
pending_count = old_predicate.GetCount( HC.PENDING )
new_predicate.AddToCount( count )
new_predicate.AddToCount( HC.CURRENT, current_count )
new_predicate.AddToCount( HC.PENDING, pending_count )
else: tags_to_include_in_results.add( tag )

199
include/Puncher.py Normal file
View File

@ -0,0 +1,199 @@
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2010 Frank Scholz <coherence@beebits.net>
from twisted.internet import reactor
import warnings
warnings.simplefilter("ignore")
from coherence.base import Coherence
from coherence.upnp.devices.control_point import ControlPoint
class Puncher(object):
    """Drive a UPnP InternetGatewayDevice via a Coherence control point.

    Supported commands: 'show-devices' (default), 'show-mappings',
    'add-mapping', 'delete-mapping' and 'info'. The constructor validates the
    sub-options for the chosen command, starts device discovery and arms a
    timeout that stops the twisted reactor; the actual work happens in the
    igd_found callback when a gateway is detected.

    NOTE(review): Python 2 code (print statements); depends on the
    third-party `coherence` package and a running twisted reactor.
    """

    def __init__(self,command,config):
        #print "command %r %r %r" %(command,config,config.subOptions)
        self.config = config
        # locked == False: take the first matching IGD then lock;
        # locked == None (show-devices): never lock, collect every device
        self.locked = False
        if command == None:
            self.command = 'show-devices'
        else:
            self.command = command
        if self.command == 'show-devices':
            self.locked = None
        if self.command == 'add-mapping':
            # validate and normalise the add-mapping sub-options in place
            if self.config.subOptions['internal-host'] == None:
                raise Exception("internal-host parameter missing")
            if self.config.subOptions['internal-port'] == None:
                raise Exception("internal-port parameter missing")
            if self.config.subOptions['protocol'].lower() not in ['tcp','udp']:
                raise Exception("protocol value invalid")
            # 'active' arrives as a string; coerce to bool
            if self.config.subOptions['active'].lower() in ['y','true','1','yes']:
                self.config.subOptions['active'] = True
            else:
                self.config.subOptions['active'] = False
            # '' means 'any remote host' in the UPnP call
            if self.config.subOptions['remote-host'].lower() in ['""','any']:
                self.config.subOptions['remote-host'] = ''
            # external port defaults to the internal port
            if self.config.subOptions['external-port'] == None:
                self.config.subOptions['external-port'] = self.config.subOptions['internal-port']
        if self.command == 'delete-mapping':
            if self.config.subOptions['remote-host'] == None:
                raise Exception("remote-host parameter missing")
            if self.config.subOptions['external-port'] == None:
                raise Exception("external-port parameter missing")
            if self.config.subOptions['protocol'].lower() not in ['tcp','udp']:
                raise Exception("protocol value invalid")
        # start discovery, restricted to InternetGatewayDevice clients
        coherence_config = {}
        coherence_config['logmode'] = 'none'
        self.control_point = ControlPoint(Coherence(coherence_config),auto_client=['InternetGatewayDevice'])
        self.control_point.connect(self.igd_found, 'Coherence.UPnP.ControlPoint.InternetGatewayDevice.detected')
        self.control_point.connect(self.igd_removed, 'Coherence.UPnP.ControlPoint.InternetGatewayDevice.removed')
        # give up after the configured number of seconds
        self.timeout = reactor.callLater(int(self.config['timeout']),self.stop)
        self.devices = {}
        # report printer chosen by command when stop() runs
        self.reports = {'show-devices': self.show_devices,
                        'show-mappings': self.show_mappings,
                        'info': self.show_info}

    def show_devices(self):
        # list every discovered gateway by friendly name and uuid
        for uuid in self.devices.keys():
            print "%s with uuid:%s" % (self.devices[uuid]['friendly_name'], uuid)

    def show_info(self):
        # devices plus the state variables gathered by state_variable_change
        for uuid in self.devices.keys():
            print "%s with uuid:%s" % (self.devices[uuid]['friendly_name'], uuid)
        if len(self.devices) > 0:
            print "External IP address: ", self.external_ip_address
            print "Number of port-mappings: ", self.port_mapping_number_of_entries

    def show_mappings(self):
        # print a table of the port-mappings fetched by append_mappings
        for uuid in self.devices.keys():
            print "%s with uuid:%s" % (self.devices[uuid]['friendly_name'], uuid)
            mappings = self.devices[uuid].get('mappings',None)
            if mappings == None or len(mappings) == 0:
                print "no port-mappings found"
            else:
                print "Ext. Port | Remote Host | Int. Port | Internal Host | Prot. | active | duration | description"
                print "=" * 100
                for mapping in mappings:
                    # prettify raw UPnP state-variable values for display
                    if mapping['NewLeaseDuration'] == '0':
                        mapping['NewLeaseDuration'] = 'infinite'
                    else:
                        mapping['NewLeaseDuration'] += 'sec'
                    if mapping['NewRemoteHost'] == '':
                        mapping['NewRemoteHost'] = 'any'
                    if mapping['NewEnabled'] == '1':
                        mapping['NewEnabled'] = 'yes'
                    else:
                        mapping['NewEnabled'] = 'no'
                    print " %05s | %-14s | %05s | %-14s | %5s | %6s | %8s | %s" % (mapping['NewExternalPort'],
                                                                                   mapping['NewRemoteHost'],
                                                                                   mapping['NewInternalPort'],
                                                                                   mapping['NewInternalClient'],
                                                                                   mapping['NewProtocol'],
                                                                                   mapping['NewEnabled'],
                                                                                   mapping['NewLeaseDuration'],
                                                                                   mapping['NewPortMappingDescription'])
                print "=" * 100

    def stop(self,quiet=False):
        """Cancel the timeout, optionally print the report, and stop the reactor."""
        try:
            self.timeout.cancel()
        except:
            pass
        if quiet == False:
            if len(self.devices) == 0:
                print "no InternetGatewayDevice found"
            elif len(self.devices) == 1:
                print "1 InternetGatewayDevice found:"
            else:
                print "%d InternetGatewayDevices found:" % len(self.devices)
            # fall back to show_devices for commands without a report entry
            self.reports.get(self.command,self.show_devices)()
            print ""
        reactor.stop()

    def append_mappings(self,mappings,device):
        # callback for get_all_port_mapping_entries: stash and finish
        device['mappings'] = mappings
        self.stop()

    def add_mapping_ok(self,result,device):
        print "port-mapping to %s added" %device['friendly_name']
        self.stop(quiet=True)

    def add_mapping_failed(self,result,device):
        print "failed to add port-mapping to %s" %device['friendly_name']
        self.stop(quiet=True)

    def delete_mapping_ok(self,result,device):
        print "port-mapping deleted from %s" %device['friendly_name']
        self.stop(quiet=True)

    def delete_mapping_failed(self,result,device):
        print "failed to delete port-mapping from %s" %device['friendly_name']
        self.stop(quiet=True)

    def igd_found(self,client,udn):
        """Handle a detected gateway: record it and run the requested command."""
        #print "IGD found", client.device.get_friendly_name()
        if self.locked == True:
            # already acting on a device; ignore further discoveries
            return
        elif self.locked == False:
            self.locked = True
        # optional uuid filter from the command line
        if(self.config['uuid'] != None and
           client.device.get_uuid().endswith(self.config['uuid']) == False):
            return
        self.devices[client.device.get_uuid()] = {'friendly_name': client.device.get_friendly_name()}
        if self.locked == True:
            # prefer the IP connection service, fall back to PPP
            wan_ip_connection_service = client.wan_device.wan_connection_device.wan_ip_connection or \
                                        client.wan_device.wan_connection_device.wan_ppp_connection
            if self.command == 'show-mappings':
                dfr = wan_ip_connection_service.get_all_port_mapping_entries()
                dfr.addCallback(self.append_mappings,self.devices[client.device.get_uuid()])
            elif self.command == 'add-mapping':
                dfr = wan_ip_connection_service.add_port_mapping(remote_host=self.config.subOptions['remote-host'],
                                                                 external_port=int(self.config.subOptions['external-port']),
                                                                 protocol=self.config.subOptions['protocol'].upper(),
                                                                 internal_port=int(self.config.subOptions['internal-port']),
                                                                 internal_client=self.config.subOptions['internal-host'],
                                                                 enabled=self.config.subOptions['active'],
                                                                 port_mapping_description=self.config.subOptions['description'],
                                                                 lease_duration=int(self.config.subOptions['lease-duration']))
                dfr.addCallback(self.add_mapping_ok,self.devices[client.device.get_uuid()])
                dfr.addErrback(self.add_mapping_failed,self.devices[client.device.get_uuid()])
            elif self.command == 'delete-mapping':
                dfr = wan_ip_connection_service.delete_port_mapping(remote_host=self.config.subOptions['remote-host'],
                                                                    external_port=int(self.config.subOptions['external-port']),
                                                                    protocol=self.config.subOptions['protocol'].upper())
                dfr.addCallback(self.delete_mapping_ok,self.devices[client.device.get_uuid()])
                dfr.addErrback(self.delete_mapping_failed,self.devices[client.device.get_uuid()])
            elif self.command == 'info':
                # gather both state variables; stop() fires once both arrive
                self.port_mapping_number_of_entries = None
                self.external_ip_address = None
                wan_ip_connection_service.subscribe_for_variable('PortMappingNumberOfEntries', callback=self.state_variable_change)
                wan_ip_connection_service.subscribe_for_variable('ExternalIPAddress', callback=self.state_variable_change)

    def igd_removed(self,udn):
        #print "IGD removed", udn
        pass

    def state_variable_change(self,variable):
        """Collect the 'info' command's state variables; stop when both are known."""
        if variable.name == 'ExternalIPAddress':
            self.external_ip_address = variable.value
        elif variable.name == 'PortMappingNumberOfEntries':
            if variable.value != '':
                self.port_mapping_number_of_entries = int(variable.value)
            else:
                self.port_mapping_number_of_entries = 0
        if(self.port_mapping_number_of_entries != None and
           self.external_ip_address != None):
            self.stop()

View File

@ -123,15 +123,15 @@ class TestClientDB( unittest.TestCase ):
preds = set()
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cars' ), 1 ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'car' ), 1 ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'series:cars' ), 1 ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cars' ), { HC.CURRENT : 1 } ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'car' ), { HC.CURRENT : 1 } ) )
preds.add( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'series:cars' ), { HC.CURRENT : 1 } ) )
read_preds = result.GetMatches( 'c' )
# count isn't tested in predicate.__eq__, I think
for p in read_preds: self.assertEqual( p.GetCount(), 1 )
for p in read_preds: self.assertEqual( p.GetCount( HC.CURRENT ), 1 )
self.assertEqual( set( read_preds ), preds )
@ -147,11 +147,11 @@ class TestClientDB( unittest.TestCase ):
result = self._read( 'autocomplete_tags', half_complete_tag = 'series:c' )
pred = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'series:cars' ), 1 )
pred = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'series:cars' ), { HC.CURRENT : 1 } )
( read_pred, ) = result.GetMatches( 'series:c' )
self.assertEqual( read_pred.GetCount(), 1 )
self.assertEqual( read_pred.GetCount( HC.CURRENT ), 1 )
self.assertEqual( pred, read_pred )
@ -159,11 +159,11 @@ class TestClientDB( unittest.TestCase ):
result = self._read( 'autocomplete_tags', tag = 'car' )
pred = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'car' ), 1 )
pred = HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'car' ), { HC.CURRENT : 1 } )
( read_pred, ) = result.GetMatches( 'car' )
self.assertEqual( read_pred.GetCount(), 1 )
self.assertEqual( read_pred.GetCount( HC.CURRENT ), 1 )
self.assertEqual( pred, read_pred )
@ -290,7 +290,7 @@ class TestClientDB( unittest.TestCase ):
for ( operator, namespace, result ) in tests:
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( operator, namespace ), None ) ]
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( operator, namespace ) ) ]
search_context = CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, predicates = predicates )
@ -304,7 +304,7 @@ class TestClientDB( unittest.TestCase ):
for ( predicate_type, info, result ) in tests:
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( predicate_type, info ), None ) ]
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( predicate_type, info ) ) ]
search_context = CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, predicates = predicates )
@ -318,7 +318,7 @@ class TestClientDB( unittest.TestCase ):
for ( operator, tag, result ) in tests:
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, tag ), None ) ]
predicates = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( operator, tag ) ) ]
search_context = CC.FileSearchContext( file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, predicates = predicates )
@ -582,10 +582,10 @@ class TestClientDB( unittest.TestCase ):
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), 1 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_INBOX, None ), 1 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_ARCHIVE, None ), 0 ) )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ), None ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_SIZE, HC.SYSTEM_PREDICATE_TYPE_AGE, HC.SYSTEM_PREDICATE_TYPE_HASH, HC.SYSTEM_PREDICATE_TYPE_WIDTH, HC.SYSTEM_PREDICATE_TYPE_HEIGHT, HC.SYSTEM_PREDICATE_TYPE_RATIO, HC.SYSTEM_PREDICATE_TYPE_DURATION, HC.SYSTEM_PREDICATE_TYPE_NUM_WORDS, HC.SYSTEM_PREDICATE_TYPE_MIME, HC.SYSTEM_PREDICATE_TYPE_RATING, HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, HC.SYSTEM_PREDICATE_TYPE_FILE_SERVICE ] ] )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_EVERYTHING, None ), { HC.CURRENT : 1 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_INBOX, None ), { HC.CURRENT : 1 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_ARCHIVE, None ), { HC.CURRENT : 0 } ) )
predicates.extend( [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( system_predicate_type, None ) ) for system_predicate_type in [ HC.SYSTEM_PREDICATE_TYPE_UNTAGGED, HC.SYSTEM_PREDICATE_TYPE_NUM_TAGS, HC.SYSTEM_PREDICATE_TYPE_LIMIT, HC.SYSTEM_PREDICATE_TYPE_SIZE, HC.SYSTEM_PREDICATE_TYPE_AGE, HC.SYSTEM_PREDICATE_TYPE_HASH, HC.SYSTEM_PREDICATE_TYPE_WIDTH, HC.SYSTEM_PREDICATE_TYPE_HEIGHT, HC.SYSTEM_PREDICATE_TYPE_RATIO, HC.SYSTEM_PREDICATE_TYPE_DURATION, HC.SYSTEM_PREDICATE_TYPE_NUM_WORDS, HC.SYSTEM_PREDICATE_TYPE_MIME, HC.SYSTEM_PREDICATE_TYPE_RATING, HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, HC.SYSTEM_PREDICATE_TYPE_FILE_SERVICE ] ] )
self.assertEqual( result, predicates )

View File

@ -441,45 +441,45 @@ class TestTagParents( unittest.TestCase ):
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'grandmother' ), 10 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'grandfather' ), 15 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'not_exist' ), 20 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'grandmother' ), { HC.CURRENT : 10 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'grandfather' ), { HC.CURRENT : 15 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'not_exist' ), { HC.CURRENT : 20 } ) )
self.assertEqual( self._tag_parents_manager.ExpandPredicates( HC.COMBINED_TAG_SERVICE_IDENTIFIER, predicates ), predicates )
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), 10 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), { HC.CURRENT : 10 } ) )
results = []
results.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), 10 ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'mother', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'father', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandmother', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandfather', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), { HC.CURRENT : 10 } ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'mother' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'father' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandmother' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandfather' ) )
self.assertEqual( set( self._tag_parents_manager.ExpandPredicates( HC.COMBINED_TAG_SERVICE_IDENTIFIER, predicates ) ), set( results ) )
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( '+', 'series' ), None ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), 10 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cousin' ), 5 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( '+', 'series' ) ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), { HC.CURRENT : 10 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cousin' ), { HC.CURRENT : 5 } ) )
results = []
results.append( HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( '+', 'series' ), None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), 10 ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'mother', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'father', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandmother', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandfather', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cousin' ), 5 ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'aunt', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'uncle', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandmother', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandfather', None ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_NAMESPACE, ( '+', 'series' ) ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'child' ), { HC.CURRENT : 10 } ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'mother' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'father' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandmother' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandfather' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'cousin' ), { HC.CURRENT : 5 } ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'aunt' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'uncle' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandmother' ) )
results.append( HC.Predicate( HC.PREDICATE_TYPE_PARENT, 'grandfather' ) )
self.assertEqual( set( self._tag_parents_manager.ExpandPredicates( HC.COMBINED_TAG_SERVICE_IDENTIFIER, predicates ) ), set( results ) )
@ -620,19 +620,19 @@ class TestTagSiblings( unittest.TestCase ):
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_a' ), 10 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_b' ), 5 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_c' ), 20 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_a' ), { HC.CURRENT : 10 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_b' ), { HC.CURRENT : 5 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_c' ), { HC.CURRENT : 20 } ) )
results = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_c' ), 35 ) ]
results = [ HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_c' ), { HC.CURRENT : 35 } ) ]
self.assertEqual( self._tag_siblings_manager.CollapsePredicates( predicates ), results )
predicates = []
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_a' ), 10 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_b' ), 5 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_c' ), 20 ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_a' ), { HC.CURRENT : 10 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_b' ), { HC.CURRENT : 5 } ) )
predicates.append( HC.Predicate( HC.PREDICATE_TYPE_TAG, ( '+', 'chain_c' ), { HC.CURRENT : 20 } ) )
( result, ) = self._tag_siblings_manager.CollapsePredicates( predicates )