Version 165

This commit is contained in:
Hydrus 2015-07-15 15:28:26 -05:00
parent c985f70a4f
commit 701fd3f2e4
27 changed files with 1682 additions and 1089 deletions

View File

@ -8,6 +8,53 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 165</h3></li>
<ul>
<li>added a db table to track when a file was sent to trash</li>
<li>added an option for whether trashed files are removed from view</li>
<li>added a max age to trashed files</li>
<li>added a max size to the trash</li>
<li>added a daemon to maintain the trash</li>
<li>improved some generic deleted shortcut logic, so delete key will present option to delete from trash if appropriate</li>
<li>shift+delete will now undelete</li>
<li>misc trash code improvements</li>
<li>thumbnails deleted from the trash will accurately remove their inbox status icons</li>
<li>images downloaded from file repositories will accurately add inbox status icons</li>
<li>reduced chance of problems when trying to delete media from the trash when it is currently being rendered</li>
<li>further reduced this chance</li>
<li>removed redundant undelete code, folded it into add/delete code</li>
<li>the media viewer will now remove files deleted from the trash in all cases, even when launched from a file repository search</li>
<li>significantly improved how animations figure out when to put frames on screen. these timings should be much more accurate, slowing down only if your CPU can't keep up</li>
<li>8chan thread watcher now parses all files from multiple file posts</li>
<li>if a booru provides a link starting with 'Download PNG', which refers to the original source png, that will be preferred over the jpg (konachan and yande.re, which run Moebooru, do this)</li>
<li>booru parser is now a little more liberal in parsing for the image link</li>
<li>yande.re booru support is added by default</li>
<li>fixed some local/remote state code, which was breaking file repository searches in several ways</li>
<li>improved error handling in repository file download daemon</li>
<li>cleaned up manage options dialog code</li>
<li>reduced min size of the media caches in manage options dialog</li>
<li>moved thumbnail size to 'maintenance and memory'</li>
<li>added better error handling to repositories that cannot find an update file during processing</li>
<li>repositories that have such errors will pause to give the user a chance to figure out a solution</li>
<li>misc code improvements</li>
<li>fixed a bug where file repository downloads were not importing if the file had once been deleted</li>
<li>dropped the clever indices from last week--sqlite had too much trouble with them. I will reform my db tables next week to get around the whole issue. for now, I have reintroduced the bulky index and forced sqlite to use it where appropriate</li>
<li>added a test for collecting pending tag data</li>
<li>tags in the form 'text:', which would sometimes slip through when typing quickly, are now not processed</li>
<li>improved tag cleaning error reporting</li>
<li>improved the logic that decides when special wildcard and namespace predicates are provided</li>
<li>namespace predicates now display as 'namespace:*anything*'</li>
<li>fixed a bug when launching the media viewer from collected results</li>
<li>fixed a command hover window layout bug that was putting namespace text in incorrect places</li>
<li>fixed a bug that was causing the new client-wide processing phase option not to work because, ironically, its calculation was out of phase</li>
<li>review services will now state how many updates have been processed</li>
<li>review services will specify if an update is imminently due, rather than saying the repo is fully synched</li>
<li>fixed a review services layout bug that was misaligning text after an account or data usage update</li>
<li>fixed a bug in system:similar_to serialisation, so pages with this predicate can now be saved to a session</li>
<li>fixed the same bug for system:hash</li>
<li>vacuum should be a bit politer about waiting for its popup message to appear</li>
<li>database maintenance won't run while a repository is synchronising</li>
</ul>
<li><h3>version 164</h3></li>
<ul>
<li>rewrote the drawing code for the listbox that displays tags in various ways to be a lot faster and more memory efficient</li>

View File

@ -518,6 +518,7 @@ class Controller( HydrusController.HydrusController ):
HydrusThreading.DAEMONWorker( 'CheckImportFolders', ClientDaemons.DAEMONCheckImportFolders, ( 'notify_restart_import_folders_daemon', 'notify_new_import_folders' ), period = 180 )
HydrusThreading.DAEMONWorker( 'CheckExportFolders', ClientDaemons.DAEMONCheckExportFolders, ( 'notify_restart_export_folders_daemon', 'notify_new_export_folders' ), period = 180 )
HydrusThreading.DAEMONWorker( 'DownloadFiles', ClientDaemons.DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) )
HydrusThreading.DAEMONWorker( 'MaintainTrash', ClientDaemons.DAEMONMaintainTrash, init_wait = 60 )
HydrusThreading.DAEMONWorker( 'ResizeThumbnails', ClientDaemons.DAEMONResizeThumbnails, period = 3600 * 24, init_wait = 600 )
HydrusThreading.DAEMONWorker( 'SynchroniseAccounts', ClientDaemons.DAEMONSynchroniseAccounts, ( 'permissions_are_stale', ) )
HydrusThreading.DAEMONWorker( 'SynchroniseRepositories', ClientDaemons.DAEMONSynchroniseRepositories, ( 'notify_restart_repo_sync_daemon', 'notify_new_permissions' ) )

View File

@ -1049,7 +1049,14 @@ class DB( HydrusDB.HydrusDB ):
if service_id == self._local_file_service_id:
self._DeleteFiles( self._trash_service_id, successful_hash_ids )
self._DeleteFiles( self._trash_service_id, successful_hash_ids, files_being_undeleted = True )
if service_id == self._trash_service_id:
now = HydrusData.GetNow()
self._c.executemany( 'INSERT OR IGNORE INTO file_trash ( hash_id, timestamp ) VALUES ( ?, ? );', ( ( hash_id, now ) for hash_id in successful_hash_ids ) )
@ -1389,6 +1396,9 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE TABLE file_transfers ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE INDEX file_transfers_hash_id ON file_transfers ( hash_id );' )
self._c.execute( 'CREATE TABLE file_trash ( hash_id INTEGER PRIMARY KEY, timestamp INTEGER );' )
self._c.execute( 'CREATE INDEX file_trash_timestamp ON file_trash ( timestamp );' )
self._c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ), FOREIGN KEY( service_id, hash_id ) REFERENCES files_info ON DELETE CASCADE );' )
self._c.execute( 'CREATE INDEX file_petitions_hash_id_index ON file_petitions ( hash_id );' )
@ -1413,8 +1423,7 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE INDEX mappings_namespace_id_index ON mappings ( namespace_id );' )
self._c.execute( 'CREATE INDEX mappings_tag_id_index ON mappings ( tag_id );' )
self._c.execute( 'CREATE INDEX mappings_hash_id_index ON mappings ( hash_id );' )
self._c.execute( 'CREATE INDEX mappings_status_pending_index ON mappings ( status ) WHERE status = 1;' )
self._c.execute( 'CREATE INDEX mappings_status_deleted_index ON mappings ( status ) WHERE status = 2;' )
self._c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
self._c.execute( 'CREATE TABLE mapping_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, reason_id ) );' )
self._c.execute( 'CREATE INDEX mapping_petitions_hash_id_index ON mapping_petitions ( hash_id );' )
@ -1528,20 +1537,19 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'COMMIT' )
def _DeleteFiles( self, service_id, hash_ids ):
def _DeleteFiles( self, service_id, hash_ids, files_being_undeleted = False ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
rows = self._c.execute( 'SELECT * FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ).fetchall()
rows = self._c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ).fetchall()
# service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words
hash_ids = { row[ 1 ] for row in rows }
hash_ids = { row[ 0 ] for row in rows }
if len( hash_ids ) > 0:
total_size = sum( [ row[ 2 ] for row in rows ] )
total_size = sum( [ row[ 1 ] for row in rows ] )
num_files = len( rows )
num_thumbnails = len( [ 1 for row in rows if row[ 3 ] in HC.MIMES_WITH_THUMBNAILS ] )
num_thumbnails = len( [ 1 for row in rows if row[ 2 ] in HC.MIMES_WITH_THUMBNAILS ] )
num_inbox = len( hash_ids.intersection( self._inbox_hash_ids ) )
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
@ -1552,7 +1560,15 @@ class DB( HydrusDB.HydrusDB ):
service_info_updates.append( ( -num_files, service_id, HC.SERVICE_INFO_NUM_FILES ) )
service_info_updates.append( ( -num_thumbnails, service_id, HC.SERVICE_INFO_NUM_THUMBNAILS ) )
service_info_updates.append( ( -num_inbox, service_id, HC.SERVICE_INFO_NUM_INBOX ) )
service_info_updates.append( ( num_files, service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) )
if not files_being_undeleted:
# an undelete moves from trash to local, which shouldn't be remembered as a delete from the trash service
service_info_updates.append( ( num_files, service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) )
self._c.executemany( 'INSERT OR IGNORE INTO deleted_files ( service_id, hash_id ) VALUES ( ?, ? );', [ ( service_id, hash_id ) for hash_id in hash_ids ] )
self._c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates )
@ -1561,22 +1577,23 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._c.executemany( 'INSERT OR IGNORE INTO deleted_files ( service_id, hash_id ) VALUES ( ?, ? );', [ ( service_id, hash_id ) for hash_id in hash_ids ] )
self._UpdateAutocompleteTagCacheFromFiles( service_id, hash_ids, -1 )
if service_id == self._local_file_service_id:
new_rows = [ ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) for ( service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in rows ]
self._AddFiles( self._trash_service_id, new_rows )
self._AddFiles( self._trash_service_id, rows )
if service_id == self._trash_service_id:
self._ArchiveFiles( hash_ids )
self._c.execute( 'DELETE FROM file_trash WHERE hash_id IN ' + splayed_hash_ids + ';' )
self._DeletePhysicalFiles( hash_ids )
if not files_being_undeleted:
self._ArchiveFiles( hash_ids )
self._DeletePhysicalFiles( hash_ids )
self.pub_after_commit( 'notify_new_pending' )
@ -1725,7 +1742,7 @@ class DB( HydrusDB.HydrusDB ):
if service.GetServiceType() == HC.TAG_REPOSITORY:
pending_rescinded_mappings_ids = HydrusData.BuildKeyToListDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in self._c.execute( 'SELECT namespace_id, tag_id, hash_id FROM mappings WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ) ] )
pending_rescinded_mappings_ids = HydrusData.BuildKeyToListDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in self._c.execute( 'SELECT namespace_id, tag_id, hash_id FROM mappings INDEXED BY mappings_status_index WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ) ] )
pending_rescinded_mappings_ids = [ ( namespace_id, tag_id, hash_ids ) for ( ( namespace_id, tag_id ), hash_ids ) in pending_rescinded_mappings_ids.items() ]
@ -1753,6 +1770,33 @@ class DB( HydrusDB.HydrusDB ):
def _DeletePhysicalFiles( self, hash_ids ):
def DeletePaths( paths ):
    # Best-effort physical deletion of a batch of file paths.
    # For each path: first try to clear any read-only attribute (os.chmod with
    # read+write bits) so Windows read-only files can be removed; failures here
    # are deliberately ignored. Then attempt the actual delete; an OSError is
    # printed rather than raised so one stubborn file does not abort the rest
    # of the batch.
    for path in paths:
        try:
            os.chmod( path, stat.S_IWRITE | stat.S_IREAD )
        except:
            pass
        try:
            os.remove( path )
        except OSError:
            print( 'In trying to delete the orphan ' + path + ', this error was encountered:' )
            print( traceback.format_exc() )
deletee_paths = set()
potentially_pending_upload_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM file_transfers;', ) }
deletable_file_hash_ids = hash_ids.difference( potentially_pending_upload_hash_ids )
@ -1766,18 +1810,7 @@ class DB( HydrusDB.HydrusDB ):
try: path = ClientFiles.GetFilePath( hash )
except HydrusExceptions.NotFoundException: continue
try:
try: os.chmod( path, stat.S_IWRITE | stat.S_IREAD )
except: pass
os.remove( path )
except OSError:
print( 'In trying to delete the orphan ' + path + ', this error was encountered:' )
print( traceback.format_exc() )
deletee_paths.add( path )
@ -1794,21 +1827,21 @@ class DB( HydrusDB.HydrusDB ):
path = ClientFiles.GetExpectedThumbnailPath( hash, True )
resized_path = ClientFiles.GetExpectedThumbnailPath( hash, False )
try:
if os.path.exists( path ):
if os.path.exists( path ): os.remove( path )
if os.path.exists( resized_path ): os.remove( resized_path )
deletee_paths.add( path )
except OSError:
print( 'In trying to delete the orphan ' + path + ' or ' + resized_path + ', this error was encountered:' )
print( traceback.format_exc() )
if os.path.exists( resized_path ):
deletee_paths.add( resized_path )
self._c.execute( 'DELETE from perceptual_hashes WHERE hash_id IN ' + HydrusData.SplayListForDB( deletable_thumbnail_hash_ids ) + ';' )
wx.CallLater( 5000, DeletePaths, deletee_paths )
def _DeleteServiceInfo( self ):
@ -3163,6 +3196,15 @@ class DB( HydrusDB.HydrusDB ):
return pendings
def _GetOldestTrashHashes( self, minimum_age = 0 ):
    # Return the hashes of (at most ten of) the longest-trashed files that
    # have been in the trash for at least minimum_age seconds, oldest first.
    cutoff = HydrusData.GetNow() - minimum_age
    cursor = self._c.execute( 'SELECT hash_id FROM file_trash WHERE timestamp < ? ORDER BY timestamp ASC LIMIT 10;', ( cutoff, ) )
    hash_ids = set()
    for ( hash_id, ) in cursor:
        hash_ids.add( hash_id )
    return self._GetHashes( hash_ids )
def _GetOptions( self ):
result = self._c.execute( 'SELECT options FROM options;' ).fetchone()
@ -3210,7 +3252,7 @@ class DB( HydrusDB.HydrusDB ):
current_update_weight = 0
pending_dict = HydrusData.BuildKeyToListDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in self._c.execute( 'SELECT namespace_id, tag_id, hash_id FROM mappings WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ) ] )
pending_dict = HydrusData.BuildKeyToListDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in self._c.execute( 'SELECT namespace_id, tag_id, hash_id FROM mappings INDEXED BY mappings_status_index WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ) ] )
pending_chunks = []
@ -3548,8 +3590,8 @@ class DB( HydrusDB.HydrusDB ):
elif info_type == HC.SERVICE_INFO_NUM_NAMESPACES: result = self._c.execute( 'SELECT COUNT( DISTINCT namespace_id ) FROM mappings WHERE service_id = ? AND status IN ( ?, ? );', ( service_id, HC.CURRENT, HC.PENDING ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_TAGS: result = self._c.execute( 'SELECT COUNT( DISTINCT tag_id ) FROM mappings WHERE service_id = ? AND status IN ( ?, ? );', ( service_id, HC.CURRENT, HC.PENDING ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_MAPPINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM mappings WHERE service_id = ? AND status IN ( ?, ? );', ( service_id, HC.CURRENT, HC.PENDING ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_DELETED_MAPPINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM mappings WHERE service_id = ? AND status = ?;', ( service_id, HC.DELETED ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PENDING_MAPPINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM mappings WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_DELETED_MAPPINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM mappings INDEXED BY mappings_status_index WHERE service_id = ? AND status = ?;', ( service_id, HC.DELETED ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PENDING_MAPPINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM mappings INDEXED BY mappings_status_index WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PETITIONED_MAPPINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM mapping_petitions WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PENDING_TAG_SIBLINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM tag_sibling_petitions WHERE service_id = ? AND status = ?;', ( service_id, HC.PENDING ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PETITIONED_TAG_SIBLINGS: result = self._c.execute( 'SELECT COUNT( * ) FROM tag_sibling_petitions WHERE service_id = ? AND status = ?;', ( service_id, HC.PETITIONED ) ).fetchone()
@ -4633,6 +4675,12 @@ class DB( HydrusDB.HydrusDB ):
self._UpdateServiceInfo( service_id, info_update )
elif action == HC.SERVICE_UPDATE_PAUSE:
info_update = { 'paused' : True }
self._UpdateServiceInfo( service_id, info_update )
self.pub_service_updates_after_commit( service_keys_to_service_updates )
@ -4705,6 +4753,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'media_results_from_ids': result = self._GetMediaResults( *args, **kwargs )
elif action == 'news': result = self._GetNews( *args, **kwargs )
elif action == 'nums_pending': result = self._GetNumsPending( *args, **kwargs )
elif action == 'oldest_trash_hashes': result = self._GetOldestTrashHashes( *args, **kwargs )
elif action == 'options': result = self._GetOptions( *args, **kwargs )
elif action == 'pending': result = self._GetPending( *args, **kwargs )
elif action == 'pixiv_account': result = self._GetYAMLDump( YAML_DUMP_ID_SINGLE, 'pixiv_account' )
@ -4987,36 +5036,11 @@ class DB( HydrusDB.HydrusDB ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
rows = self._c.execute( 'SELECT * FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( self._trash_service_id, ) ).fetchall()
rows = self._c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( self._trash_service_id, ) ).fetchall()
# service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words
hash_ids = { row[ 1 ] for row in rows }
if len( hash_ids ) > 0:
if len( rows ) > 0:
total_size = sum( [ row[ 2 ] for row in rows ] )
num_files = len( rows )
num_thumbnails = len( [ 1 for row in rows if row[ 3 ] in HC.MIMES_WITH_THUMBNAILS ] )
num_inbox = len( hash_ids.intersection( self._inbox_hash_ids ) )
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
service_info_updates = []
service_info_updates.append( ( -total_size, self._trash_service_id, HC.SERVICE_INFO_TOTAL_SIZE ) )
service_info_updates.append( ( -num_files, self._trash_service_id, HC.SERVICE_INFO_NUM_FILES ) )
service_info_updates.append( ( -num_thumbnails, self._trash_service_id, HC.SERVICE_INFO_NUM_THUMBNAILS ) )
service_info_updates.append( ( -num_inbox, self._trash_service_id, HC.SERVICE_INFO_NUM_INBOX ) )
self._c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates )
self._c.execute( 'DELETE FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( self._trash_service_id, ) )
self._UpdateAutocompleteTagCacheFromFiles( self._trash_service_id, hash_ids, -1 )
new_rows = [ ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) for ( service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in rows ]
self._AddFiles( self._local_file_service_id, new_rows )
self._AddFiles( self._local_file_service_id, rows )
@ -5626,6 +5650,33 @@ class DB( HydrusDB.HydrusDB ):
self._c.executemany( 'INSERT OR IGNORE INTO deleted_files ( service_id, hash_id ) VALUES ( ?, ? );', ( ( self._trash_service_id, hash_id ) for hash_id in deleted_hash_ids ) )
if version == 164:
self._c.execute( 'CREATE TABLE file_trash ( hash_id INTEGER PRIMARY KEY, timestamp INTEGER );' )
self._c.execute( 'CREATE INDEX file_trash_timestamp ON file_trash ( timestamp );' )
self._trash_service_id = self._GetServiceId( CC.TRASH_SERVICE_KEY )
trash_hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( self._trash_service_id, ) ) ]
now = HydrusData.GetNow()
self._c.executemany( 'INSERT OR IGNORE INTO file_trash ( hash_id, timestamp ) VALUES ( ?, ? );', ( ( hash_id, now ) for hash_id in trash_hash_ids ) )
self._c.execute( 'DELETE FROM service_info WHERE service_id = ?;', ( self._trash_service_id, ) )
#
self._c.execute( 'DROP INDEX mappings_status_pending_index;' )
self._c.execute( 'DROP INDEX mappings_status_deleted_index;' )
self._c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
#
self._c.execute( 'REPLACE INTO yaml_dumps VALUES ( ?, ?, ? );', ( YAML_DUMP_ID_REMOTE_BOORU, 'yande.re', ClientDefaults.GetDefaultBoorus()[ 'yande.re' ] ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
HydrusGlobals.is_db_updated = True
@ -6140,6 +6191,8 @@ class DB( HydrusDB.HydrusDB ):
HydrusGlobals.pubsub.pub( 'message', job_key )
time.sleep( 1 )
self._c.execute( 'VACUUM' )
job_key.SetVariable( 'popup_text_1', prefix + 'cleaning up' )

View File

@ -211,7 +211,7 @@ def DAEMONDownloadFiles():
wx.GetApp().WaitUntilWXThreadIdle()
wx.GetApp().WriteSynchronous( 'import_file', temp_path )
wx.GetApp().WriteSynchronous( 'import_file', temp_path, override_deleted = True )
finally:
@ -220,9 +220,10 @@ def DAEMONDownloadFiles():
break
except:
except Exception as e:
HydrusData.ShowText( 'Error downloading file:' + os.linesep + traceback.format_exc() )
HydrusData.ShowText( 'Error downloading file!' )
HydrusData.ShowException( e )
@ -236,6 +237,64 @@ def DAEMONFlushServiceUpdates( list_of_service_keys_to_service_updates ):
wx.GetApp().WriteSynchronous( 'service_updates', service_keys_to_service_updates )
def DAEMONMaintainTrash():
    """Background worker that keeps the trash service within the configured limits.

    Reads two options:
      * 'trash_max_size' -- maximum total trash size in MB, or None for no limit
      * 'trash_max_age'  -- maximum age of a trashed file in hours, or None for no limit

    While a limit is exceeded, the oldest trashed files are fetched in small
    batches via the 'oldest_trash_hashes' db read and permanently deleted with
    a trash-service content update. Bails out promptly on client shutdown.
    """
    
    max_size = HC.options[ 'trash_max_size' ]
    max_age = HC.options[ 'trash_max_age' ]
    
    # Convert the user-facing units (MB, hours) only when a limit is actually
    # set -- the original code multiplied first, which raises TypeError on a
    # None option before the 'is not None' guards below could ever apply.
    if max_size is not None:
        max_size = max_size * 1048576
    
    if max_age is not None:
        max_age = max_age * 3600
    
    if max_size is not None:
        service_info = wx.GetApp().Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
        while service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] > max_size:
            
            if HydrusGlobals.shutdown:
                return
            
            hashes = wx.GetApp().Read( 'oldest_trash_hashes' )
            
            if len( hashes ) == 0:
                return
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            wx.GetApp().WaitUntilWXThreadIdle()
            wx.GetApp().WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            # re-read so the loop condition sees the post-delete total size
            service_info = wx.GetApp().Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
    
    if max_age is not None:
        hashes = wx.GetApp().Read( 'oldest_trash_hashes', minimum_age = max_age )
        
        while len( hashes ) > 0:
            
            if HydrusGlobals.shutdown:
                return
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            wx.GetApp().WaitUntilWXThreadIdle()
            wx.GetApp().WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            hashes = wx.GetApp().Read( 'oldest_trash_hashes', minimum_age = max_age )
def DAEMONResizeThumbnails():
if not wx.GetApp().CurrentlyIdle(): return
@ -342,11 +401,15 @@ def DAEMONSynchroniseRepositories():
services = wx.GetApp().GetServicesManager().GetServices( HC.REPOSITORIES )
HydrusGlobals.currently_processing_updates = True
for service in services:
service.Sync()
HydrusGlobals.currently_processing_updates = False
time.sleep( 5 )

View File

@ -1002,6 +1002,25 @@ class Service( HydrusData.HydrusYAMLBase ):
def __hash__( self ): return self._service_key.__hash__()
def _ReportSyncProcessingError( self, path ):
    # An expected update file has gone missing during sync: warn the user and
    # pause this service indefinitely so processing does not repeatedly fail
    # on the same gap.
    separator = os.linesep * 2
    
    paragraphs = [
        'While synchronising ' + self._name + ', the expected update file ' + path + ', was missing.',
        'The service has been indefinitely paused.',
        'This is a serious error. Unless you know where the file(s) went and can restore them, you should delete this service and recreate it from scratch.'
    ]
    
    HydrusData.ShowText( separator.join( paragraphs ) )
    
    # Apply the pause to this in-memory service object and persist it to the db.
    service_keys_to_service_updates = { self._service_key : [ HydrusData.ServiceUpdate( HC.SERVICE_UPDATE_PAUSE ) ] }
    
    self.ProcessServiceUpdates( service_keys_to_service_updates )
    
    wx.GetApp().Write( 'service_updates', service_keys_to_service_updates )
def CanDownload( self ): return self._info[ 'account' ].HasPermission( HC.GET_DATA ) and not self.HasRecentError()
def CanDownloadUpdate( self ):
@ -1015,7 +1034,7 @@ class Service( HydrusData.HydrusYAMLBase ):
update_is_downloaded = self._info[ 'next_download_timestamp' ] > self._info[ 'next_processing_timestamp' ]
it_is_time = HydrusData.TimeHasPassed( self._info[ 'next_processing_timestamp' ] + HC.options[ 'processing_phase' ] )
it_is_time = HydrusData.TimeHasPassed( self._info[ 'next_processing_timestamp' ] + HC.UPDATE_DURATION + HC.options[ 'processing_phase' ] )
return update_is_downloaded and it_is_time
@ -1076,7 +1095,8 @@ class Service( HydrusData.HydrusYAMLBase ):
num_updates_processed = ( next_processing_timestamp - first_timestamp ) / HC.UPDATE_DURATION
downloaded_text = HydrusData.ConvertValueRangeToPrettyString( num_updates_downloaded, num_updates )
downloaded_text = 'downloaded ' + HydrusData.ConvertValueRangeToPrettyString( num_updates_downloaded, num_updates )
processed_text = 'processed ' + HydrusData.ConvertValueRangeToPrettyString( num_updates_processed, num_updates )
if not self._info[ 'account' ].HasPermission( HC.GET_DATA ): status = 'updates on hold'
else:
@ -1084,10 +1104,20 @@ class Service( HydrusData.HydrusYAMLBase ):
if self.CanDownloadUpdate(): status = 'downloaded up to ' + HydrusData.ConvertTimestampToPrettySync( self._info[ 'next_download_timestamp' ] )
elif self.CanProcessUpdate(): status = 'processed up to ' + HydrusData.ConvertTimestampToPrettySync( self._info[ 'next_processing_timestamp' ] )
elif self.HasRecentError(): status = 'due to a previous error, update is delayed - next check ' + self.GetRecentErrorPending()
else: status = 'fully synchronised - next update ' + HydrusData.ConvertTimestampToPrettyPending( self._info[ 'next_download_timestamp' ] + HC.UPDATE_DURATION + 1800 )
else:
if HydrusData.TimeHasPassed( self._info[ 'next_download_timestamp' ] + HC.UPDATE_DURATION ):
status = 'next update will be downloaded soon'
else:
status = 'fully synchronised - next update ' + HydrusData.ConvertTimestampToPrettyPending( self._info[ 'next_download_timestamp' ] + HC.UPDATE_DURATION + 1800 )
return downloaded_text + ' - ' + status
return downloaded_text + ' - ' + processed_text + ' - ' + status
def HasRecentError( self ):
@ -1159,6 +1189,10 @@ class Service( HydrusData.HydrusYAMLBase ):
self._info[ 'next_processing_timestamp' ] = next_processing_timestamp
elif action == HC.SERVICE_UPDATE_PAUSE:
self._info[ 'paused' ] = True
@ -1489,6 +1523,13 @@ class Service( HydrusData.HydrusYAMLBase ):
path = ClientFiles.GetExpectedServiceUpdatePackagePath( self._service_key, self._info[ 'next_processing_timestamp' ] )
if not os.path.exists( path ):
self._ReportSyncProcessingError( path )
return
with open( path, 'rb' ) as f: obj_string = f.read()
service_update_package = HydrusSerialisable.CreateFromString( obj_string )
@ -1504,6 +1545,13 @@ class Service( HydrusData.HydrusYAMLBase ):
path = ClientFiles.GetExpectedContentUpdatePackagePath( self._service_key, self._info[ 'next_processing_timestamp' ], subindex )
if not os.path.exists( path ):
self._ReportSyncProcessingError( path )
return
job_key.SetVariable( 'popup_text_1', update_index_string + subupdate_index_string + 'loading from disk' )
with open( path, 'rb' ) as f: obj_string = f.read()

View File

@ -15,9 +15,9 @@ def GetClientDefaultOptions():
options[ 'hpos' ] = 400
options[ 'vpos' ] = 700
options[ 'exclude_deleted_files' ] = False
options[ 'thumbnail_cache_size' ] = 100 * 1048576
options[ 'preview_cache_size' ] = 25 * 1048576
options[ 'fullscreen_cache_size' ] = 200 * 1048576
options[ 'thumbnail_cache_size' ] = 25 * 1048576
options[ 'preview_cache_size' ] = 15 * 1048576
options[ 'fullscreen_cache_size' ] = 150 * 1048576
options[ 'thumbnail_dimensions' ] = [ 150, 125 ]
options[ 'password' ] = None
options[ 'num_autocomplete_chars' ] = 2
@ -29,6 +29,9 @@ def GetClientDefaultOptions():
options[ 'maintenance_delete_orphans_period' ] = 86400 * 3
options[ 'maintenance_vacuum_period' ] = 86400 * 5
options[ 'fit_to_canvas' ] = False
options[ 'trash_max_age' ] = 72
options[ 'trash_max_size' ] = 512
options[ 'remove_trashed_files' ] = False
system_predicates = {}
@ -295,6 +298,17 @@ def GetDefaultBoorus():
boorus[ 'konachan' ] = ClientData.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
name = 'yande.re'
search_url = 'http://yande.re/post?page=%index%&tags=%tags%'
search_separator = '+'
advance_by_page_num = True
thumb_classname = 'thumb'
image_id = None
image_data = 'View larger version'
tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
boorus[ 'yande.re' ] = ClientData.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
name = 'tbib'
search_url = 'http://tbib.org/index.php?page=post&s=list&tags=%tags%&pid=%index%'
search_separator = '+'

View File

@ -346,9 +346,34 @@ class GalleryParserBooru( GalleryParser ):
links = soup.find_all( 'a' )
ok_link = None
better_link = None
for link in links:
if link.string == image_data: image_url = link[ 'href' ]
if link.string is not None:
if link.string.startswith( image_data ):
ok_link = link[ 'href' ]
if link.string.startswith( 'Download PNG' ):
better_link = link[ 'href' ]
break
if better_link is not None:
image_url = better_link
else:
image_url = ok_link
@ -1993,7 +2018,10 @@ class ImportQueueBuilderThread( ImportQueueBuilder ):
for post in posts_list:
if 'md5' not in post: continue
if 'md5' not in post:
continue
image_md5 = post[ 'md5' ].decode( 'base64' )
image_url = image_base + HydrusData.ToString( post[ 'tim' ] ) + post[ 'ext' ]
@ -2001,6 +2029,23 @@ class ImportQueueBuilderThread( ImportQueueBuilder ):
image_infos.append( ( image_md5, image_url, image_original_filename ) )
if 'extra_files' in post:
for extra_file in post[ 'extra_files' ]:
if 'md5' not in extra_file:
continue
image_md5 = extra_file[ 'md5' ].decode( 'base64' )
image_url = image_base + HydrusData.ToString( extra_file[ 'tim' ] ) + extra_file[ 'ext' ]
image_original_filename = extra_file[ 'filename' ] + extra_file[ 'ext' ]
image_infos.append( ( image_md5, image_url, image_original_filename ) )
image_infos_i_can_add = [ image_info for image_info in image_infos if image_info not in image_infos_already_added ]

View File

@ -566,7 +566,7 @@ class LocationsManager( object ):
def HasDownloading( self ): return CC.LOCAL_FILE_SERVICE_KEY in self._pending
def HasLocal( self ): return len( self._current.union( self.LOCAL_LOCATIONS ) ) > 0
def HasLocal( self ): return len( self._current.intersection( self.LOCAL_LOCATIONS ) ) > 0
def ProcessContentUpdate( self, service_key, content_update ):

View File

@ -455,7 +455,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
def _DeleteOrphans( self ):
text = 'This will iterate through the client\'s file store, deleting anything that is no longer needed. It happens automatically every few days, but you can force it here. If you have a lot of files, it will take a few minutes. A popup message will appear when it is done.'
text = 'This will iterate through the client\'s file store, deleting anything that is no longer needed. It happens automatically every few days, but you can force it here. If you have a lot of files, it will take a few minutes. A popup message will show its status.'
with ClientGUIDialogs.DialogYesNo( self, text ) as dlg:
@ -1608,7 +1608,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def _VacuumDatabase( self ):
text = 'This will rebuild the database, rewriting all indices and tables to be contiguous and optimising most operations. It happens automatically every few days, but you can force it here. If you have a large database, it will take a few minutes. A popup message will appear when it is done.'
text = 'This will rebuild the database, rewriting all indices and tables to be contiguous and optimising most operations. It happens automatically every few days, but you can force it here. If you have a large database, it will take a few minutes. A popup message will show its status'
with ClientGUIDialogs.DialogYesNo( self, text ) as dlg:
@ -2997,10 +2997,16 @@ class FrameReviewServices( ClientGUICommon.Frame ):
( action, row ) = service_update.ToTuple()
if action in ( HC.SERVICE_UPDATE_ACCOUNT, HC.SERVICE_UPDATE_REQUEST_MADE ): wx.CallLater( 600, self._DisplayAccountInfo )
if action in ( HC.SERVICE_UPDATE_ACCOUNT, HC.SERVICE_UPDATE_REQUEST_MADE ):
self._DisplayAccountInfo()
else:
wx.CallLater( 200, self._DisplayService )
wx.CallLater( 400, self.Layout ) # ugly hack, but it works for now
self._DisplayService()
self.Layout()

View File

@ -115,6 +115,7 @@ class Animation( wx.Window ):
self._current_frame_index = 0
self._current_frame_drawn = False
self._current_frame_drawn_at = 0.0
self._next_frame_due_at = 0.0
self._paused = False
@ -133,7 +134,7 @@ class Animation( wx.Window ):
self.EventResize( None )
self._timer_video.Start( 16, wx.TIMER_ONE_SHOT )
self._timer_video.Start( 5, wx.TIMER_CONTINUOUS )
def __del__( self ):
@ -168,11 +169,23 @@ class Animation( wx.Window ):
self._current_frame_drawn = True
now_in_ms = HydrusData.GetNowPrecise()
frame_was_supposed_to_be_at = self._current_frame_drawn_at + ( self._video_container.GetDuration( self._current_frame_index ) / 1000 )
next_frame_time_s = self._video_container.GetDuration( self._current_frame_index ) / 1000.0
if 1000.0 * ( now_in_ms - frame_was_supposed_to_be_at ) > 16.7: self._current_frame_drawn_at = now_in_ms
else: self._current_frame_drawn_at = frame_was_supposed_to_be_at
if HydrusData.TimeHasPassedPrecise( self._next_frame_due_at + next_frame_time_s ):
# we are rendering slower than the animation demands, so we'll slow down
# this also initialises self._next_frame_due_at
self._current_frame_drawn_at = HydrusData.GetNowPrecise()
else:
# to make timings more accurate and keep frame throughput accurate, let's pretend we drew this at the right time
self._current_frame_drawn_at = self._next_frame_due_at
self._next_frame_due_at = self._current_frame_drawn_at + next_frame_time_s
def _DrawWhite( self ):
@ -188,7 +201,10 @@ class Animation( wx.Window ):
def EventEraseBackground( self, event ): pass
def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp )
def EventPaint( self, event ):
wx.BufferedPaintDC( self, self._canvas_bmp )
def EventPropagateKey( self, event ):
@ -244,9 +260,6 @@ class Animation( wx.Window ):
if self._video_container.HasFrame( self._current_frame_index ): self._DrawFrame()
else: self._DrawWhite()
self._timer_video.Start( 1, wx.TIMER_ONE_SHOT )
@ -264,8 +277,6 @@ class Animation( wx.Window ):
if self._video_container.HasFrame( self._current_frame_index ): self._DrawFrame()
else: self._DrawWhite()
self._timer_video.Start( 1, wx.TIMER_ONE_SHOT )
self._paused = True
@ -274,24 +285,16 @@ class Animation( wx.Window ):
self._paused = False
self._timer_video.Start( 1, wx.TIMER_ONE_SHOT )
def SetAnimationBar( self, animation_bar ): self._animation_bar = animation_bar
def TIMEREventVideo( self, event ):
MIN_TIMER_TIME = 4
if self.IsShown():
if self._current_frame_drawn:
ms_since_current_frame_drawn = int( 1000.0 * ( HydrusData.GetNowPrecise() - self._current_frame_drawn_at ) )
time_to_update = ms_since_current_frame_drawn + MIN_TIMER_TIME / 2 > self._video_container.GetDuration( self._current_frame_index )
if not self._paused and time_to_update:
if not self._paused and HydrusData.TimeHasPassedPrecise( self._next_frame_due_at ):
num_frames = self._media.GetNumFrames()
@ -305,15 +308,6 @@ class Animation( wx.Window ):
if not self._current_frame_drawn and self._video_container.HasFrame( self._current_frame_index ): self._DrawFrame()
if not self._current_frame_drawn or not self._paused:
ms_since_current_frame_drawn = int( 1000.0 * ( HydrusData.GetNowPrecise() - self._current_frame_drawn_at ) )
ms_until_next_frame = max( MIN_TIMER_TIME, self._video_container.GetDuration( self._current_frame_index ) - ms_since_current_frame_drawn )
self._timer_video.Start( ms_until_next_frame, wx.TIMER_ONE_SHOT )
class AnimationBar( wx.Window ):
@ -495,14 +489,12 @@ class AnimationBar( wx.Window ):
class Canvas( object ):
def __init__( self, file_service_key, image_cache, claim_focus = True ):
def __init__( self, image_cache, claim_focus = True ):
self._file_service_key = file_service_key
self._file_service_key = CC.LOCAL_FILE_SERVICE_KEY
self._image_cache = image_cache
self._claim_focus = claim_focus
self._file_service = wx.GetApp().GetServicesManager().GetService( self._file_service_key )
self._canvas_key = HydrusData.GenerateKey()
self._dirty = True
@ -591,7 +583,12 @@ class Canvas( object ):
with ClientGUIDialogs.DialogYesNo( self, text ) as dlg:
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', { service_key : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, ( self._current_display_media.GetHash(), ) ) ] } )
if dlg.ShowModal() == wx.ID_YES:
hashes = { self._current_display_media.GetHash() }
wx.GetApp().Write( 'content_updates', { service_key : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes ) ] } )
self.SetFocus() # annoying bug because of the modal dialog
@ -725,12 +722,17 @@ class Canvas( object ):
def _Undelete( self ):
with ClientGUIDialogs.DialogYesNo( self, 'Undelete this file?' ) as dlg:
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', { CC.TRASH_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, ( self._current_display_media.GetHash(), ) ) ] } )
locations_manager = self._current_display_media.GetLocationsManager()
self.SetFocus() # annoying bug because of the modal dialog
if CC.TRASH_SERVICE_KEY in locations_manager.GetCurrent():
with ClientGUIDialogs.DialogYesNo( self, 'Undelete this file?' ) as dlg:
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', { CC.TRASH_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, ( self._current_display_media.GetHash(), ) ) ] } )
self.SetFocus() # annoying bug because of the modal dialog
def _ZoomIn( self ):
@ -897,7 +899,15 @@ class Canvas( object ):
def SetMedia( self, media ):
initial_image = self._current_media == None
if media is not None:
locations_manager = media.GetLocationsManager()
if not locations_manager.HasLocal():
media = None
if media != self._current_media:
@ -1154,10 +1164,10 @@ class CanvasWithDetails( Canvas ):
class CanvasPanel( Canvas, wx.Window ):
def __init__( self, parent, page_key, file_service_key ):
def __init__( self, parent, page_key ):
wx.Window.__init__( self, parent, style = wx.SIMPLE_BORDER )
Canvas.__init__( self, file_service_key, wx.GetApp().GetCache( 'preview' ), claim_focus = False )
Canvas.__init__( self, wx.GetApp().GetCache( 'preview' ), claim_focus = False )
self._page_key = page_key
@ -1309,11 +1319,11 @@ class CanvasPanel( Canvas, wx.Window ):
class CanvasFullscreenMediaList( ClientMedia.ListeningMediaList, CanvasWithDetails, ClientGUICommon.FrameThatResizes ):
def __init__( self, my_parent, page_key, file_service_key, media_results ):
def __init__( self, my_parent, page_key, media_results ):
ClientGUICommon.FrameThatResizes.__init__( self, my_parent, resize_option_prefix = 'fs_', title = 'hydrus client fullscreen media viewer' )
CanvasWithDetails.__init__( self, file_service_key, wx.GetApp().GetCache( 'fullscreen' ) )
ClientMedia.ListeningMediaList.__init__( self, file_service_key, media_results )
CanvasWithDetails.__init__( self, wx.GetApp().GetCache( 'fullscreen' ) )
ClientMedia.ListeningMediaList.__init__( self, CC.LOCAL_FILE_SERVICE_KEY, media_results )
self._page_key = page_key
@ -1634,9 +1644,9 @@ class CanvasFullscreenMediaList( ClientMedia.ListeningMediaList, CanvasWithDetai
class CanvasFullscreenMediaListFilter( CanvasFullscreenMediaList ):
def __init__( self, my_parent, page_key, file_service_key, media_results ):
def __init__( self, my_parent, page_key, media_results ):
CanvasFullscreenMediaList.__init__( self, my_parent, page_key, file_service_key, media_results )
CanvasFullscreenMediaList.__init__( self, my_parent, page_key, media_results )
self._kept = set()
self._deleted = set()
@ -1881,6 +1891,12 @@ class CanvasFullscreenMediaListFilter( CanvasFullscreenMediaList ):
self._Skip()
def EventUndelete( self, event ):
if self._HydrusShouldNotProcessInput(): event.Skip()
else: self._Undelete()
def Skip( self, canvas_key ):
if canvas_key == self._canvas_key:
@ -1891,9 +1907,9 @@ class CanvasFullscreenMediaListFilter( CanvasFullscreenMediaList ):
class CanvasFullscreenMediaListFilterInbox( CanvasFullscreenMediaListFilter ):
def __init__( self, my_parent, page_key, file_service_key, media_results ):
def __init__( self, my_parent, page_key, media_results ):
CanvasFullscreenMediaListFilter.__init__( self, my_parent, page_key, file_service_key, media_results )
CanvasFullscreenMediaListFilter.__init__( self, my_parent, page_key, media_results )
HydrusGlobals.pubsub.sub( self, 'Keep', 'canvas_archive' )
HydrusGlobals.pubsub.sub( self, 'Delete', 'canvas_delete' )
@ -1906,9 +1922,9 @@ class CanvasFullscreenMediaListFilterInbox( CanvasFullscreenMediaListFilter ):
class CanvasFullscreenMediaListNavigable( CanvasFullscreenMediaList ):
def __init__( self, my_parent, page_key, file_service_key, media_results ):
def __init__( self, my_parent, page_key, media_results ):
CanvasFullscreenMediaList.__init__( self, my_parent, page_key, file_service_key, media_results )
CanvasFullscreenMediaList.__init__( self, my_parent, page_key, media_results )
HydrusGlobals.pubsub.sub( self, 'Archive', 'canvas_archive' )
HydrusGlobals.pubsub.sub( self, 'Delete', 'canvas_delete' )
@ -1999,9 +2015,9 @@ class CanvasFullscreenMediaListNavigable( CanvasFullscreenMediaList ):
class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaListNavigable ):
def __init__( self, my_parent, page_key, file_service_key, media_results, first_hash ):
def __init__( self, my_parent, page_key, media_results, first_hash ):
CanvasFullscreenMediaListNavigable.__init__( self, my_parent, page_key, file_service_key, media_results )
CanvasFullscreenMediaListNavigable.__init__( self, my_parent, page_key, media_results )
self._timer_slideshow = wx.Timer( self, id = ID_TIMER_SLIDESHOW )
@ -2054,6 +2070,7 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaListNavigable ):
( modifier, key ) = ClientData.GetShortcutFromEvent( event )
if modifier == wx.ACCEL_NORMAL and key in ( wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE ): self._Delete()
elif modifier == wx.ACCEL_SHIFT and key in ( wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE ): self._Undelete()
elif modifier == wx.ACCEL_NORMAL and key in ( wx.WXK_SPACE, wx.WXK_NUMPAD_SPACE ): wx.CallAfter( self._PausePlaySlideshow )
elif modifier == wx.ACCEL_NORMAL and key in ( ord( '+' ), wx.WXK_ADD, wx.WXK_NUMPAD_ADD ): self._ZoomIn()
elif modifier == wx.ACCEL_NORMAL and key in ( ord( '-' ), wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT ): self._ZoomOut()
@ -2283,9 +2300,9 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaListNavigable ):
class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaListNavigable ):
def __init__( self, my_parent, page_key, file_service_key, media_results, shortcuts ):
def __init__( self, my_parent, page_key, media_results, shortcuts ):
CanvasFullscreenMediaListNavigable.__init__( self, my_parent, page_key, file_service_key, media_results )
CanvasFullscreenMediaListNavigable.__init__( self, my_parent, page_key, media_results )
self._shortcuts = shortcuts

View File

@ -640,6 +640,15 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
( inclusive, search_text, entry_predicate ) = self._ParseSearchText()
try:
HydrusTags.CheckTagNotEmpty( search_text )
except HydrusExceptions.SizeException:
return
self._BroadcastChoice( entry_predicate )
@ -800,8 +809,25 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
matches = ClientSearch.FilterPredicates( search_text, predicates )
if self._current_namespace != '': matches.insert( 0, HydrusData.Predicate( HC.PREDICATE_TYPE_NAMESPACE, self._current_namespace, inclusive = inclusive ) )
if '*' in search_text: matches.insert( 0, HydrusData.Predicate( HC.PREDICATE_TYPE_WILDCARD, search_text, inclusive = inclusive ) )
if self._current_namespace != '':
if '*' not in self._current_namespace:
matches.insert( 0, HydrusData.Predicate( HC.PREDICATE_TYPE_NAMESPACE, self._current_namespace, inclusive = inclusive ) )
if half_complete_tag != '':
if '*' in self._current_namespace or ( '*' in half_complete_tag and half_complete_tag != '*' ):
matches.insert( 0, HydrusData.Predicate( HC.PREDICATE_TYPE_WILDCARD, search_text, inclusive = inclusive ) )
elif '*' in search_text:
matches.insert( 0, HydrusData.Predicate( HC.PREDICATE_TYPE_WILDCARD, search_text, inclusive = inclusive ) )
try:
@ -942,6 +968,15 @@ class AutoCompleteDropdownTagsWrite( AutoCompleteDropdownTags ):
( search_text, entry_predicate, sibling_predicate ) = self._ParseSearchText()
try:
HydrusTags.CheckTagNotEmpty( search_text )
except HydrusExceptions.SizeException:
return
if sibling_predicate is not None:
self._BroadcastChoice( sibling_predicate )
@ -3273,7 +3308,11 @@ class NoneableSpinCtrl( wx.Panel ):
hbox = wx.BoxSizer( wx.HORIZONTAL )
hbox.AddF( wx.StaticText( self, label=message + ': ' ), CC.FLAGS_MIXED )
if len( message ) > 0:
hbox.AddF( wx.StaticText( self, label = message + ': ' ), CC.FLAGS_MIXED )
hbox.AddF( self._one, CC.FLAGS_MIXED )
if self._num_dimensions == 2:
@ -3567,7 +3606,10 @@ class PopupMessage( PopupWindow ):
self._cancel_button.Disable()
def EventCopyTBButton( self, event ): HydrusGlobals.pubsub.pub( 'clipboard', 'text', self._job_key.ToString() )
def EventCopyTBButton( self, event ):
HydrusGlobals.pubsub.pub( 'clipboard', 'text', self._job_key.ToString() )
def EventPauseButton( self, event ):

View File

@ -828,7 +828,7 @@ class DialogInputAdvancedTagOptions( Dialog ):
self._name = name
self._initial_ato = ato
Dialog.__init__( self, parent, 'configure default advanced tag options for ' + pretty_name )
Dialog.__init__( self, parent, 'configure default tag import options for ' + pretty_name )
InitialiseControls()

File diff suppressed because it is too large Load Diff

View File

@ -288,8 +288,6 @@ class FullscreenHoverFrameCommands( FullscreenHoverFrame ):
self._info_text.Show()
self._SizeAndPosition()
def AddCommand( self, label, callback ):
@ -367,6 +365,10 @@ class FullscreenHoverFrameCommands( FullscreenHoverFrame ):
self._ResetText()
self.Fit()
self._SizeAndPosition()
def SetIndexString( self, canvas_key, text ):
@ -577,6 +579,8 @@ class FullscreenHoverFrameRatings( FullscreenHoverFrame ):
FullscreenHoverFrame.SetDisplayMedia( self, canvas_key, media )
self._ResetData()
class FullscreenHoverFrameTags( FullscreenHoverFrame ):

View File

@ -161,13 +161,33 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
if len( media_results ) > 0:
ClientGUICanvas.CanvasFullscreenMediaListCustomFilter( self.GetTopLevelParent(), self._page_key, self._file_service_key, media_results, shortcuts )
ClientGUICanvas.CanvasFullscreenMediaListCustomFilter( self.GetTopLevelParent(), self._page_key, media_results, shortcuts )
def _Delete( self, file_service_key ):
def _Delete( self, file_service_key = None ):
if file_service_key is None:
has_local = True in ( CC.LOCAL_FILE_SERVICE_KEY in media.GetLocationsManager().GetCurrent() for media in self._selected_media )
has_trash = True in ( CC.TRASH_SERVICE_KEY in media.GetLocationsManager().GetCurrent() for media in self._selected_media )
if has_local:
file_service_key = CC.LOCAL_FILE_SERVICE_KEY
elif has_trash:
file_service_key = CC.TRASH_SERVICE_KEY
else:
return
hashes = self._GetSelectedHashes( has_location = file_service_key )
@ -195,12 +215,9 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
if dlg.ShowModal() == wx.ID_YES:
if file_service_key in ( CC.LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY ):
if file_service_key == CC.TRASH_SERVICE_KEY:
self.SetFocussedMedia( self._page_key, None )
local_file_services = ( CC.LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY )
if file_service_key in local_file_services:
wx.GetApp().Write( 'content_updates', { file_service_key : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes ) ] } )
@ -251,7 +268,7 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
if first_media is not None and first_media.GetLocationsManager().HasLocal(): first_hash = first_media.GetDisplayMedia().GetHash()
else: first_hash = None
ClientGUICanvas.CanvasFullscreenMediaListBrowser( self.GetTopLevelParent(), self._page_key, self._file_service_key, media_results, first_hash )
ClientGUICanvas.CanvasFullscreenMediaListBrowser( self.GetTopLevelParent(), self._page_key, media_results, first_hash )
@ -261,7 +278,7 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
if len( media_results ) > 0:
ClientGUICanvas.CanvasFullscreenMediaListFilterInbox( self.GetTopLevelParent(), self._page_key, self._file_service_key, media_results )
ClientGUICanvas.CanvasFullscreenMediaListFilterInbox( self.GetTopLevelParent(), self._page_key, media_results )
@ -1394,7 +1411,17 @@ class MediaPanelThumbnails( MediaPanel ):
if self._focussed_media is not None: self._HitMedia( self._focussed_media, True, False )
elif command == 'custom_filter': self._CustomFilter()
elif command == 'delete': self._Delete( data )
elif command == 'delete':
if data is None:
self._Delete()
else:
self._Delete( data )
elif command == 'download': wx.GetApp().Write( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_PENDING, self._GetSelectedHashes( discriminant = CC.DISCRIMINANT_NOT_LOCAL ) ) ] } )
elif command == 'export_files': self._ExportFiles()
elif command == 'export_tags': self._ExportTags()
@ -1956,8 +1983,8 @@ class MediaPanelThumbnails( MediaPanel ):
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_HOME, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_home' ) ),
( wx.ACCEL_NORMAL, wx.WXK_END, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_end' ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_END, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_end' ) ),
( wx.ACCEL_NORMAL, wx.WXK_DELETE, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_KEY ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DELETE, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_KEY ) ),
( wx.ACCEL_NORMAL, wx.WXK_DELETE, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete' ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DELETE, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete' ) ),
( wx.ACCEL_NORMAL, wx.WXK_RETURN, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fullscreen' ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_ENTER, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fullscreen' ) ),
( wx.ACCEL_NORMAL, wx.WXK_UP, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_up' ) ),
@ -1972,6 +1999,8 @@ class MediaPanelThumbnails( MediaPanel ):
( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_HOME, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'shift_scroll_home' ) ),
( wx.ACCEL_SHIFT, wx.WXK_END, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'shift_scroll_end' ) ),
( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_END, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'shift_scroll_end' ) ),
( wx.ACCEL_SHIFT, wx.WXK_DELETE, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'undelete' ) ),
( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_DELETE, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'undelete' ) ),
( wx.ACCEL_SHIFT, wx.WXK_UP, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_up' ) ),
( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_UP, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_up' ) ),
( wx.ACCEL_SHIFT, wx.WXK_DOWN, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_down' ) ),

View File

@ -56,7 +56,7 @@ class Page( wx.SplitterWindow ):
file_service_key = self._management_controller.GetKey( 'file_service' )
self._preview_panel = ClientGUICanvas.CanvasPanel( self._search_preview_split, self._page_key, file_service_key )
self._preview_panel = ClientGUICanvas.CanvasPanel( self._search_preview_split, self._page_key )
self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, file_service_key, initial_media_results )

View File

@ -197,11 +197,17 @@ class MediaList( object ):
if selected_media is not None and media not in selected_media: continue
if media.IsCollection(): media_results.extend( media.GenerateMediaResults( discriminant ) )
if media.IsCollection(): media_results.extend( media.GenerateMediaResults( has_location = has_location, discriminant = discriminant, selected_media = selected_media, unrated = unrated ) )
else:
if discriminant is not None:
if ( discriminant == CC.DISCRIMINANT_INBOX and not media.HasInbox() ) or ( discriminant == CC.DISCRIMINANT_LOCAL and not media.GetLocationsManager().HasLocal() ) or ( discriminant == CC.DISCRIMINANT_NOT_LOCAL and media.GetLocationsManager().HasLocal() ): continue
inbox_failed = discriminant == CC.DISCRIMINANT_INBOX and not media.HasInbox()
local_failed = discriminant == CC.DISCRIMINANT_LOCAL and not media.GetLocationsManager().HasLocal()
not_local_failed = discriminant == CC.DISCRIMINANT_NOT_LOCAL and media.GetLocationsManager().HasLocal()
if inbox_failed or local_failed or not_local_failed: continue
if unrated is not None:
@ -265,11 +271,15 @@ class MediaList( object ):
if action == HC.CONTENT_UPDATE_DELETE:
permanently_deleted = service_key == CC.TRASH_SERVICE_KEY and self._file_service_key in ( CC.TRASH_SERVICE_KEY, CC.LOCAL_FILE_SERVICE_KEY )
local_service_keys = ( CC.TRASH_SERVICE_KEY, CC.LOCAL_FILE_SERVICE_KEY )
repo_deleted = service_key not in ( CC.LOCAL_FILE_SERVICE_KEY, CC.TRASH_SERVICE_KEY ) and service_key == self._file_service_key
deleted_from_trash_and_local_view = service_key == CC.TRASH_SERVICE_KEY and self._file_service_key in local_service_keys
if permanently_deleted or repo_deleted:
deleted_from_local_and_option_set = HC.options[ 'remove_trashed_files' ] and service_key == CC.LOCAL_FILE_SERVICE_KEY and self._file_service_key in local_service_keys
deleted_from_repo_and_repo_view = service_key not in local_service_keys and self._file_service_key == service_key
if deleted_from_trash_and_local_view or deleted_from_local_and_option_set or deleted_from_repo_and_repo_view:
affected_singleton_media = self._GetMedia( hashes, 'singletons' )
affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ]
@ -931,11 +941,26 @@ class MediaResult( object ):
if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): tags_manager.ProcessContentUpdate( service_key, content_update )
elif service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ):
if service_key == CC.LOCAL_FILE_SERVICE_KEY:
if service_type == HC.LOCAL_FILE:
if action == HC.CONTENT_UPDATE_ARCHIVE: inbox = False
elif action == HC.CONTENT_UPDATE_INBOX: inbox = True
if service_key == CC.LOCAL_FILE_SERVICE_KEY:
if action == HC.CONTENT_UPDATE_ADD and CC.TRASH_SERVICE_KEY not in locations_manager.GetCurrent():
inbox = True
elif service_key == CC.TRASH_SERVICE_KEY:
if action == HC.CONTENT_UPDATE_DELETE:
inbox = False
self._tuple = ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, locations_manager, local_ratings, remote_ratings )

View File

@ -49,7 +49,7 @@ options = {}
# Misc
NETWORK_VERSION = 17
SOFTWARE_VERSION = 164
SOFTWARE_VERSION = 165
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -226,6 +226,7 @@ SERVICE_UPDATE_NEWS = 7
SERVICE_UPDATE_NEXT_DOWNLOAD_TIMESTAMP = 8
SERVICE_UPDATE_NEXT_PROCESSING_TIMESTAMP = 9
SERVICE_UPDATE_SUBINDEX_COUNT = 10
SERVICE_UPDATE_PAUSE = 11
ADD = 0
DELETE = 1

View File

@ -131,7 +131,7 @@ class HydrusController( wx.App ):
self._timestamps[ 'last_maintenance_time' ] = HydrusData.GetNow()
if not self._just_woke_from_sleep and self.CurrentlyIdle(): self.MaintainDB()
if not self._just_woke_from_sleep and self.CurrentlyIdle() and not HydrusGlobals.currently_processing_updates: self.MaintainDB()
def WaitUntilWXThreadIdle( self ):

View File

@ -648,6 +648,10 @@ def TimeHasPassed( timestamp ):
return GetNow() > timestamp
def TimeHasPassedPrecise( precise_timestamp ):
return GetNowPrecise() > precise_timestamp
def ToBytes( text_producing_object ):
if type( text_producing_object ) == unicode: return text_producing_object.encode( 'utf-8' )
@ -1340,16 +1344,9 @@ class JobKey( object ):
if 'popup_db_traceback' in self._variables: stuff_to_print.append( self._variables[ 'popup_db_traceback' ] )
try:
return os.linesep.join( stuff_to_print )
except UnicodeEncodeError:
stuff_to_print = [ ToString( s ) for s in stuff_to_print ]
return os.linesep.join( stuff_to_print )
stuff_to_print = [ ToString( s ) for s in stuff_to_print ]
return os.linesep.join( stuff_to_print )
def WaitIfNeeded( self ):
@ -1411,6 +1408,16 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
serialisable_value = ( operator, value, service_key.encode( 'hex' ) )
elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO:
( hash, max_hamming ) = self._value
serialisable_value = ( hash.encode( 'hex' ), max_hamming )
elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_HASH:
serialisable_value = self._value.encode( 'hex' )
else:
serialisable_value = self._value
@ -1429,6 +1436,16 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
self._value = ( operator, value, service_key.decode( 'hex' ) )
elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO:
( serialisable_hash, max_hamming ) = serialisable_value
self._value = ( serialisable_hash.decode( 'hex' ), max_hamming )
elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_HASH:
self._value = serialisable_value.decode( 'hex' )
else:
self._value = serialisable_value
@ -1670,7 +1687,7 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
if not self._inclusive: base = u'-'
else: base = u''
base += namespace + u':*'
base += namespace + u':*anything*'
elif self._predicate_type == HC.PREDICATE_TYPE_WILDCARD:

View File

@ -9,5 +9,6 @@ is_db_updated = False
repos_changed = False
subs_changed = False
currently_processing_updates = False
db_profile_mode = False

View File

@ -110,27 +110,29 @@ def GenerateNumpyImage( path ):
return numpy_image
def GenerateHydrusBitmap( path ):
def GenerateHydrusBitmap( path, compressed = True ):
try:
numpy_image = GenerateNumpyImage( path )
return GenerateHydrusBitmapFromNumPyImage( numpy_image )
return GenerateHydrusBitmapFromNumPyImage( numpy_image, compressed = compressed )
except:
pil_image = GeneratePILImage( path )
return GenerateHydrusBitmapFromPILImage( pil_image )
return GenerateHydrusBitmapFromPILImage( pil_image, compressed = compressed )
def GenerateHydrusBitmapFromNumPyImage( numpy_image ):
def GenerateHydrusBitmapFromNumPyImage( numpy_image, compressed = True ):
( y, x, depth ) = numpy_image.shape
if depth == 4: return HydrusBitmap( numpy_image.data, wx.BitmapBufferFormat_RGBA, ( x, y ) )
else: return HydrusBitmap( numpy_image.data, wx.BitmapBufferFormat_RGB, ( x, y ) )
if depth == 4: buffer_format = wx.BitmapBufferFormat_RGBA
else: buffer_format = wx.BitmapBufferFormat_RGB
return HydrusBitmap( numpy_image.data, buffer_format, ( x, y ), compressed = compressed )
def GenerateNumPyImageFromPILImage( pil_image ):
@ -149,7 +151,7 @@ def GenerateNumPyImageFromPILImage( pil_image ):
return numpy.fromstring( s, dtype = 'uint8' ).reshape( ( h, w, len( s ) // ( w * h ) ) )
def GenerateHydrusBitmapFromPILImage( pil_image ):
def GenerateHydrusBitmapFromPILImage( pil_image, compressed = True ):
if pil_image.mode == 'RGBA' or ( pil_image.mode == 'P' and pil_image.info.has_key( 'transparency' ) ):
@ -164,7 +166,7 @@ def GenerateHydrusBitmapFromPILImage( pil_image ):
format = wx.BitmapBufferFormat_RGB
return HydrusBitmap( pil_image.tostring(), format, pil_image.size )
return HydrusBitmap( pil_image.tostring(), format, pil_image.size, compressed = compressed )
def GeneratePerceptualHash( path ):
@ -438,29 +440,51 @@ def _GetFramesPIL( self ):
# the cv code was initially written by @fluffy_cub
class HydrusBitmap( object ):
def __init__( self, data, format, size ):
def __init__( self, data, format, size, compressed = True ):
self._compressed = compressed
if self._compressed:
self._data = lz4.dumps( data )
else:
self._data = data
self._data = lz4.dumps( data )
self._format = format
self._size = size
def _GetData( self ):
if self._compressed:
return lz4.loads( self._data )
else:
return self._data
def GetWxBitmap( self ):
( width, height ) = self._size
if self._format == wx.BitmapBufferFormat_RGB: return wx.BitmapFromBuffer( width, height, lz4.loads( self._data ) )
else: return wx.BitmapFromBufferRGBA( width, height, lz4.loads( self._data ) )
if self._format == wx.BitmapBufferFormat_RGB: return wx.BitmapFromBuffer( width, height, self._GetData() )
else: return wx.BitmapFromBufferRGBA( width, height, self._GetData() )
def GetWxImage( self ):
( width, height ) = self._size
if self._format == wx.BitmapBufferFormat_RGB: return wx.ImageFromBuffer( width, height, lz4.loads( self._data ) )
if self._format == wx.BitmapBufferFormat_RGB: return wx.ImageFromBuffer( width, height, self._GetData() )
else:
bitmap = wx.BitmapFromBufferRGBA( width, height, lz4.loads( self._data ) )
bitmap = wx.BitmapFromBufferRGBA( width, height, self._GetData() )
image = wx.ImageFromBitmap( bitmap )

View File

@ -42,7 +42,7 @@ HASH_TYPE_SHA512 = 3 # 64 bytes long
# RebuildNamespaces takes namespaces_to_exclude, if you want to curate your namespaces a little better.
# If your GetNamespaces gives garbage, then just hit DeleteNamespaces. I'll be using the result of GetNamespaces to populate
# the advanced tag options widget when people sync with these archives.
# the tag import options widget when people sync with these archives.
# And also feel free to contact me directly at hydrus.admin@gmail.com if you need help.

View File

@ -910,22 +910,33 @@ def CheckTagNotEmpty( tag ):
def CleanTag( tag ):
tag = tag[:1024]
tag = tag.lower()
tag = HydrusData.ToString( tag )
tag.replace( '\r', '' )
tag.replace( '\n', '' )
tag = re.sub( '[\\s]+', ' ', tag, flags = re.UNICODE ) # turns multiple spaces into single spaces
tag = re.sub( '\\s\\Z', '', tag, flags = re.UNICODE ) # removes space at the end
while re.match( '\\s|-|system:', tag, flags = re.UNICODE ) is not None:
try:
tag = re.sub( '\\A(\\s|-|system:)', '', tag, flags = re.UNICODE ) # removes space at the beginning
tag = tag[:1024]
tag = tag.lower()
tag = HydrusData.ToString( tag )
tag.replace( '\r', '' )
tag.replace( '\n', '' )
tag = re.sub( '[\\s]+', ' ', tag, flags = re.UNICODE ) # turns multiple spaces into single spaces
tag = re.sub( '\\s\\Z', '', tag, flags = re.UNICODE ) # removes space at the end
while re.match( '\\s|-|system:', tag, flags = re.UNICODE ) is not None:
tag = re.sub( '\\A(\\s|-|system:)', '', tag, flags = re.UNICODE ) # removes spaces or garbage at the beginning
except Exception as e:
text = 'Was unable to parse the tag: ' + repr( tag )
text += os.linesep * 2
text += str( e )
raise Exception( text )
return tag

View File

@ -393,7 +393,7 @@ class VideoContainer( HydrusImageHandling.RasterContainer ):
finally: self._next_render_index = ( self._next_render_index + 1 ) % num_frames
frame = HydrusImageHandling.GenerateHydrusBitmapFromNumPyImage( numpy_image )
frame = HydrusImageHandling.GenerateHydrusBitmapFromNumPyImage( numpy_image, compressed = False )
wx.CallAfter( self.AddFrame, frame_index, frame )

View File

@ -961,10 +961,46 @@ class TestClientDB( unittest.TestCase ):
def test_pending( self ):
pass
service_key = HydrusData.GenerateKey()
# result = self._read( 'pending', service_key )
# do more when I do remote repos
info = {}
info[ 'host' ] = 'example_host'
info[ 'port' ] = 80
info[ 'access_key' ] = HydrusData.GenerateKey()
new_tag_repo = ( service_key, HC.TAG_REPOSITORY, 'new tag repo', info )
edit_log = [ HydrusData.EditLogActionAdd( new_tag_repo ) ]
self._write( 'update_services', edit_log )
#
hashes = [ os.urandom( 32 ) for i in range( 64 ) ]
tags = [ 'this', 'is', 'a:test' ]
content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PENDING, ( tag, hashes ) ) for tag in tags ]
service_keys_to_content_updates = { service_key : content_updates }
self._write( 'content_updates', service_keys_to_content_updates )
pending = self._read( 'pending', service_key )
self.assertEqual( len( pending ), 4 )
for obj in pending:
self.assertEqual( type( obj ), HydrusData.ClientToServerContentUpdatePackage )
#
edit_log = [ HydrusData.EditLogActionDelete( service_key ) ]
self._write( 'update_services', edit_log )
def test_pixiv_account( self ):

View File

@ -518,7 +518,7 @@ class TestTagObjects( unittest.TestCase ):
p = HydrusData.Predicate( HC.PREDICATE_TYPE_NAMESPACE, 'series' )
self.assertEqual( p.GetUnicode(), u'series:*' )
self.assertEqual( p.GetUnicode(), u'series:*anything*' )
p = HydrusData.Predicate( HC.PREDICATE_TYPE_TAG, 'series', inclusive = False )