diff --git a/help/changelog.html b/help/changelog.html
index e74b361c..170b9a7c 100755
--- a/help/changelog.html
+++ b/help/changelog.html
@@ -8,7 +8,42 @@
changelog
- version 98
+ version 100
+
+ - MVC import_controller system created
+ - download gauges lag reduced
+ - download cancel button desync bug fixed
+ - download error code harmonised
+ - download async code pushed to threads
+ - download job key checking and management improved
+ - download queue feed timer will obey page-hidden pause status
+ - download success and exception handling harmonised
+ - download on-close thread-signalling much improved
+ - download pages now take responsibility for download code factories
+ - download management panel code harmonised
+ - download management panels made much more MVC
+ - download error reporting is much improved
+ - download fail management is much improved
+ - download button response is much faster
+ - download progress display is split into the easier to read file/queue/builder trichotomy
+ - download display indices are more consistent
+ - lots of misc improvements to download code and nomenclature
+ - thumbnail page's refresh and select menu items now only appear when appropriate
+ - thumbnail page's select menu will only show inbox/archive sub-items when appropriate
+ - thumbnail page's menu is less buggy generally
+ - db updates are now parcelised into each version, rather than one big job
+ - improved db version update notification
+ - a problem with updates from ~v70 to >v95 is fixed
+ - fixed a bug in popup message dismissal
+ - database exception formatting improved
+ - database exception display spam reduced
+ - database exception redundant traceback removed
+ - autocomplete character-search-delay time extended from 150ms to 250ms
+ - async wx window destroy code improved in certain places
+ - improved non-wx-thread popup error reporting
+ - some other bugfix, grammar and nomenclature stuff I can't remember
+
+ version 99
- added backup database menu option
- added restore database menu option
@@ -27,7 +62,7 @@
- improved hydrus's thread-communication objects
- cleaned up downloader code and logic and gui display
- version 99
+ version 98
- update to wxpython 3.0
- you can now add tags when importing from a zip
diff --git a/include/ClientConstants.py b/include/ClientConstants.py
index 25e11e18..35efd3dc 100755
--- a/include/ClientConstants.py
+++ b/include/ClientConstants.py
@@ -25,7 +25,6 @@ import urlparse
import urllib
import yaml
import wx
-import zipfile
import zlib
ID_NULL = wx.NewId()
@@ -331,7 +330,7 @@ def CatchExceptionClient( etype, value, tb ):
try: message += traceback.format_exc()
except: pass
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
def GenerateCollectByChoices( sort_by_choices ):
@@ -629,16 +628,34 @@ def IntersectTags( tags_managers, service_identifier = HC.COMBINED_TAG_SERVICE_I
def ShowExceptionClient( e ):
+ if not wx.Thread_IsMain():
+
+ ( etype, value, tb ) = sys.exc_info()
+
+ if etype is not None: e = type( e )( os.linesep.join( traceback.format_exception( etype, value, tb ) ) )
+
+
etype = type( e )
- value = HC.u( e )
-
- trace_list = traceback.format_stack()
-
- trace = ''.join( trace_list )
+ if etype == HydrusExceptions.DBException:
+
+ value = ''
+
+ trace = HC.u( e )
+
+ else:
+
+ value = HC.u( e )
+
+ trace_list = traceback.format_stack()
+
+ trace = ''.join( trace_list )
+
HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_ERROR, ( etype, value, trace ) ) )
+def ShowTextClient( text ): HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, text ) )
+
class AutocompleteMatches():
def __init__( self, matches ):
@@ -1884,26 +1901,6 @@ fourchan_imageboards.append( Imageboard( '/vp/', 'https://sys.4chan.org/vp/post'
DEFAULT_IMAGEBOARDS.append( ( '4chan', fourchan_imageboards ) )
-class Job( threading.Thread ):
-
- def __init__( self, job_key, name ):
-
- threading.Thread.__init__( self, name = name )
-
- self._job_key = job_key
-
-
- def _NotifyAllDone( self ): pass
-
- def _NotifyPartDone( self, i ): pass
-
- def _NotifyStart( self ): pass
-
- def run( self ):
-
- pass # think about this more
-
-
class Log():
def __init__( self ):
diff --git a/include/ClientController.py b/include/ClientController.py
index 0f7e867e..4e18fe26 100755
--- a/include/ClientController.py
+++ b/include/ClientController.py
@@ -222,7 +222,7 @@ The database will be locked while the backup occurs, which may lock up your gui
message += os.linesep + os.linesep
message += 'If the old instance does not close for a _very_ long time, you can usually safely force-close it from task manager.'
- with ClientGUIDialogs.DialogYesNo( None, message, yes_label = 'wait a bit, then try again', no_label = 'quit now' ) as dlg:
+ with ClientGUIDialogs.DialogYesNo( None, message, yes_label = 'wait a bit, then try again', no_label = 'forget it' ) as dlg:
if dlg.ShowModal() == wx.ID_YES: time.sleep( 3 )
else: raise HydrusExceptions.PermissionException()
@@ -288,7 +288,7 @@ The database will be locked while the backup occurs, which may lock up your gui
self.SetSplashText( 'starting daemons' )
if HC.is_first_start: self._gui.DoFirstStart()
- if HC.is_db_updated: wx.CallLater( 1, HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'The client has updated to version ' + HC.u( HC.SOFTWARE_VERSION ) + '!' ) )
+ if HC.is_db_updated: wx.CallLater( 1, HC.ShowText, 'The client has updated to version ' + HC.u( HC.SOFTWARE_VERSION ) + '!' )
self.RestartServer()
self._db.StartDaemons()
@@ -315,13 +315,13 @@ The database will be locked while the backup occurs, which may lock up your gui
wx.MessageBox( 'Woah, bad error:' + os.linesep + os.linesep + traceback.format_exc() )
- try: self._splash.Close()
- except: pass
-
init_result = False
-
- self._splash.Close()
+ finally:
+
+ try: self._splash.Destroy()
+ except: pass
+
return init_result
@@ -367,7 +367,7 @@ The database will be locked while the backup occurs, which may lock up your gui
message = 'Something was already bound to port ' + HC.u( port )
- wx.CallAfter( HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ wx.CallLater( 1, HC.ShowText, message )
except:
@@ -386,7 +386,7 @@ The database will be locked while the backup occurs, which may lock up your gui
message = 'Tried to bind port ' + HC.u( port ) + ' but it failed'
- wx.CallAfter( HC.pubsub.pub, 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ wx.CallLater( 1, HC.ShowText, message )
@@ -427,7 +427,7 @@ Once it is done, the client will restart.'''
self._gui.Hide()
- self._gui.Destroy()
+ self._gui.Close()
self._db.Shutdown()
diff --git a/include/ClientDB.py b/include/ClientDB.py
index 7636ba97..b12bc660 100755
--- a/include/ClientDB.py
+++ b/include/ClientDB.py
@@ -27,7 +27,6 @@ import traceback
import urlparse
import wx
import yaml
-import zipfile
class FileDB():
@@ -210,7 +209,7 @@ class MessageDB():
try: transport_message.VerifyIsFromCorrectPerson( public_key )
except:
- self.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'received a message that did not verify' ) )
+ HC.ShowText( 'received a message that did not verify' )
return
@@ -1420,7 +1419,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
shutil.copytree( HC.CLIENT_FILES_DIR, path + os.path.sep + 'client_files' )
shutil.copytree( HC.CLIENT_THUMBNAILS_DIR, path + os.path.sep + 'client_thumbnails' )
- self.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'Backup done!' ) )
+ HC.ShowText( 'Backup done!' )
def _DeleteFiles( self, c, service_id, hash_ids ):
@@ -3097,31 +3096,13 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
( media_result, ) = self._GetMediaResults( c, HC.LOCAL_FILE_SERVICE_IDENTIFIER, { hash_id } )
- return ( result, hash, media_result )
+ return ( result, media_result )
- else: return ( result, hash, None )
+ else: return ( result, None )
else: return ( result, hash )
- def _ImportFilePage( self, c, page_key, path, advanced_import_options = {}, service_identifiers_to_tags = {}, url = None ):
-
- try:
-
- ( result, hash, media_result ) = self._ImportFile( c, path, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, generate_media_result = True, url = url )
-
- if media_result is not None: self.pub( 'add_media_results', page_key, ( media_result, ) )
-
- self.pub( 'import_done', page_key, result )
-
- except Exception as e:
-
- HC.pubsub.pub( 'import_done', page_key, 'failed', exception = e )
-
- raise
-
-
-
def _InboxFiles( self, c, hash_ids ):
c.executemany( 'INSERT OR IGNORE INTO file_inbox VALUES ( ? );', [ ( hash_id, ) for hash_id in hash_ids ] )
@@ -3724,7 +3705,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self.pub_service_updates( { service_identifier : [ HC.ServiceUpdate( HC.SERVICE_UPDATE_RESET, new_service_identifier ) ] } )
self.pub( 'notify_new_pending' )
self.pub( 'permissions_are_stale' )
- self.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'reset ' + service_name ) )
+ HC.ShowText( 'reset ' + service_name )
def _Set4chanPass( self, c, token, pin, timeout ):
@@ -4390,7 +4371,32 @@ class DB( ServiceDB ):
( db, c ) = self._GetDBCursor()
- self._UpdateDB( c )
+ ( version, ) = c.execute( 'SELECT version FROM version;' ).fetchone()
+
+ while version < HC.SOFTWARE_VERSION:
+
+ HC.app.SetSplashText( 'updating db to v' + HC.u( version + 1 ) )
+
+ time.sleep( 2 )
+
+ try: c.execute( 'BEGIN IMMEDIATE' )
+ except Exception as e: raise HydrusExceptions.DBAccessException( HC.u( e ) )
+
+ try:
+
+ self._UpdateDB( c, version )
+
+ c.execute( 'COMMIT' )
+
+ except:
+
+ c.execute( 'ROLLBACK' )
+
+ raise Exception( 'Updating the client db to version ' + HC.u( version ) + ' caused this error:' + os.linesep + traceback.format_exc() )
+
+
+ ( version, ) = c.execute( 'SELECT version FROM version;' ).fetchone()
+
try: c.execute( 'BEGIN IMMEDIATE' )
except Exception as e: raise HydrusExceptions.DBAccessException( HC.u( e ) )
@@ -4407,7 +4413,7 @@ class DB( ServiceDB ):
message = 'Database commit error:' + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
c.execute( 'ROLLBACK' )
@@ -4855,172 +4861,151 @@ class DB( ServiceDB ):
- def _UpdateDB( self, c ):
+ def _UpdateDB( self, c, version ):
- ( version, ) = c.execute( 'SELECT version FROM version;' ).fetchone()
+ self._UpdateDBOld( c, version )
- if version < HC.SOFTWARE_VERSION:
+ if version == 91:
- try: c.execute( 'BEGIN IMMEDIATE' )
- except Exception as e: raise HydrusExceptions.DBAccessException( HC.u( e ) )
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
- try:
+ HC.options[ 'num_autocomplete_chars' ] = 2
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 93:
+
+ c.execute( 'CREATE TABLE gui_sessions ( name TEXT, info TEXT_YAML );' )
+
+
+ if version == 94:
+
+ # I changed a variable name in account, so old yaml dumps need to be refreshed
+
+ unknown_account = HC.GetUnknownAccount()
+
+ c.execute( 'UPDATE accounts SET account = ?;', ( unknown_account, ) )
+
+ for ( name, info ) in c.execute( 'SELECT name, info FROM gui_sessions;' ).fetchall():
- self._UpdateDBOld( c, version )
-
- if version < 92:
+ for ( page_name, c_text, args, kwargs ) in info:
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'num_autocomplete_chars' ] = 2
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+ if 'do_query' in kwargs: del kwargs[ 'do_query' ]
- if version < 94:
-
- c.execute( 'CREATE TABLE gui_sessions ( name TEXT, info TEXT_YAML );' )
-
-
- if version < 95:
-
- # I changed a variable name in account, so old yaml dumps need to be refreshed
-
- unknown_account = HC.GetUnknownAccount()
-
- c.execute( 'UPDATE accounts SET account = ?;', ( unknown_account, ) )
-
- for ( name, info ) in c.execute( 'SELECT name, info FROM gui_sessions;' ).fetchall():
-
- for ( page_name, c_text, args, kwargs ) in info:
-
- if 'do_query' in kwargs: del kwargs[ 'do_query' ]
-
-
- c.execute( 'UPDATE gui_sessions SET info = ? WHERE name = ?;', ( info, name ) )
-
-
-
- if version < 96:
-
- c.execute( 'COMMIT' )
-
- c.execute( 'PRAGMA foreign_keys = OFF;' )
-
- c.execute( 'BEGIN IMMEDIATE' )
-
- service_basic_info = c.execute( 'SELECT service_id, service_key, type, name FROM services;' ).fetchall()
- service_address_info = c.execute( 'SELECT service_id, host, port, last_error FROM addresses;' ).fetchall()
- service_account_info = c.execute( 'SELECT service_id, access_key, account FROM accounts;' ).fetchall()
- service_repository_info = c.execute( 'SELECT service_id, first_begin, next_begin FROM repositories;' ).fetchall()
- service_ratings_like_info = c.execute( 'SELECT service_id, like, dislike FROM ratings_like;' ).fetchall()
- service_ratings_numerical_info = c.execute( 'SELECT service_id, lower, upper FROM ratings_numerical;' ).fetchall()
-
- service_address_info = { service_id : ( host, port, last_error ) for ( service_id, host, port, last_error ) in service_address_info }
- service_account_info = { service_id : ( access_key, account ) for ( service_id, access_key, account ) in service_account_info }
- service_repository_info = { service_id : ( first_begin, next_begin ) for ( service_id, first_begin, next_begin ) in service_repository_info }
- service_ratings_like_info = { service_id : ( like, dislike ) for ( service_id, like, dislike ) in service_ratings_like_info }
- service_ratings_numerical_info = { service_id : ( lower, upper ) for ( service_id, lower, upper ) in service_ratings_numerical_info }
-
- c.execute( 'DROP TABLE services;' )
- c.execute( 'DROP TABLE addresses;' )
- c.execute( 'DROP TABLE accounts;' )
- c.execute( 'DROP TABLE repositories;' )
- c.execute( 'DROP TABLE ratings_like;' )
- c.execute( 'DROP TABLE ratings_numerical;' )
-
- c.execute( 'CREATE TABLE services ( service_id INTEGER PRIMARY KEY, service_key BLOB_BYTES, service_type INTEGER, name TEXT, info TEXT_YAML );' )
- c.execute( 'CREATE UNIQUE INDEX services_service_key_index ON services ( service_key );' )
-
- services = []
-
- for ( service_id, service_key, service_type, name ) in service_basic_info:
-
- info = {}
-
- if service_id in service_address_info:
-
- ( host, port, last_error ) = service_address_info[ service_id ]
-
- info[ 'host' ] = host
- info[ 'port' ] = port
- info[ 'last_error' ] = last_error
-
-
- if service_id in service_account_info:
-
- ( access_key, account ) = service_account_info[ service_id ]
-
- info[ 'access_key' ] = access_key
- info[ 'account' ] = account
-
-
- if service_id in service_repository_info:
-
- ( first_begin, next_begin ) = service_repository_info[ service_id ]
-
- info[ 'first_begin' ] = first_begin
- info[ 'next_begin' ] = next_begin
-
-
- if service_id in service_ratings_like_info:
-
- ( like, dislike ) = service_ratings_like_info[ service_id ]
-
- info[ 'like' ] = like
- info[ 'dislike' ] = dislike
-
-
- if service_id in service_ratings_numerical_info:
-
- ( lower, upper ) = service_ratings_numerical_info[ service_id ]
-
- info[ 'lower' ] = lower
- info[ 'upper' ] = upper
-
-
- c.execute( 'INSERT INTO services ( service_id, service_key, service_type, name, info ) VALUES ( ?, ?, ?, ?, ? );', ( service_id, sqlite3.Binary( service_key ), service_type, name, info ) )
-
-
- c.execute( 'COMMIT' )
-
- c.execute( 'PRAGMA foreign_keys = ON;' )
-
- c.execute( 'BEGIN IMMEDIATE' )
-
-
- for ( service_id, info ) in c.execute( 'SELECT service_id, info FROM services;' ).fetchall():
-
- if 'account' in info:
-
- info[ 'account' ].MakeStale()
-
- c.execute( 'UPDATE services SET info = ? WHERE service_id = ?;', ( info, service_id ) )
-
-
-
- c.execute( 'UPDATE version SET version = ?;', ( HC.SOFTWARE_VERSION, ) )
-
- c.execute( 'COMMIT' )
-
- HC.is_db_updated = True
-
- except:
-
- c.execute( 'ROLLBACK' )
-
- raise Exception( 'Tried to update the client db, but something went wrong:' + os.linesep + traceback.format_exc() )
+ c.execute( 'UPDATE gui_sessions SET info = ? WHERE name = ?;', ( info, name ) )
- self._UpdateDBOldPost( c, version )
+ if version == 95:
+
+ c.execute( 'COMMIT' )
+
+ c.execute( 'PRAGMA foreign_keys = OFF;' )
+
+ c.execute( 'BEGIN IMMEDIATE' )
+
+ service_basic_info = c.execute( 'SELECT service_id, service_key, type, name FROM services;' ).fetchall()
+ service_address_info = c.execute( 'SELECT service_id, host, port, last_error FROM addresses;' ).fetchall()
+ service_account_info = c.execute( 'SELECT service_id, access_key, account FROM accounts;' ).fetchall()
+ service_repository_info = c.execute( 'SELECT service_id, first_begin, next_begin FROM repositories;' ).fetchall()
+ service_ratings_like_info = c.execute( 'SELECT service_id, like, dislike FROM ratings_like;' ).fetchall()
+ service_ratings_numerical_info = c.execute( 'SELECT service_id, lower, upper FROM ratings_numerical;' ).fetchall()
+
+ service_address_info = { service_id : ( host, port, last_error ) for ( service_id, host, port, last_error ) in service_address_info }
+ service_account_info = { service_id : ( access_key, account ) for ( service_id, access_key, account ) in service_account_info }
+ service_repository_info = { service_id : ( first_begin, next_begin ) for ( service_id, first_begin, next_begin ) in service_repository_info }
+ service_ratings_like_info = { service_id : ( like, dislike ) for ( service_id, like, dislike ) in service_ratings_like_info }
+ service_ratings_numerical_info = { service_id : ( lower, upper ) for ( service_id, lower, upper ) in service_ratings_numerical_info }
+
+ c.execute( 'DROP TABLE services;' )
+ c.execute( 'DROP TABLE addresses;' )
+ c.execute( 'DROP TABLE accounts;' )
+ c.execute( 'DROP TABLE repositories;' )
+ c.execute( 'DROP TABLE ratings_like;' )
+ c.execute( 'DROP TABLE ratings_numerical;' )
+
+ c.execute( 'CREATE TABLE services ( service_id INTEGER PRIMARY KEY, service_key BLOB_BYTES, service_type INTEGER, name TEXT, info TEXT_YAML );' )
+ c.execute( 'CREATE UNIQUE INDEX services_service_key_index ON services ( service_key );' )
+
+ services = []
+
+ for ( service_id, service_key, service_type, name ) in service_basic_info:
+
+ info = {}
+
+ if service_id in service_address_info:
+
+ ( host, port, last_error ) = service_address_info[ service_id ]
+
+ info[ 'host' ] = host
+ info[ 'port' ] = port
+ info[ 'last_error' ] = last_error
+
+
+ if service_id in service_account_info:
+
+ ( access_key, account ) = service_account_info[ service_id ]
+
+ info[ 'access_key' ] = access_key
+ info[ 'account' ] = account
+
+
+ if service_id in service_repository_info:
+
+ ( first_begin, next_begin ) = service_repository_info[ service_id ]
+
+ info[ 'first_begin' ] = first_begin
+ info[ 'next_begin' ] = next_begin
+
+
+ if service_id in service_ratings_like_info:
+
+ ( like, dislike ) = service_ratings_like_info[ service_id ]
+
+ info[ 'like' ] = like
+ info[ 'dislike' ] = dislike
+
+
+ if service_id in service_ratings_numerical_info:
+
+ ( lower, upper ) = service_ratings_numerical_info[ service_id ]
+
+ info[ 'lower' ] = lower
+ info[ 'upper' ] = upper
+
+
+ c.execute( 'INSERT INTO services ( service_id, service_key, service_type, name, info ) VALUES ( ?, ?, ?, ?, ? );', ( service_id, sqlite3.Binary( service_key ), service_type, name, info ) )
+
+
+ c.execute( 'COMMIT' )
+
+ c.execute( 'PRAGMA foreign_keys = ON;' )
+
+ c.execute( 'BEGIN IMMEDIATE' )
+
+
+ for ( service_id, info ) in c.execute( 'SELECT service_id, info FROM services;' ).fetchall():
+
+ if 'account' in info:
+
+ info[ 'account' ].MakeStale()
+
+ c.execute( 'UPDATE services SET info = ? WHERE service_id = ?;', ( info, service_id ) )
+
+
+
+ c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
+
+ HC.is_db_updated = True
def _UpdateDBOld( self, c, version ):
# upgrade to version 4 was too complicated, needs entire rebuild
- if version < 13:
+ if version == 12:
c.execute( 'ALTER TABLE public_tag_repository ADD COLUMN first_begin INTEGER;' )
c.execute( 'ALTER TABLE file_repositories ADD COLUMN first_begin INTEGER;' )
@@ -5039,7 +5024,7 @@ class DB( ServiceDB ):
c.execute( 'DELETE FROM file_downloads;' )
- if version < 16:
+ if version == 15:
c.execute( 'CREATE TABLE accounts ( service_id INTEGER, access_key BLOB_BYTES, account TEXT_YAML );' )
@@ -5171,7 +5156,7 @@ class DB( ServiceDB ):
c.execute( 'DROP TABLE local_thumbnails_resized;' )
- if version < 19:
+ if version == 18:
c.execute( 'CREATE TABLE service_info ( service_id INTEGER, info_type INTEGER, info INTEGER, PRIMARY KEY ( service_id, info_type ) );', )
@@ -5180,12 +5165,12 @@ class DB( ServiceDB ):
c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) SELECT service_id, service_id FROM services WHERE type = ?;', ( HC.TAG_REPOSITORY, ) )
- if version < 21:
+ if version == 20:
c.execute( 'CREATE TABLE files_info_db.perceptual_hashes ( service_id INTEGER, hash_id INTEGER, phash BLOB_BYTES, PRIMARY KEY( service_id, hash_id ) );' )
- if version < 22:
+ if version == 21:
c.execute( 'DELETE FROM perceptual_hashes;' )
@@ -5204,21 +5189,36 @@ class DB( ServiceDB ):
- if version < 24:
+ if version == 23:
c.execute( 'CREATE TABLE imageboard_sites ( site_id INTEGER PRIMARY KEY, name TEXT );', )
c.execute( 'CREATE TABLE imageboards ( site_id INTEGER, name TEXT, imageboard TEXT_YAML, PRIMARY KEY ( site_id, name ) );', )
+ def old_get_site_id( c, name ):
+
+ result = c.execute( 'SELECT site_id FROM imageboard_sites WHERE name = ?;', ( name, ) ).fetchone()
+
+ if result is None:
+
+ c.execute( 'INSERT INTO imageboard_sites ( name ) VALUES ( ? );', ( name, ) )
+
+ site_id = c.lastrowid
+
+ else: ( site_id, ) = result
+
+ return site_id
+
+
for ( site_name, imageboards ) in CC.DEFAULT_IMAGEBOARDS:
- site_id = self._GetSiteId( c, site_name )
+ site_id = old_get_site_id( c, site_name )
c.executemany( 'INSERT INTO imageboards VALUES ( ?, ?, ? );', [ ( site_id, imageboard.GetName(), imageboard ) for imageboard in imageboards ] )
- if version < 26:
+ if version == 25:
( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
@@ -5227,13 +5227,13 @@ class DB( ServiceDB ):
c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
- if version < 27:
+ if version == 26:
c.execute( 'CREATE TABLE files_info_db.urls ( url TEXT PRIMARY KEY, hash_id INTEGER );' )
c.execute( 'CREATE INDEX files_info_db.urls_hash_id ON urls ( hash_id );' )
- if version < 29:
+ if version == 28:
files_db_path = HC.DB_DIR + os.path.sep + 'client_files.db'
@@ -5284,7 +5284,7 @@ class DB( ServiceDB ):
os.remove( files_db_path )
- if version < 30:
+ if version == 29:
thumbnails_db_path = HC.DB_DIR + os.path.sep + 'client_thumbnails.db'
thumbnails_resized_db_path = HC.DB_DIR + os.path.sep + 'client_thumbnails_resized.db'
@@ -5348,14 +5348,14 @@ class DB( ServiceDB ):
c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
- if version < 31:
+ if version == 30:
c.execute( 'CREATE TABLE boorus ( name TEXT PRIMARY KEY, booru TEXT_YAML );', )
c.executemany( 'INSERT INTO boorus VALUES ( ?, ? );', CC.DEFAULT_BOORUS.items() )
- if version < 33:
+ if version == 32:
try: c.execute( 'SELECT name, booru FROM boorus;' ).fetchall()
except:
@@ -5398,1117 +5398,12 @@ class DB( ServiceDB ):
- if version < 35:
-
- c.execute( 'CREATE TABLE active_pending_mappings ( namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( namespace_id, tag_id, hash_id ) );' )
- c.execute( 'CREATE INDEX active_pending_mappings_tag_id_index ON active_pending_mappings ( tag_id );' )
- c.execute( 'CREATE INDEX active_pending_mappings_hash_id_index ON active_pending_mappings ( hash_id );' )
-
- service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence DESC;' ) ]
-
- first_round = True
-
- for service_id in service_ids:
-
- c.execute( 'INSERT OR IGNORE INTO active_pending_mappings SELECT namespace_id, tag_id, hash_id FROM pending_mappings WHERE service_id = ?;', ( service_id, ) )
-
- # is this incredibly inefficient?
- # if this is O( n-squared ) or whatever, just rewrite it as two queries using indices
- if not first_round: c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id || "," || tag_id || "," || hash_id IN ( SELECT namespace_id || "," || tag_id || "," || hash_id FROM deleted_mappings WHERE service_id = ? );', ( service_id, ) )
-
- first_round = False
-
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- default_sort_by_choices = []
-
- default_sort_by_choices.append( ( 'namespaces', [ 'series', 'creator', 'title', 'volume', 'chapter', 'page' ] ) )
- default_sort_by_choices.append( ( 'namespaces', [ 'creator', 'series', 'title', 'volume', 'chapter', 'page' ] ) )
-
- HC.options[ 'sort_by' ] = default_sort_by_choices
-
- HC.options[ 'default_sort' ] = 0
- HC.options[ 'default_collect' ] = 0
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 36:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'gui_capitalisation' ] = False
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 37:
-
- # reconfig inbox -> file_inbox
-
- c.execute( 'CREATE TABLE file_inbox ( hash_id INTEGER PRIMARY KEY );' )
-
- c.execute( 'INSERT INTO file_inbox SELECT hash_id FROM inbox;' )
-
- c.execute( 'DROP TRIGGER inbox_insert_trigger;' )
- c.execute( 'DROP TRIGGER inbox_delete_trigger;' )
-
- c.execute( 'DROP TABLE inbox;' )
-
- inserts = []
- inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id IN ( SELECT service_id FROM files_info WHERE hash_id = new.hash_id ) AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
- c.execute( 'CREATE TRIGGER file_inbox_insert_trigger AFTER INSERT ON file_inbox BEGIN ' + ' '.join( inserts ) + ' END;' )
- deletes = []
- deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id IN ( SELECT service_id FROM files_info WHERE hash_id = old.hash_id ) AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
- c.execute( 'CREATE TRIGGER file_inbox_delete_trigger DELETE ON file_inbox BEGIN ' + ' '.join( deletes ) + ' END;' )
-
- # now set up new messaging stuff
-
- c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' )
- c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' )
- c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' )
-
- c.execute( 'CREATE VIRTUAL TABLE conversation_subjects USING fts4( subject );' )
-
- c.execute( 'CREATE TABLE message_attachments ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE, hash_id INTEGER );' )
-
- c.execute( 'CREATE TABLE message_depots ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, contact_id INTEGER, last_check INTEGER, check_period INTEGER, private_key TEXT );' )
- c.execute( 'CREATE UNIQUE INDEX message_depots_contact_id_index ON message_depots ( contact_id );' )
-
- c.execute( 'CREATE TABLE message_destination_map ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_to INTEGER, status_id INTEGER, PRIMARY KEY ( message_id, contact_id_to ) );' )
- c.execute( 'CREATE INDEX message_destination_map_contact_id_to_index ON message_destination_map ( contact_id_to );' )
- c.execute( 'CREATE INDEX message_destination_map_status_id_index ON message_destination_map ( status_id );' )
-
- c.execute( 'CREATE TABLE message_downloads ( service_id INTEGER REFERENCES services ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE );' )
- c.execute( 'CREATE INDEX message_downloads_service_id_index ON message_downloads ( service_id );' )
-
- c.execute( 'CREATE TABLE message_drafts ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, recipients_visible INTEGER_BOOLEAN );' )
-
- c.execute( 'CREATE TABLE message_inbox ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE );' )
-
- c.execute( 'CREATE TABLE message_keys ( message_id INTEGER PRIMARY KEY, message_key BLOB_BYTES );' )
- c.execute( 'CREATE INDEX message_keys_message_key_index ON message_keys ( message_key );' )
-
- c.execute( 'CREATE VIRTUAL TABLE message_bodies USING fts4( body );' )
-
- c.execute( 'CREATE TABLE messages ( conversation_id INTEGER REFERENCES message_keys ( message_id ) ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_from INTEGER, timestamp INTEGER, PRIMARY KEY( conversation_id, message_id ) );' )
- c.execute( 'CREATE UNIQUE INDEX messages_message_id_index ON messages ( message_id );' )
- c.execute( 'CREATE INDEX messages_contact_id_from_index ON messages ( contact_id_from );' )
- c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' )
-
- c.execute( 'CREATE TABLE statuses ( status_id INTEGER PRIMARY KEY, status TEXT );' )
- c.execute( 'CREATE UNIQUE INDEX statuses_status_index ON statuses ( status );' )
-
- c.execute( 'INSERT INTO contacts ( contact_id, contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ?, ? );', ( 1, None, None, 'Anonymous', 'internet', 0 ) )
- # fill the contact key and public key info in for hydrus admin
-
-
- if version < 38:
-
- c.execute( 'COMMIT' )
- c.execute( 'PRAGMA journal_mode=WAL;' ) # possibly didn't work last time, cause of sqlite dll issue
- c.execute( 'BEGIN IMMEDIATE' )
-
- contacts_contents = c.execute( 'SELECT * FROM contacts;' ).fetchall()
-
- c.execute( 'DROP TABLE contacts;' )
-
- c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' )
- c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' )
- c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' )
-
- c.executemany( 'INSERT INTO contacts VALUES ( ?, ?, ?, ?, ?, ? );', contacts_contents )
-
- c.execute( 'CREATE TABLE message_statuses_to_apply ( message_id INTEGER, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' )
-
-
- if version < 39:
-
- # I accidentally added some buffer public keys in v38, so this is to HC.u() them
- updates = [ ( HC.u( public_key ), contact_id ) for ( contact_id, public_key ) in c.execute( 'SELECT contact_id, public_key FROM contacts;' ).fetchall() ]
-
- c.executemany( 'UPDATE contacts SET public_key = ? WHERE contact_id = ?;', updates )
-
- with open( HC.STATIC_DIR + os.sep + 'contact - hydrus admin.yaml', 'rb' ) as f: hydrus_admin = yaml.safe_load( f.read() )
-
- ( public_key, name, host, port ) = hydrus_admin.GetInfo()
-
- contact_key = hydrus_admin.GetContactKey()
-
- c.execute( 'INSERT OR IGNORE INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? );', ( sqlite3.Binary( contact_key ), public_key, name, host, port ) )
-
-
- if version < 41:
-
- # better name and has foreign key assoc
-
- c.execute( 'CREATE TABLE incoming_message_statuses ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' )
-
- incoming_status_inserts = c.execute( 'SELECT * FROM message_statuses_to_apply;' ).fetchall()
-
- c.executemany( 'INSERT INTO incoming_message_statuses VALUES ( ?, ?, ? );', incoming_status_inserts )
-
- c.execute( 'DROP TABLE message_statuses_to_apply;' )
-
- # delete all drafts cause of plaintext->xml conversion
-
- message_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM message_drafts;' ) ]
-
- c.execute( 'DELETE FROM message_keys WHERE message_id IN ' + HC.SplayListForDB( message_ids ) + ';' )
- c.execute( 'DELETE FROM message_bodies WHERE docid IN ' + HC.SplayListForDB( message_ids ) + ';' )
- c.execute( 'DELETE FROM conversation_subjects WHERE docid IN ' + HC.SplayListForDB( message_ids ) + ';' )
-
- c.execute( 'ALTER TABLE message_depots ADD COLUMN receive_anon INTEGER_BOOLEAN' )
- c.execute( 'UPDATE message_depots SET receive_anon = ?;', ( True, ) )
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- system_predicates = {}
-
- system_predicates[ 'age' ] = ( 0, 0, 0, 7 )
- system_predicates[ 'duration' ] = ( 3, 0, 0 )
- system_predicates[ 'height' ] = ( 1, 1200 )
- system_predicates[ 'limit' ] = 600
- system_predicates[ 'mime' ] = ( 0, 0 )
- system_predicates[ 'num_tags' ] = ( 0, 4 )
- system_predicates[ 'ratio' ] = ( 0, 16, 9 )
- system_predicates[ 'size' ] = ( 0, 200, 3 )
- system_predicates[ 'width' ] = ( 1, 1920 )
-
- HC.options[ 'file_system_predicates' ] = system_predicates
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 42:
-
- self._RecalcCombinedMappings( c )
-
- c.execute( 'CREATE TABLE autocomplete_tags_cache ( file_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, tag_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, current_count INTEGER, pending_count INTEGER, PRIMARY KEY ( file_service_id, tag_service_id, namespace_id, tag_id ) );' )
- c.execute( 'CREATE INDEX autocomplete_tags_cache_tag_service_id_namespace_id_tag_id_index ON autocomplete_tags_cache ( tag_service_id, namespace_id, tag_id );' )
-
- c.execute( 'DROP TRIGGER files_info_insert_trigger;' )
- c.execute( 'DROP TRIGGER files_info_delete_trigger;' )
-
- c.execute( 'DROP TRIGGER mappings_insert_trigger;' )
- c.execute( 'DROP TRIGGER mappings_delete_trigger;' )
-
- inserts = []
- inserts.append( 'DELETE FROM deleted_files WHERE service_id = new.service_id AND hash_id = new.hash_id;' )
- inserts.append( 'DELETE FROM file_transfers WHERE service_id_to = new.service_id AND hash_id = new.hash_id;' )
- inserts.append( 'UPDATE service_info SET info = info + new.size WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_TOTAL_SIZE ) + ';' )
- inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_FILES ) + ';' )
- inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND new.mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS ) + ';' )
- inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
- inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';' )
- inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = new.service_id;' )
- c.execute( 'CREATE TRIGGER files_info_insert_trigger AFTER INSERT ON files_info BEGIN ' + ' '.join( inserts ) + ' END;' )
- deletes = []
- deletes.append( 'DELETE FROM file_petitions WHERE service_id = old.service_id AND hash_id = old.hash_id;' )
- deletes.append( 'UPDATE service_info SET info = info - old.size WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_TOTAL_SIZE ) + ';' )
- deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_FILES ) + ';' )
- deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND old.mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS ) + ';' )
- deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
- deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';' )
- deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = old.service_id;' )
- c.execute( 'CREATE TRIGGER files_info_delete_trigger DELETE ON files_info BEGIN ' + ' '.join( deletes ) + ' END;' )
-
- inserts = []
- inserts.append( 'DELETE FROM deleted_mappings WHERE service_id = new.service_id AND hash_id = new.hash_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
- inserts.append( 'DELETE FROM pending_mappings WHERE service_id = new.service_id AND hash_id = new.hash_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
- inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_MAPPINGS ) + ';' )
- inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS ) ) + ';' )
- inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = new.service_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
- inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
- c.execute( 'CREATE TRIGGER mappings_insert_trigger AFTER INSERT ON mappings BEGIN ' + ' '.join( inserts ) + ' END;' )
- deletes = []
- deletes.append( 'DELETE FROM mapping_petitions WHERE service_id = old.service_id AND hash_id = old.hash_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
- deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_MAPPINGS ) + ';' )
- deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS ) ) + ';' )
- deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = old.service_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
- deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
- c.execute( 'CREATE TRIGGER mappings_delete_trigger DELETE ON mappings BEGIN ' + ' '.join( deletes ) + ' END;' )
-
- inserts = []
- inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = new.service_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
- inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
- c.execute( 'CREATE TRIGGER pending_mappings_insert_trigger AFTER INSERT ON pending_mappings BEGIN ' + ' '.join( inserts ) + ' END;' )
- deletes = []
- deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = old.service_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
- deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
- c.execute( 'CREATE TRIGGER pending_mappings_delete_trigger DELETE ON pending_mappings BEGIN ' + ' '.join( deletes ) + ' END;' )
-
- # All of 4chan's post urls are now https. There is a 301 redirect from the http, but let's update anyway.
-
- all_imageboards = c.execute( 'SELECT site_id, name, imageboard FROM imageboards;' ).fetchall()
-
- for ( site_id, name, imageboard ) in all_imageboards:
-
- imageboard._post_url = imageboard._post_url.replace( 'http', 'https' )
-
-
- c.executemany( 'UPDATE imageboards SET imageboard = ? WHERE site_id = ? AND name = ?;', [ ( imageboard, site_id, name ) for ( site_id, name, imageboard ) in all_imageboards ] )
-
-
- if version < 43:
-
- name = 'konachan'
- search_url = 'http://konachan.com/post?page=%index%&tags=%tags%'
- search_separator = '+'
- gallery_advance_num = 1
- thumb_classname = 'thumb'
- image_id = None
- image_data = 'View larger version'
- tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
-
- booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) )
-
-
- if version < 44:
-
- name = 'e621'
-
- result = c.execute( 'SELECT booru FROM boorus WHERE name = ?;', ( name, ) ).fetchone()
-
- if result is not None:
-
- ( booru, ) = result
-
- ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData()
-
- thumb_classname = 'thumb blacklist' # from thumb_blacklisted
-
- booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
-
- c.execute( 'UPDATE boorus SET booru = ? WHERE name = ?;', ( booru, booru.GetName() ) )
-
-
- name = 'rule34@booru.org'
-
- result = c.execute( 'SELECT booru FROM boorus WHERE name = ?;', ( name, ) ).fetchone()
-
- if result is not None:
-
- ( booru, ) = result
-
- ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData()
-
- gallery_advance_num = 50
-
- booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
-
- c.execute( 'UPDATE boorus SET booru = ? WHERE name = ?;', ( booru, booru.GetName() ) )
-
-
- c.execute( 'DROP TRIGGER files_info_insert_trigger;' )
- c.execute( 'DROP TRIGGER files_info_delete_trigger;' )
- c.execute( 'DROP TRIGGER deleted_files_insert_trigger;' )
- c.execute( 'DROP TRIGGER deleted_files_delete_trigger;' )
- c.execute( 'DROP TRIGGER file_inbox_insert_trigger;' )
- c.execute( 'DROP TRIGGER file_inbox_delete_trigger;' )
- c.execute( 'DROP TRIGGER mappings_insert_trigger;' )
- c.execute( 'DROP TRIGGER mappings_delete_trigger;' )
- c.execute( 'DROP TRIGGER deleted_mappings_insert_trigger;' )
- c.execute( 'DROP TRIGGER deleted_mappings_delete_trigger;' )
- c.execute( 'DROP TRIGGER pending_mappings_insert_trigger;' )
- c.execute( 'DROP TRIGGER pending_mappings_delete_trigger;' )
-
- c.execute( 'UPDATE services SET name = ? WHERE name = ?;', ( 'local files renamed', 'local files' ) )
- c.execute( 'UPDATE services SET name = ? WHERE type = ?;', ( 'local files', HC.LOCAL_FILE ) )
-
- c.execute( 'INSERT INTO services ( type, name ) VALUES ( ?, ? );', ( HC.LOCAL_TAG, 'local tags' ) )
-
- local_tag_service_id = c.lastrowid
-
- c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) SELECT ?, CASE WHEN MIN( precedence ) NOT NULL THEN MIN( precedence ) - 1 ELSE 0 END FROM tag_service_precedence;', ( local_tag_service_id, ) )
-
-
- if version < 46:
-
- name = 'rule34@paheal'
- search_url = 'http://rule34.paheal.net/post/list/%tags%/%index%'
- search_separator = '%20'
- gallery_advance_num = 1
- thumb_classname = 'thumb'
- image_id = 'main_image'
- image_data = None
- tag_classnames_to_namespaces = { 'tag_name' : '' }
-
- booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) )
-
- name = 'tbib'
- search_url = 'http://tbib.org/index.php?page=post&s=list&tags=%tags%&pid=%index%'
- search_separator = '+'
- gallery_advance_num = 25
- thumb_classname = 'thumb'
- image_id = None
- image_data = 'Original image'
- tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
-
- booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) )
-
-
- if version < 48:
-
- c.execute( 'CREATE TABLE local_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, rating REAL, PRIMARY KEY( service_id, hash_id ) );' )
- c.execute( 'CREATE INDEX local_ratings_hash_id_index ON local_ratings ( hash_id );' )
- c.execute( 'CREATE INDEX local_ratings_rating_index ON local_ratings ( rating );' )
-
- c.execute( 'CREATE TABLE remote_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, count INTEGER, rating REAL, score REAL, PRIMARY KEY( service_id, hash_id ) );' )
- c.execute( 'CREATE INDEX remote_ratings_hash_id_index ON remote_ratings ( hash_id );' )
- c.execute( 'CREATE INDEX remote_ratings_rating_index ON remote_ratings ( rating );' )
- c.execute( 'CREATE INDEX remote_ratings_score_index ON remote_ratings ( score );' )
-
- c.execute( 'CREATE TABLE ratings_numerical ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, lower INTEGER, upper INTEGER );' )
-
- c.execute( 'CREATE TABLE ratings_like ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, like TEXT, dislike TEXT );' )
-
-
- if version < 49:
-
- result = c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( '', ) ).fetchone()
-
- if result is not None:
-
- ( tag_id, ) = result
-
- c.execute( 'DELETE FROM mappings WHERE tag_id = ?;', ( tag_id, ) )
- c.execute( 'DELETE FROM pending_mappings WHERE tag_id = ?;', ( tag_id, ) )
- c.execute( 'DELETE FROM active_mappings WHERE tag_id = ?;', ( tag_id, ) )
- c.execute( 'DELETE FROM active_pending_mappings WHERE tag_id = ?;', ( tag_id, ) )
-
-
- HC.app.SetSplashText( 'making new cache, may take a minute' )
-
- c.execute( 'CREATE TABLE existing_tags ( namespace_id INTEGER, tag_id INTEGER, PRIMARY KEY( namespace_id, tag_id ) );' )
- c.execute( 'CREATE INDEX existing_tags_tag_id_index ON existing_tags ( tag_id );' )
-
- all_tag_ids = set()
-
- all_tag_ids.update( c.execute( 'SELECT namespace_id, tag_id FROM mappings;' ).fetchall() )
- all_tag_ids.update( c.execute( 'SELECT namespace_id, tag_id FROM pending_mappings;' ).fetchall() )
-
- c.executemany( 'INSERT INTO existing_tags ( namespace_id, tag_id ) VALUES ( ?, ? );', all_tag_ids )
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'show_all_tags_in_autocomplete' ] = True
-
- HC.options[ 'file_system_predicates' ][ 'local_rating_numerical' ] = ( 0, 3 )
- HC.options[ 'file_system_predicates' ][ 'local_rating_like' ] = 0
-
- shortcuts = {}
-
- shortcuts[ wx.ACCEL_NORMAL ] = {}
- shortcuts[ wx.ACCEL_CTRL ] = {}
- shortcuts[ wx.ACCEL_ALT ] = {}
- shortcuts[ wx.ACCEL_SHIFT ] = {}
-
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F3 ] = 'manage_tags'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F4 ] = 'manage_ratings'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F5 ] = 'refresh'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F7 ] = 'archive'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F12 ] = 'filter'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F9 ] = 'new_page'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'T' ) ] = 'new_page'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'W' ) ] = 'close_page'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'R' ) ] = 'show_hide_splitters'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'S' ) ] = 'set_search_focus'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'I' ) ] = 'synchronised_wait_switch'
-
- HC.options[ 'shortcuts' ] = shortcuts
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 50:
-
- c.execute( 'CREATE TABLE fourchan_pass ( token TEXT, pin TEXT, timeout INTEGER );' )
-
-
- if version < 51:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- shortcuts = HC.options[ 'shortcuts' ]
-
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'B' ) ] = 'frame_back'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'N' ) ] = 'frame_next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F11 ] = 'ratings_filter'
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
- c.execute( 'CREATE TABLE ratings_filter ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, min REAL, max REAL, PRIMARY KEY( service_id, hash_id ) );' )
-
-
- if version < 52:
-
- HC.app.SetSplashText( 'making new indices' )
-
- c.execute( 'DROP INDEX mappings_namespace_id_index;' )
- c.execute( 'DROP INDEX mappings_tag_id_index;' )
-
- c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' )
- c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' )
-
- HC.app.SetSplashText( 'making some more new indices' )
-
- c.execute( 'DROP INDEX pending_mappings_namespace_id_index;' )
- c.execute( 'DROP INDEX pending_mappings_tag_id_index;' )
-
- c.execute( 'CREATE INDEX pending_mappings_service_id_tag_id_index ON pending_mappings ( service_id, tag_id );' )
- c.execute( 'CREATE INDEX pending_mappings_service_id_hash_id_index ON pending_mappings ( service_id, hash_id );' )
-
- c.execute( 'CREATE TABLE shutdown_timestamps ( shutdown_type INTEGER PRIMARY KEY, timestamp INTEGER );' )
-
-
- if version < 54:
-
- c.execute( 'DROP INDEX services_type_name_index;' )
-
- c.execute( 'ALTER TABLE services ADD COLUMN service_key BLOB_BYTES;' )
- c.execute( 'CREATE UNIQUE INDEX services_service_key_index ON services ( service_key );' )
-
- service_info = c.execute( 'SELECT service_id, type FROM services;' ).fetchall()
-
- updates = []
-
- for ( service_id, service_type ) in service_info:
-
- if service_type == HC.LOCAL_FILE: service_key = 'local files'
- elif service_type == HC.LOCAL_TAG: service_key = 'local tags'
- else: service_key = os.urandom( 32 )
-
- updates.append( ( sqlite3.Binary( service_key ), service_id ) )
-
-
- c.executemany( 'UPDATE services SET service_key = ? WHERE service_id = ?;', updates )
-
- c.execute( 'UPDATE files_info SET num_frames = num_frames / 1000 WHERE mime = ?;', ( HC.VIDEO_FLV, ) )
-
-
- if version < 55:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'default_tag_repository' ] = HC.LOCAL_TAG_SERVICE_IDENTIFIER
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 56:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'default_tag_sort' ] = CC.SORT_BY_LEXICOGRAPHIC_ASC
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 57:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- shortcuts = HC.options[ 'shortcuts' ]
-
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_UP ] = 'previous'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_LEFT ] = 'previous'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_UP ] = 'previous'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_LEFT ] = 'previous'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_PAGEUP ] = 'previous'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_PAGEUP ] = 'previous'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_DOWN ] = 'next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_RIGHT ] = 'next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_DOWN ] = 'next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_RIGHT ] = 'next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_PAGEDOWN ] = 'next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_PAGEDOWN ] = 'next'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_HOME ] = 'first'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_HOME ] = 'first'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_END ] = 'last'
- shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_END ] = 'last'
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 58:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- shortcuts = HC.options[ 'shortcuts' ]
-
- shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_F7 ] = 'inbox'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'M' ) ] = 'set_media_focus'
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 59:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- shortcuts = HC.options[ 'shortcuts' ]
-
- shortcuts[ wx.ACCEL_NORMAL ][ ord( 'F' ) ] = 'fullscreen_switch'
-
- HC.options[ 'fullscreen_borderless' ] = True
- HC.options[ 'default_collect' ] = None
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 60:
-
- c.execute( 'CREATE TABLE pixiv_account ( pixiv_id TEXT, password TEXT );' )
-
- c.execute( 'CREATE TABLE favourite_custom_filter_actions ( name TEXT, actions TEXT_YAML );' )
-
-
- if version < 61:
-
- c.execute( 'CREATE TABLE hydrus_sessions ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, session_key BLOB_BYTES, expiry INTEGER );' )
-
-
- if version < 63:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- system_predicates = HC.options[ 'file_system_predicates' ]
-
- ( sign, size, unit ) = system_predicates[ 'size' ]
-
- system_predicates[ 'size' ] = ( sign, size, 1 )
-
- system_predicates[ 'num_words' ] = ( 0, 30000 )
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 64:
-
- c.execute( 'CREATE TABLE web_sessions ( name TEXT PRIMARY KEY, cookies TEXT_YAML, expiry INTEGER );' )
-
- c.execute( 'UPDATE ADDRESSES SET host = ? WHERE host = ?;', ( 'hydrus.no-ip.org', '98.214.1.156' ) )
-
- c.execute( 'DELETE FROM service_info WHERE info_type IN ( 6, 7 );' ) # resetting thumb count, to see if it breaks again
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- shortcuts = HC.options[ 'shortcuts' ]
-
- shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_UP ] = 'pan_up'
- shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_DOWN ] = 'pan_down'
- shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_LEFT ] = 'pan_left'
- shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_RIGHT ] = 'pan_right'
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 65:
-
- HC.app.SetSplashText( 'renaming db files' )
-
- filenames = dircache.listdir( HC.CLIENT_FILES_DIR )
-
- i = 1
-
- for filename in filenames:
-
- if '.' not in filename:
-
- try:
-
- old_path = HC.CLIENT_FILES_DIR + os.path.sep + filename
-
- mime = HydrusFileHandling.GetMime( old_path )
-
- new_path = old_path + HC.mime_ext_lookup[ mime ]
-
- shutil.move( old_path, new_path )
-
- os.chmod( new_path, stat.S_IREAD )
-
- except: pass
-
-
- i += 1
-
- if i % 250 == 0: HC.app.SetSplashText( 'renaming file ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
-
-
- c.execute( 'CREATE TABLE subscriptions ( subscriptions TEXT_YAML );' )
-
-
- if version < 66:
-
- c.execute( 'DELETE FROM boorus;' )
-
- c.executemany( 'INSERT INTO boorus VALUES ( ?, ? );', CC.DEFAULT_BOORUS.items() )
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'pause_repo_sync' ] = False
- HC.options[ 'pause_subs_sync' ] = False
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 67:
-
- result = c.execute( 'SELECT subscriptions FROM subscriptions;' ).fetchone()
-
- if result is None: subscriptions = []
- else: ( subscriptions, ) = result
-
- c.execute( 'DROP TABLE subscriptions;' )
-
- c.execute( 'CREATE TABLE subscriptions ( site_download_type INTEGER, name TEXT, info TEXT_YAML, PRIMARY KEY( site_download_type, name ) );' )
-
- inserts = [ ( site_download_type, name, [ query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ] ) for ( site_download_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) in subscriptions ]
-
- c.executemany( 'INSERT INTO subscriptions ( site_download_type, name, info ) VALUES ( ?, ?, ? );', inserts )
-
- #
-
- HC.app.SetSplashText( 'creating new db directories' )
-
- hex_chars = '0123456789abcdef'
-
- for ( one, two ) in itertools.product( hex_chars, hex_chars ):
-
- dir = HC.CLIENT_FILES_DIR + os.path.sep + one + two
-
- if not os.path.exists( dir ): os.mkdir( dir )
-
- dir = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + one + two
-
- if not os.path.exists( dir ): os.mkdir( dir )
-
-
- HC.app.SetSplashText( 'generating file cache' )
-
- filenames = dircache.listdir( HC.CLIENT_FILES_DIR )
-
- i = 1
-
- for filename in filenames:
-
- try:
-
- source_path = HC.CLIENT_FILES_DIR + os.path.sep + filename
-
- first_two_chars = filename[:2]
-
- destination_path = HC.CLIENT_FILES_DIR + os.path.sep + first_two_chars + os.path.sep + filename
-
- shutil.move( source_path, destination_path )
-
- except: continue
-
- i += 1
-
- if i % 100 == 0: HC.app.SetSplashText( 'moving files - ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
-
-
- HC.app.SetSplashText( 'generating thumbnail cache' )
-
- filenames = dircache.listdir( HC.CLIENT_THUMBNAILS_DIR )
-
- i = 1
-
- for filename in filenames:
-
- try:
-
- source_path = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + filename
-
- first_two_chars = filename[:2]
-
- destination_path = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + first_two_chars + os.path.sep + filename
-
- shutil.move( source_path, destination_path )
-
- except: continue
-
- i += 1
-
- if i % 100 == 0: HC.app.SetSplashText( 'moving thumbnails - ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
-
-
-
- if version < 68:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'confirm_client_exit' ] = False
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
- #
-
- boorus = []
-
- name = 'e621'
- search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%'
- search_separator = '%20'
- advance_by_page_num = True
- thumb_classname = 'thumb'
- image_id = None
- image_data = 'Download'
- tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
-
- boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
-
- name = 'danbooru'
- search_url = 'http://danbooru.donmai.us/posts?page=%index%&tags=%tags%'
- search_separator = '%20'
- advance_by_page_num = True
- thumb_classname = 'post-preview'
- image_id = 'image'
- image_data = None
- tag_classnames_to_namespaces = { 'category-0' : '', 'category-4' : 'character', 'category-3' : 'series', 'category-1' : 'creator' }
-
- boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
-
- for booru in boorus:
-
- name = booru.GetName()
-
- c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
-
-
-
- if version < 69:
-
- boorus = []
-
- name = 'e621'
- search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%'
- search_separator = '%20'
- advance_by_page_num = True
- thumb_classname = 'thumb'
- image_id = None
- image_data = 'Download'
- tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
-
- boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
-
- for booru in boorus:
-
- name = booru.GetName()
-
- c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
-
-
- #
-
- c.execute( 'CREATE TABLE tag_siblings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER, PRIMARY KEY ( service_id, old_namespace_id, old_tag_id ) );' )
-
- #
-
- subscriptions = c.execute( 'SELECT site_download_type, name, info FROM subscriptions;' ).fetchall()
-
- paused = False
-
- for ( site_download_type, name, ( query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) ) in subscriptions:
-
- updated_info = [ query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache, paused ]
-
- c.execute( 'UPDATE subscriptions SET info = ? WHERE site_download_type = ? AND name = ?;', ( updated_info, site_download_type, name ) )
-
-
-
- if version < 70:
-
- c.execute( 'CREATE TABLE tag_parents ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER );' )
- c.execute( 'CREATE UNIQUE INDEX tag_parents_all_index ON tag_parents ( service_id, old_namespace_id, old_tag_id, new_namespace_id, new_tag_id );' )
-
- #
-
- c.execute( 'CREATE VIRTUAL TABLE tags_fts4 USING fts4( tag );' )
- c.execute( 'INSERT INTO tags_fts4 ( docid, tag ) SELECT tag_id, tag FROM tags;' )
-
-
- if version < 71:
-
- init_service_identifiers = [ HC.COMBINED_FILE_SERVICE_IDENTIFIER, HC.COMBINED_TAG_SERVICE_IDENTIFIER ]
-
- for init_service_identifier in init_service_identifiers:
-
- ( service_key, service_type, service_name ) = init_service_identifier.GetInfo()
-
- c.execute( 'INSERT INTO services ( service_key, type, name ) VALUES ( ?, ?, ? );', ( sqlite3.Binary( service_key ), service_type, service_name ) )
-
-
- c.execute( 'ALTER TABLE mappings ADD COLUMN status INTEGER;' )
- c.execute( 'UPDATE mappings SET status = ?;', ( HC.CURRENT, ) )
-
- c.execute( 'CREATE INDEX mappings_service_id_status_index ON mappings ( service_id, status );' )
- c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
-
- c.execute( 'ANALYZE' )
-
- deleted_mappings = set( c.execute( 'SELECT service_id, namespace_id, tag_id, hash_id FROM deleted_mappings;' ).fetchall() )
- pending_mappings = set( c.execute( 'SELECT service_id, namespace_id, tag_id, hash_id FROM pending_mappings;' ).fetchall() )
-
- deleted_pending_mappings = pending_mappings.intersection( deleted_mappings )
-
- deleted_mappings.difference_update( deleted_pending_mappings )
- pending_mappings.difference_update( deleted_pending_mappings )
-
- c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.DELETED_PENDING ) for ( service_id, namespace_id, tag_id, hash_id ) in deleted_pending_mappings ) )
- c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.DELETED ) for ( service_id, namespace_id, tag_id, hash_id ) in deleted_mappings ) )
- c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.PENDING ) for ( service_id, namespace_id, tag_id, hash_id ) in pending_mappings ) )
-
- c.execute( 'DROP TABLE deleted_mappings;' )
- c.execute( 'DROP TABLE pending_mappings;' )
- c.execute( 'DROP TABLE active_mappings;' )
- c.execute( 'DROP TABLE active_pending_mappings;' )
-
- #
-
- c.execute( 'DELETE FROM service_info;' )
-
- service_identifiers = self._GetServiceIdentifiers( c, ( HC.FILE_REPOSITORY, HC.LOCAL_FILE, HC.TAG_REPOSITORY, HC.LOCAL_TAG ) )
-
- for service_identifier in service_identifiers: self._GetServiceInfo( c, service_identifier )
-
- #
-
- self._combined_file_service_id = self._GetServiceId( c, HC.COMBINED_FILE_SERVICE_IDENTIFIER )
- self._combined_tag_service_id = self._GetServiceId( c, HC.COMBINED_TAG_SERVICE_IDENTIFIER )
-
- c.execute( 'DELETE FROM autocomplete_tags_cache;' )
-
- self._RecalcCombinedMappings( c )
-
- self._FattenAutocompleteCache( c )
-
- #
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- HC.options[ 'play_dumper_noises' ] = True
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
- if version < 72:
-
- c.execute( 'ALTER TABLE tag_siblings ADD COLUMN status INTEGER;' )
- c.execute( 'UPDATE tag_siblings SET status = ?;', ( HC.CURRENT, ) )
-
- c.execute( 'ALTER TABLE tag_parents ADD COLUMN status INTEGER;' )
- c.execute( 'UPDATE tag_parents SET status = ?;', ( HC.CURRENT, ) )
-
- tag_siblings = c.execute( 'SELECT * FROM tag_siblings;' ).fetchall()
- tag_parents = c.execute( 'SELECT * FROM tag_parents;' ).fetchall()
-
- c.execute( 'DROP TABLE tag_siblings;' )
- c.execute( 'DROP TABLE tag_parents;' )
-
- c.execute( 'CREATE TABLE tag_parents ( service_id INTEGER REFERENCES services ON DELETE CASCADE, child_namespace_id INTEGER, child_tag_id INTEGER, parent_namespace_id INTEGER, parent_tag_id INTEGER, status INTEGER, PRIMARY KEY ( service_id, child_namespace_id, child_tag_id, parent_namespace_id, parent_tag_id, status ) );' )
- c.execute( 'CREATE INDEX tag_parents_service_id_status_index ON tag_parents ( service_id, status );' )
- c.execute( 'CREATE INDEX tag_parents_status_index ON tag_parents ( status );' )
-
- c.execute( 'CREATE TABLE tag_parent_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, child_namespace_id INTEGER, child_tag_id INTEGER, parent_namespace_id INTEGER, parent_tag_id INTEGER, status INTEGER, reason_id INTEGER, PRIMARY KEY ( service_id, child_namespace_id, child_tag_id, parent_namespace_id, parent_tag_id, status ) );' )
-
- c.execute( 'CREATE TABLE tag_siblings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER, status INTEGER, PRIMARY KEY ( service_id, old_namespace_id, old_tag_id, status ) );' )
- c.execute( 'CREATE INDEX tag_siblings_service_id_status_index ON tag_siblings ( service_id, status );' )
- c.execute( 'CREATE INDEX tag_siblings_status_index ON tag_siblings ( status );' )
-
- c.execute( 'CREATE TABLE tag_sibling_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER, status INTEGER, reason_id INTEGER, PRIMARY KEY ( service_id, old_namespace_id, old_tag_id, status ) );' )
-
- c.executemany( 'INSERT INTO tag_siblings VALUES ( ?, ?, ?, ?, ?, ? );', tag_siblings )
- c.executemany( 'INSERT INTO tag_parents VALUES ( ?, ?, ?, ?, ?, ? );', tag_parents )
-
- #
-
- c.execute( 'ALTER TABLE mappings RENAME TO mappings_old;' )
-
- c.execute( 'DROP INDEX mappings_hash_id_index;' )
- c.execute( 'DROP INDEX mappings_service_id_tag_id_index;' )
- c.execute( 'DROP INDEX mappings_service_id_hash_id_index;' )
- c.execute( 'DROP INDEX mappings_service_id_status_index;' )
- c.execute( 'DROP INDEX mappings_status_index;' )
-
- c.execute( 'CREATE TABLE mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, status INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, status ) );' )
- c.execute( 'CREATE INDEX mappings_hash_id_index ON mappings ( hash_id );' )
- c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' )
- c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' )
- c.execute( 'CREATE INDEX mappings_service_id_status_index ON mappings ( service_id, status );' )
- c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
-
- c.execute( 'INSERT INTO mappings SELECT * FROM mappings_old;' )
-
- c.execute( 'DROP TABLE mappings_old;' )
-
- #
-
- download_data = c.execute( 'SELECT service_id_to, hash_id FROM file_transfers;' ).fetchall()
-
- c.execute( 'DROP TABLE file_transfers;' )
-
- c.execute( 'CREATE TABLE file_transfers ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
- c.execute( 'CREATE INDEX file_transfers_hash_id ON file_transfers ( hash_id );' )
-
- c.executemany( 'INSERT OR IGNORE INTO file_transfers ( service_id, hash_id ) VALUES ( ?, ? );', download_data )
-
- #
-
- c.execute( 'DELETE FROM service_info;' )
-
-
- if version < 73:
-
- inserts = c.execute( 'SELECT service_id, namespace_id, tag_id, hash_id FROM mappings WHERE status = ?;', ( HC.DELETED_PENDING, ) ).fetchall()
-
- c.execute( 'DELETE FROM mappings WHERE status = ?;', ( HC.DELETED_PENDING, ) )
-
- c.executemany( 'INSERT INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.DELETED ) for ( service_id, namespace_id, tag_id, hash_id ) in inserts ) )
- c.executemany( 'INSERT INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.PENDING ) for ( service_id, namespace_id, tag_id, hash_id ) in inserts ) )
-
- #
-
- self._combined_file_service_id = self._GetServiceId( c, HC.COMBINED_FILE_SERVICE_IDENTIFIER )
- self._combined_tag_service_id = self._GetServiceId( c, HC.COMBINED_TAG_SERVICE_IDENTIFIER )
-
- c.execute( 'DELETE FROM autocomplete_tags_cache;' )
-
- self._RecalcCombinedMappings( c )
-
- self._FattenAutocompleteCache( c )
-
-
- if version < 74:
-
- fourchan_imageboards = []
-
- fourchan_imageboards.append( CC.Imageboard( '/asp/', 'https://sys.4chan.org/asp/post', 75, CC.fourchan_typical_form_fields, CC.fourchan_typical_restrictions ) )
- fourchan_imageboards.append( CC.Imageboard( '/gd/', 'https://sys.4chan.org/gd/post', 75, CC.fourchan_typical_form_fields, { CC.RESTRICTION_MAX_FILE_SIZE : 8388608, CC.RESTRICTION_ALLOWED_MIMES : [ HC.IMAGE_GIF, HC.IMAGE_PNG, HC.IMAGE_JPEG, HC.APPLICATION_PDF ] } ) )
- fourchan_imageboards.append( CC.Imageboard( '/lgbt/', 'https://sys.4chan.org/lgbt/post', 75, CC.fourchan_typical_form_fields, CC.fourchan_typical_restrictions ) )
- fourchan_imageboards.append( CC.Imageboard( '/vr/', 'https://sys.4chan.org/vr/post', 75, CC.fourchan_typical_form_fields, CC.fourchan_typical_restrictions ) )
- fourchan_imageboards.append( CC.Imageboard( '/wsg/', 'https://sys.4chan.org/wsg/post', 75, CC.fourchan_typical_form_fields, { CC.RESTRICTION_MAX_FILE_SIZE : 4194304, CC.RESTRICTION_ALLOWED_MIMES : [ HC.IMAGE_GIF ] } ) )
-
- new_imageboards = []
-
- new_imageboards.append( ( '4chan', fourchan_imageboards ) )
-
- for ( site_name, imageboards ) in new_imageboards:
-
- site_id = self._GetSiteId( c, site_name )
-
- try: c.executemany( 'INSERT INTO imageboards VALUES ( ?, ?, ? );', [ ( site_id, imageboard.GetName(), imageboard ) for imageboard in imageboards ] )
- except: pass
-
-
- self._combined_file_service_id = self._GetServiceId( c, HC.COMBINED_FILE_SERVICE_IDENTIFIER )
- self._combined_tag_service_id = self._GetServiceId( c, HC.COMBINED_TAG_SERVICE_IDENTIFIER )
-
- c.execute( 'DELETE FROM autocomplete_tags_cache;' )
-
- self._RecalcCombinedMappings( c )
-
- self._FattenAutocompleteCache( c )
-
-
- if version < 77:
-
- c.execute( 'CREATE TABLE import_folders ( path TEXT, details TEXT_YAML );' )
-
-
- if version < 79:
-
- c.execute( 'DELETE FROM import_folders;' )
-
-
- if version < 80:
-
- boorus = []
-
- name = 'e621'
- search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%'
- search_separator = '%20'
- advance_by_page_num = True
- thumb_classname = 'thumb'
- image_id = None
- image_data = 'Download'
- tag_classnames_to_namespaces = { 'tag-type-general categorized-tag' : '', 'tag-type-character categorized-tag' : 'character', 'tag-type-copyright categorized-tag' : 'series', 'tag-type-artist categorized-tag' : 'creator', 'tag-type-species categorized-tag' : 'species' }
-
- boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
-
- for booru in boorus:
-
- name = booru.GetName()
-
- c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
-
-
-
- if version < 85:
-
- boorus = []
-
- name = 'e621'
- search_url = 'https://e621.net/post/index?page=%index%&tags=%tags%'
- search_separator = '%20'
- advance_by_page_num = True
- thumb_classname = 'thumb'
- image_id = None
- image_data = 'Download'
- tag_classnames_to_namespaces = { 'tag-type-general categorized-tag' : '', 'tag-type-character categorized-tag' : 'character', 'tag-type-copyright categorized-tag' : 'series', 'tag-type-artist categorized-tag' : 'creator', 'tag-type-species categorized-tag' : 'species' }
-
- boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
-
- for booru in boorus:
-
- name = booru.GetName()
-
- c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
-
- c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
-
-
-
- if version < 88:
-
- c.execute( 'CREATE TABLE namespace_blacklists ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, blacklist INTEGER_BOOLEAN, namespaces TEXT_YAML );' )
-
-
- if version < 91:
-
- ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
-
- shortcuts = HC.options[ 'shortcuts' ]
-
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'Z' ) ] = 'undo'
- shortcuts[ wx.ACCEL_CTRL ][ ord( 'Y' ) ] = 'redo'
-
- HC.options[ 'shortcuts' ] = shortcuts
-
- c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
-
-
-
- def _UpdateDBOldPost( self, c, version ):
-
- if version == 34: # == is important here
+ if version == 33:
try:
+ c.execute( 'COMMIT' )
+
main_db_path = HC.DB_DIR + os.path.sep + 'client_main.db'
mappings_db_path = HC.DB_DIR + os.path.sep + 'client_mappings.db'
active_mappings_db_path = HC.DB_DIR + os.path.sep + 'client_active_mappings.db'
@@ -6604,6 +5499,8 @@ class DB( ServiceDB ):
os.remove( active_mappings_db_path )
os.remove( files_info_db_path )
+ c.execute( 'BEGIN IMMEDIATE' )
+
except:
@@ -6613,6 +5510,1098 @@ class DB( ServiceDB ):
+ if version == 34:
+
+ c.execute( 'CREATE TABLE active_pending_mappings ( namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( namespace_id, tag_id, hash_id ) );' )
+ c.execute( 'CREATE INDEX active_pending_mappings_tag_id_index ON active_pending_mappings ( tag_id );' )
+ c.execute( 'CREATE INDEX active_pending_mappings_hash_id_index ON active_pending_mappings ( hash_id );' )
+
+ service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence DESC;' ) ]
+
+ first_round = True
+
+ for service_id in service_ids:
+
+ c.execute( 'INSERT OR IGNORE INTO active_pending_mappings SELECT namespace_id, tag_id, hash_id FROM pending_mappings WHERE service_id = ?;', ( service_id, ) )
+
+ # is this incredibly inefficient?
+ # if this is O( n-squared ) or whatever, just rewrite it as two queries using indices
+ if not first_round: c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id || "," || tag_id || "," || hash_id IN ( SELECT namespace_id || "," || tag_id || "," || hash_id FROM deleted_mappings WHERE service_id = ? );', ( service_id, ) )
+
+ first_round = False
+
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ default_sort_by_choices = []
+
+ default_sort_by_choices.append( ( 'namespaces', [ 'series', 'creator', 'title', 'volume', 'chapter', 'page' ] ) )
+ default_sort_by_choices.append( ( 'namespaces', [ 'creator', 'series', 'title', 'volume', 'chapter', 'page' ] ) )
+
+ HC.options[ 'sort_by' ] = default_sort_by_choices
+
+ HC.options[ 'default_sort' ] = 0
+ HC.options[ 'default_collect' ] = 0
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 35:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'gui_capitalisation' ] = False
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 36:
+
+ # reconfig inbox -> file_inbox
+
+ c.execute( 'CREATE TABLE file_inbox ( hash_id INTEGER PRIMARY KEY );' )
+
+ c.execute( 'INSERT INTO file_inbox SELECT hash_id FROM inbox;' )
+
+ c.execute( 'DROP TRIGGER inbox_insert_trigger;' )
+ c.execute( 'DROP TRIGGER inbox_delete_trigger;' )
+
+ c.execute( 'DROP TABLE inbox;' )
+
+ inserts = []
+ inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id IN ( SELECT service_id FROM files_info WHERE hash_id = new.hash_id ) AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
+ c.execute( 'CREATE TRIGGER file_inbox_insert_trigger AFTER INSERT ON file_inbox BEGIN ' + ' '.join( inserts ) + ' END;' )
+ deletes = []
+ deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id IN ( SELECT service_id FROM files_info WHERE hash_id = old.hash_id ) AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
+ c.execute( 'CREATE TRIGGER file_inbox_delete_trigger DELETE ON file_inbox BEGIN ' + ' '.join( deletes ) + ' END;' )
+
+ # now set up new messaging stuff
+
+ c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' )
+ c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' )
+ c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' )
+
+ c.execute( 'CREATE VIRTUAL TABLE conversation_subjects USING fts4( subject );' )
+
+ c.execute( 'CREATE TABLE message_attachments ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE, hash_id INTEGER );' )
+
+ c.execute( 'CREATE TABLE message_depots ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, contact_id INTEGER, last_check INTEGER, check_period INTEGER, private_key TEXT );' )
+ c.execute( 'CREATE UNIQUE INDEX message_depots_contact_id_index ON message_depots ( contact_id );' )
+
+ c.execute( 'CREATE TABLE message_destination_map ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_to INTEGER, status_id INTEGER, PRIMARY KEY ( message_id, contact_id_to ) );' )
+ c.execute( 'CREATE INDEX message_destination_map_contact_id_to_index ON message_destination_map ( contact_id_to );' )
+ c.execute( 'CREATE INDEX message_destination_map_status_id_index ON message_destination_map ( status_id );' )
+
+ c.execute( 'CREATE TABLE message_downloads ( service_id INTEGER REFERENCES services ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE );' )
+ c.execute( 'CREATE INDEX message_downloads_service_id_index ON message_downloads ( service_id );' )
+
+ c.execute( 'CREATE TABLE message_drafts ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, recipients_visible INTEGER_BOOLEAN );' )
+
+ c.execute( 'CREATE TABLE message_inbox ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE );' )
+
+ c.execute( 'CREATE TABLE message_keys ( message_id INTEGER PRIMARY KEY, message_key BLOB_BYTES );' )
+ c.execute( 'CREATE INDEX message_keys_message_key_index ON message_keys ( message_key );' )
+
+ c.execute( 'CREATE VIRTUAL TABLE message_bodies USING fts4( body );' )
+
+ c.execute( 'CREATE TABLE messages ( conversation_id INTEGER REFERENCES message_keys ( message_id ) ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_from INTEGER, timestamp INTEGER, PRIMARY KEY( conversation_id, message_id ) );' )
+ c.execute( 'CREATE UNIQUE INDEX messages_message_id_index ON messages ( message_id );' )
+ c.execute( 'CREATE INDEX messages_contact_id_from_index ON messages ( contact_id_from );' )
+ c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' )
+
+ c.execute( 'CREATE TABLE statuses ( status_id INTEGER PRIMARY KEY, status TEXT );' )
+ c.execute( 'CREATE UNIQUE INDEX statuses_status_index ON statuses ( status );' )
+
+ c.execute( 'INSERT INTO contacts ( contact_id, contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ?, ? );', ( 1, None, None, 'Anonymous', 'internet', 0 ) )
+ # fill the contact key and public key info in for hydrus admin
+
+
+ if version == 37:
+
+ c.execute( 'COMMIT' )
+ c.execute( 'PRAGMA journal_mode=WAL;' ) # possibly didn't work last time, cause of sqlite dll issue
+ c.execute( 'BEGIN IMMEDIATE' )
+
+ contacts_contents = c.execute( 'SELECT * FROM contacts;' ).fetchall()
+
+ c.execute( 'DROP TABLE contacts;' )
+
+ c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' )
+ c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' )
+ c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' )
+
+ c.executemany( 'INSERT INTO contacts VALUES ( ?, ?, ?, ?, ?, ? );', contacts_contents )
+
+ c.execute( 'CREATE TABLE message_statuses_to_apply ( message_id INTEGER, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' )
+
+
+ if version == 38:
+
+ # I accidentally added some buffer public keys in v38, so this is to HC.u() them
+ updates = [ ( HC.u( public_key ), contact_id ) for ( contact_id, public_key ) in c.execute( 'SELECT contact_id, public_key FROM contacts;' ).fetchall() ]
+
+ c.executemany( 'UPDATE contacts SET public_key = ? WHERE contact_id = ?;', updates )
+
+ with open( HC.STATIC_DIR + os.sep + 'contact - hydrus admin.yaml', 'rb' ) as f: hydrus_admin = yaml.safe_load( f.read() )
+
+ ( public_key, name, host, port ) = hydrus_admin.GetInfo()
+
+ contact_key = hydrus_admin.GetContactKey()
+
+ c.execute( 'INSERT OR IGNORE INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? );', ( sqlite3.Binary( contact_key ), public_key, name, host, port ) )
+
+
+ if version == 40:
+
+ # better name and has foreign key assoc
+
+ c.execute( 'CREATE TABLE incoming_message_statuses ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' )
+
+ incoming_status_inserts = c.execute( 'SELECT * FROM message_statuses_to_apply;' ).fetchall()
+
+ c.executemany( 'INSERT INTO incoming_message_statuses VALUES ( ?, ?, ? );', incoming_status_inserts )
+
+ c.execute( 'DROP TABLE message_statuses_to_apply;' )
+
+ # delete all drafts cause of plaintext->xml conversion
+
+ message_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM message_drafts;' ) ]
+
+ c.execute( 'DELETE FROM message_keys WHERE message_id IN ' + HC.SplayListForDB( message_ids ) + ';' )
+ c.execute( 'DELETE FROM message_bodies WHERE docid IN ' + HC.SplayListForDB( message_ids ) + ';' )
+ c.execute( 'DELETE FROM conversation_subjects WHERE docid IN ' + HC.SplayListForDB( message_ids ) + ';' )
+
+ c.execute( 'ALTER TABLE message_depots ADD COLUMN receive_anon INTEGER_BOOLEAN' )
+ c.execute( 'UPDATE message_depots SET receive_anon = ?;', ( True, ) )
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ system_predicates = {}
+
+ system_predicates[ 'age' ] = ( 0, 0, 0, 7 )
+ system_predicates[ 'duration' ] = ( 3, 0, 0 )
+ system_predicates[ 'height' ] = ( 1, 1200 )
+ system_predicates[ 'limit' ] = 600
+ system_predicates[ 'mime' ] = ( 0, 0 )
+ system_predicates[ 'num_tags' ] = ( 0, 4 )
+ system_predicates[ 'ratio' ] = ( 0, 16, 9 )
+ system_predicates[ 'size' ] = ( 0, 200, 3 )
+ system_predicates[ 'width' ] = ( 1, 1920 )
+
+ HC.options[ 'file_system_predicates' ] = system_predicates
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 41:
+
+ c.execute( 'CREATE TABLE autocomplete_tags_cache ( file_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, tag_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, current_count INTEGER, pending_count INTEGER, PRIMARY KEY ( file_service_id, tag_service_id, namespace_id, tag_id ) );' )
+ c.execute( 'CREATE INDEX autocomplete_tags_cache_tag_service_id_namespace_id_tag_id_index ON autocomplete_tags_cache ( tag_service_id, namespace_id, tag_id );' )
+
+ c.execute( 'DROP TRIGGER files_info_insert_trigger;' )
+ c.execute( 'DROP TRIGGER files_info_delete_trigger;' )
+
+ c.execute( 'DROP TRIGGER mappings_insert_trigger;' )
+ c.execute( 'DROP TRIGGER mappings_delete_trigger;' )
+
+ inserts = []
+ inserts.append( 'DELETE FROM deleted_files WHERE service_id = new.service_id AND hash_id = new.hash_id;' )
+ inserts.append( 'DELETE FROM file_transfers WHERE service_id_to = new.service_id AND hash_id = new.hash_id;' )
+ inserts.append( 'UPDATE service_info SET info = info + new.size WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_TOTAL_SIZE ) + ';' )
+ inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_FILES ) + ';' )
+ inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND new.mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS ) + ';' )
+ inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
+ inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';' )
+ inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = new.service_id;' )
+ c.execute( 'CREATE TRIGGER files_info_insert_trigger AFTER INSERT ON files_info BEGIN ' + ' '.join( inserts ) + ' END;' )
+ deletes = []
+ deletes.append( 'DELETE FROM file_petitions WHERE service_id = old.service_id AND hash_id = old.hash_id;' )
+ deletes.append( 'UPDATE service_info SET info = info - old.size WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_TOTAL_SIZE ) + ';' )
+ deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_FILES ) + ';' )
+ deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND old.mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS ) + ';' )
+ deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_INBOX ) + ';' )
+ deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';' )
+ deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = old.service_id;' )
+ c.execute( 'CREATE TRIGGER files_info_delete_trigger DELETE ON files_info BEGIN ' + ' '.join( deletes ) + ' END;' )
+
+ inserts = []
+ inserts.append( 'DELETE FROM deleted_mappings WHERE service_id = new.service_id AND hash_id = new.hash_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
+ inserts.append( 'DELETE FROM pending_mappings WHERE service_id = new.service_id AND hash_id = new.hash_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
+ inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_MAPPINGS ) + ';' )
+ inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS ) ) + ';' )
+ inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = new.service_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
+ inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
+ c.execute( 'CREATE TRIGGER mappings_insert_trigger AFTER INSERT ON mappings BEGIN ' + ' '.join( inserts ) + ' END;' )
+ deletes = []
+ deletes.append( 'DELETE FROM mapping_petitions WHERE service_id = old.service_id AND hash_id = old.hash_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
+ deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND info_type = ' + HC.u( HC.SERVICE_INFO_NUM_MAPPINGS ) + ';' )
+ deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS ) ) + ';' )
+ deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = old.service_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
+ deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
+ c.execute( 'CREATE TRIGGER mappings_delete_trigger DELETE ON mappings BEGIN ' + ' '.join( deletes ) + ' END;' )
+
+ inserts = []
+ inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = new.service_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
+ inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' )
+ c.execute( 'CREATE TRIGGER pending_mappings_insert_trigger AFTER INSERT ON pending_mappings BEGIN ' + ' '.join( inserts ) + ' END;' )
+ deletes = []
+ deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = old.service_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
+ deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' )
+ c.execute( 'CREATE TRIGGER pending_mappings_delete_trigger DELETE ON pending_mappings BEGIN ' + ' '.join( deletes ) + ' END;' )
+
+ # All of 4chan's post urls are now https. There is a 301 redirect from the http, but let's update anyway.
+
+ all_imageboards = c.execute( 'SELECT site_id, name, imageboard FROM imageboards;' ).fetchall()
+
+ for ( site_id, name, imageboard ) in all_imageboards:
+
+ imageboard._post_url = imageboard._post_url.replace( 'http', 'https' )
+
+
+ c.executemany( 'UPDATE imageboards SET imageboard = ? WHERE site_id = ? AND name = ?;', [ ( imageboard, site_id, name ) for ( site_id, name, imageboard ) in all_imageboards ] )
+
+
+ if version == 42:
+
+ name = 'konachan'
+ search_url = 'http://konachan.com/post?page=%index%&tags=%tags%'
+ search_separator = '+'
+ gallery_advance_num = 1
+ thumb_classname = 'thumb'
+ image_id = None
+ image_data = 'View larger version'
+ tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
+
+ booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) )
+
+
+ if version == 43:
+
+ name = 'e621'
+
+ result = c.execute( 'SELECT booru FROM boorus WHERE name = ?;', ( name, ) ).fetchone()
+
+ if result is not None:
+
+ ( booru, ) = result
+
+ ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData()
+
+ thumb_classname = 'thumb blacklist' # from thumb_blacklisted
+
+ booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
+
+ c.execute( 'UPDATE boorus SET booru = ? WHERE name = ?;', ( booru, booru.GetName() ) )
+
+
+ name = 'rule34@booru.org'
+
+ result = c.execute( 'SELECT booru FROM boorus WHERE name = ?;', ( name, ) ).fetchone()
+
+ if result is not None:
+
+ ( booru, ) = result
+
+ ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData()
+
+ gallery_advance_num = 50
+
+ booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
+
+ c.execute( 'UPDATE boorus SET booru = ? WHERE name = ?;', ( booru, booru.GetName() ) )
+
+
+ c.execute( 'DROP TRIGGER files_info_insert_trigger;' )
+ c.execute( 'DROP TRIGGER files_info_delete_trigger;' )
+ c.execute( 'DROP TRIGGER deleted_files_insert_trigger;' )
+ c.execute( 'DROP TRIGGER deleted_files_delete_trigger;' )
+ c.execute( 'DROP TRIGGER file_inbox_insert_trigger;' )
+ c.execute( 'DROP TRIGGER file_inbox_delete_trigger;' )
+ c.execute( 'DROP TRIGGER mappings_insert_trigger;' )
+ c.execute( 'DROP TRIGGER mappings_delete_trigger;' )
+ c.execute( 'DROP TRIGGER deleted_mappings_insert_trigger;' )
+ c.execute( 'DROP TRIGGER deleted_mappings_delete_trigger;' )
+ c.execute( 'DROP TRIGGER pending_mappings_insert_trigger;' )
+ c.execute( 'DROP TRIGGER pending_mappings_delete_trigger;' )
+
+ c.execute( 'UPDATE services SET name = ? WHERE name = ?;', ( 'local files renamed', 'local files' ) )
+ c.execute( 'UPDATE services SET name = ? WHERE type = ?;', ( 'local files', HC.LOCAL_FILE ) )
+
+ c.execute( 'INSERT INTO services ( type, name ) VALUES ( ?, ? );', ( HC.LOCAL_TAG, 'local tags' ) )
+
+ local_tag_service_id = c.lastrowid
+
+ c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) SELECT ?, CASE WHEN MIN( precedence ) NOT NULL THEN MIN( precedence ) - 1 ELSE 0 END FROM tag_service_precedence;', ( local_tag_service_id, ) )
+
+
+ if version == 45:
+
+ name = 'rule34@paheal'
+ search_url = 'http://rule34.paheal.net/post/list/%tags%/%index%'
+ search_separator = '%20'
+ gallery_advance_num = 1
+ thumb_classname = 'thumb'
+ image_id = 'main_image'
+ image_data = None
+ tag_classnames_to_namespaces = { 'tag_name' : '' }
+
+ booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) )
+
+ name = 'tbib'
+ search_url = 'http://tbib.org/index.php?page=post&s=list&tags=%tags%&pid=%index%'
+ search_separator = '+'
+ gallery_advance_num = 25
+ thumb_classname = 'thumb'
+ image_id = None
+ image_data = 'Original image'
+ tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
+
+ booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) )
+
+
+ if version == 47:
+
+ c.execute( 'CREATE TABLE local_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, rating REAL, PRIMARY KEY( service_id, hash_id ) );' )
+ c.execute( 'CREATE INDEX local_ratings_hash_id_index ON local_ratings ( hash_id );' )
+ c.execute( 'CREATE INDEX local_ratings_rating_index ON local_ratings ( rating );' )
+
+ c.execute( 'CREATE TABLE remote_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, count INTEGER, rating REAL, score REAL, PRIMARY KEY( service_id, hash_id ) );' )
+ c.execute( 'CREATE INDEX remote_ratings_hash_id_index ON remote_ratings ( hash_id );' )
+ c.execute( 'CREATE INDEX remote_ratings_rating_index ON remote_ratings ( rating );' )
+ c.execute( 'CREATE INDEX remote_ratings_score_index ON remote_ratings ( score );' )
+
+ c.execute( 'CREATE TABLE ratings_numerical ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, lower INTEGER, upper INTEGER );' )
+
+ c.execute( 'CREATE TABLE ratings_like ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, like TEXT, dislike TEXT );' )
+
+
+ if version == 48:
+
+ result = c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( '', ) ).fetchone()
+
+ if result is not None:
+
+ ( tag_id, ) = result
+
+ c.execute( 'DELETE FROM mappings WHERE tag_id = ?;', ( tag_id, ) )
+ c.execute( 'DELETE FROM pending_mappings WHERE tag_id = ?;', ( tag_id, ) )
+ c.execute( 'DELETE FROM active_mappings WHERE tag_id = ?;', ( tag_id, ) )
+ c.execute( 'DELETE FROM active_pending_mappings WHERE tag_id = ?;', ( tag_id, ) )
+
+
+ HC.app.SetSplashText( 'making new cache, may take a minute' )
+
+ c.execute( 'CREATE TABLE existing_tags ( namespace_id INTEGER, tag_id INTEGER, PRIMARY KEY( namespace_id, tag_id ) );' )
+ c.execute( 'CREATE INDEX existing_tags_tag_id_index ON existing_tags ( tag_id );' )
+
+ all_tag_ids = set()
+
+ all_tag_ids.update( c.execute( 'SELECT namespace_id, tag_id FROM mappings;' ).fetchall() )
+ all_tag_ids.update( c.execute( 'SELECT namespace_id, tag_id FROM pending_mappings;' ).fetchall() )
+
+ c.executemany( 'INSERT INTO existing_tags ( namespace_id, tag_id ) VALUES ( ?, ? );', all_tag_ids )
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'show_all_tags_in_autocomplete' ] = True
+
+ HC.options[ 'file_system_predicates' ][ 'local_rating_numerical' ] = ( 0, 3 )
+ HC.options[ 'file_system_predicates' ][ 'local_rating_like' ] = 0
+
+ shortcuts = {}
+
+ shortcuts[ wx.ACCEL_NORMAL ] = {}
+ shortcuts[ wx.ACCEL_CTRL ] = {}
+ shortcuts[ wx.ACCEL_ALT ] = {}
+ shortcuts[ wx.ACCEL_SHIFT ] = {}
+
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F3 ] = 'manage_tags'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F4 ] = 'manage_ratings'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F5 ] = 'refresh'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F7 ] = 'archive'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F12 ] = 'filter'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F9 ] = 'new_page'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'T' ) ] = 'new_page'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'W' ) ] = 'close_page'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'R' ) ] = 'show_hide_splitters'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'S' ) ] = 'set_search_focus'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'I' ) ] = 'synchronised_wait_switch'
+
+ HC.options[ 'shortcuts' ] = shortcuts
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 49:
+
+ c.execute( 'CREATE TABLE fourchan_pass ( token TEXT, pin TEXT, timeout INTEGER );' )
+
+
+ if version == 50:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ shortcuts = HC.options[ 'shortcuts' ]
+
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'B' ) ] = 'frame_back'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'N' ) ] = 'frame_next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F11 ] = 'ratings_filter'
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+ c.execute( 'CREATE TABLE ratings_filter ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, min REAL, max REAL, PRIMARY KEY( service_id, hash_id ) );' )
+
+
+ if version == 51:
+
+ HC.app.SetSplashText( 'making new indices' )
+
+ c.execute( 'DROP INDEX mappings_namespace_id_index;' )
+ c.execute( 'DROP INDEX mappings_tag_id_index;' )
+
+ c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' )
+ c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' )
+
+ HC.app.SetSplashText( 'making some more new indices' )
+
+ c.execute( 'DROP INDEX pending_mappings_namespace_id_index;' )
+ c.execute( 'DROP INDEX pending_mappings_tag_id_index;' )
+
+ c.execute( 'CREATE INDEX pending_mappings_service_id_tag_id_index ON pending_mappings ( service_id, tag_id );' )
+ c.execute( 'CREATE INDEX pending_mappings_service_id_hash_id_index ON pending_mappings ( service_id, hash_id );' )
+
+ c.execute( 'CREATE TABLE shutdown_timestamps ( shutdown_type INTEGER PRIMARY KEY, timestamp INTEGER );' )
+
+
+ if version == 53:
+
+ c.execute( 'DROP INDEX services_type_name_index;' )
+
+ c.execute( 'ALTER TABLE services ADD COLUMN service_key BLOB_BYTES;' )
+ c.execute( 'CREATE UNIQUE INDEX services_service_key_index ON services ( service_key );' )
+
+ service_info = c.execute( 'SELECT service_id, type FROM services;' ).fetchall()
+
+ updates = []
+
+ for ( service_id, service_type ) in service_info:
+
+ if service_type == HC.LOCAL_FILE: service_key = 'local files'
+ elif service_type == HC.LOCAL_TAG: service_key = 'local tags'
+ else: service_key = os.urandom( 32 )
+
+ updates.append( ( sqlite3.Binary( service_key ), service_id ) )
+
+
+ c.executemany( 'UPDATE services SET service_key = ? WHERE service_id = ?;', updates )
+
+ c.execute( 'UPDATE files_info SET num_frames = num_frames / 1000 WHERE mime = ?;', ( HC.VIDEO_FLV, ) )
+
+
+ if version == 54:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'default_tag_repository' ] = HC.LOCAL_TAG_SERVICE_IDENTIFIER
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 55:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'default_tag_sort' ] = CC.SORT_BY_LEXICOGRAPHIC_ASC
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 56:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ shortcuts = HC.options[ 'shortcuts' ]
+
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_UP ] = 'previous'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_LEFT ] = 'previous'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_UP ] = 'previous'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_LEFT ] = 'previous'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_PAGEUP ] = 'previous'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_PAGEUP ] = 'previous'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_DOWN ] = 'next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_RIGHT ] = 'next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_DOWN ] = 'next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_RIGHT ] = 'next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_PAGEDOWN ] = 'next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_PAGEDOWN ] = 'next'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_HOME ] = 'first'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_HOME ] = 'first'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_END ] = 'last'
+ shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_END ] = 'last'
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 57:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ shortcuts = HC.options[ 'shortcuts' ]
+
+ shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_F7 ] = 'inbox'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'M' ) ] = 'set_media_focus'
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 58:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ shortcuts = HC.options[ 'shortcuts' ]
+
+ shortcuts[ wx.ACCEL_NORMAL ][ ord( 'F' ) ] = 'fullscreen_switch'
+
+ HC.options[ 'fullscreen_borderless' ] = True
+ HC.options[ 'default_collect' ] = None
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 59:
+
+ c.execute( 'CREATE TABLE pixiv_account ( pixiv_id TEXT, password TEXT );' )
+
+ c.execute( 'CREATE TABLE favourite_custom_filter_actions ( name TEXT, actions TEXT_YAML );' )
+
+
+ if version == 60:
+
+ c.execute( 'CREATE TABLE hydrus_sessions ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, session_key BLOB_BYTES, expiry INTEGER );' )
+
+
+ if version == 62:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ system_predicates = HC.options[ 'file_system_predicates' ]
+
+ ( sign, size, unit ) = system_predicates[ 'size' ]
+
+ system_predicates[ 'size' ] = ( sign, size, 1 )
+
+ system_predicates[ 'num_words' ] = ( 0, 30000 )
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 63:
+
+ c.execute( 'CREATE TABLE web_sessions ( name TEXT PRIMARY KEY, cookies TEXT_YAML, expiry INTEGER );' )
+
+ c.execute( 'UPDATE ADDRESSES SET host = ? WHERE host = ?;', ( 'hydrus.no-ip.org', '98.214.1.156' ) )
+
+ c.execute( 'DELETE FROM service_info WHERE info_type IN ( 6, 7 );' ) # resetting thumb count, to see if it breaks again
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ shortcuts = HC.options[ 'shortcuts' ]
+
+ shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_UP ] = 'pan_up'
+ shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_DOWN ] = 'pan_down'
+ shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_LEFT ] = 'pan_left'
+ shortcuts[ wx.ACCEL_SHIFT ][ wx.WXK_RIGHT ] = 'pan_right'
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 64:
+
+ HC.app.SetSplashText( 'renaming db files' )
+
+ filenames = dircache.listdir( HC.CLIENT_FILES_DIR )
+
+ i = 1
+
+ for filename in filenames:
+
+ if '.' not in filename:
+
+ try:
+
+ old_path = HC.CLIENT_FILES_DIR + os.path.sep + filename
+
+ mime = HydrusFileHandling.GetMime( old_path )
+
+ new_path = old_path + HC.mime_ext_lookup[ mime ]
+
+ shutil.move( old_path, new_path )
+
+ os.chmod( new_path, stat.S_IREAD )
+
+ except: pass
+
+
+ i += 1
+
+ if i % 250 == 0: HC.app.SetSplashText( 'renaming file ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
+
+
+ c.execute( 'CREATE TABLE subscriptions ( subscriptions TEXT_YAML );' )
+
+
+ if version == 65:
+
+ c.execute( 'DELETE FROM boorus;' )
+
+ c.executemany( 'INSERT INTO boorus VALUES ( ?, ? );', CC.DEFAULT_BOORUS.items() )
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'pause_repo_sync' ] = False
+ HC.options[ 'pause_subs_sync' ] = False
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 66:
+
+ result = c.execute( 'SELECT subscriptions FROM subscriptions;' ).fetchone()
+
+ if result is None: subscriptions = []
+ else: ( subscriptions, ) = result
+
+ c.execute( 'DROP TABLE subscriptions;' )
+
+ c.execute( 'CREATE TABLE subscriptions ( site_download_type INTEGER, name TEXT, info TEXT_YAML, PRIMARY KEY( site_download_type, name ) );' )
+
+ inserts = [ ( site_download_type, name, [ query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ] ) for ( site_download_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) in subscriptions ]
+
+ c.executemany( 'INSERT INTO subscriptions ( site_download_type, name, info ) VALUES ( ?, ?, ? );', inserts )
+
+ #
+
+ HC.app.SetSplashText( 'creating new db directories' )
+
+ hex_chars = '0123456789abcdef'
+
+ for ( one, two ) in itertools.product( hex_chars, hex_chars ):
+
+ dir = HC.CLIENT_FILES_DIR + os.path.sep + one + two
+
+ if not os.path.exists( dir ): os.mkdir( dir )
+
+ dir = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + one + two
+
+ if not os.path.exists( dir ): os.mkdir( dir )
+
+
+ HC.app.SetSplashText( 'generating file cache' )
+
+ filenames = dircache.listdir( HC.CLIENT_FILES_DIR )
+
+ i = 1
+
+ for filename in filenames:
+
+ try:
+
+ source_path = HC.CLIENT_FILES_DIR + os.path.sep + filename
+
+ first_two_chars = filename[:2]
+
+ destination_path = HC.CLIENT_FILES_DIR + os.path.sep + first_two_chars + os.path.sep + filename
+
+ shutil.move( source_path, destination_path )
+
+ except: continue
+
+ i += 1
+
+ if i % 100 == 0: HC.app.SetSplashText( 'moving files - ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
+
+
+ HC.app.SetSplashText( 'generating thumbnail cache' )
+
+ filenames = dircache.listdir( HC.CLIENT_THUMBNAILS_DIR )
+
+ i = 1
+
+ for filename in filenames:
+
+ try:
+
+ source_path = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + filename
+
+ first_two_chars = filename[:2]
+
+ destination_path = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + first_two_chars + os.path.sep + filename
+
+ shutil.move( source_path, destination_path )
+
+ except: continue
+
+ i += 1
+
+ if i % 100 == 0: HC.app.SetSplashText( 'moving thumbnails - ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
+
+
+
+ if version == 67:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'confirm_client_exit' ] = False
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+ #
+
+ boorus = []
+
+ name = 'e621'
+ search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%'
+ search_separator = '%20'
+ advance_by_page_num = True
+ thumb_classname = 'thumb'
+ image_id = None
+ image_data = 'Download'
+ tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
+
+ boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
+
+ name = 'danbooru'
+ search_url = 'http://danbooru.donmai.us/posts?page=%index%&tags=%tags%'
+ search_separator = '%20'
+ advance_by_page_num = True
+ thumb_classname = 'post-preview'
+ image_id = 'image'
+ image_data = None
+ tag_classnames_to_namespaces = { 'category-0' : '', 'category-4' : 'character', 'category-3' : 'series', 'category-1' : 'creator' }
+
+ boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
+
+ for booru in boorus:
+
+ name = booru.GetName()
+
+ c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
+
+
+
+ if version == 68:
+
+ boorus = []
+
+ name = 'e621'
+ search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%'
+ search_separator = '%20'
+ advance_by_page_num = True
+ thumb_classname = 'thumb'
+ image_id = None
+ image_data = 'Download'
+ tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' }
+
+ boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
+
+ for booru in boorus:
+
+ name = booru.GetName()
+
+ c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
+
+
+ #
+
+ c.execute( 'CREATE TABLE tag_siblings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER, PRIMARY KEY ( service_id, old_namespace_id, old_tag_id ) );' )
+
+ #
+
+ subscriptions = c.execute( 'SELECT site_download_type, name, info FROM subscriptions;' ).fetchall()
+
+ paused = False
+
+ for ( site_download_type, name, ( query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) ) in subscriptions:
+
+ updated_info = [ query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache, paused ]
+
+ c.execute( 'UPDATE subscriptions SET info = ? WHERE site_download_type = ? AND name = ?;', ( updated_info, site_download_type, name ) )
+
+
+
+ if version == 69:
+
+ c.execute( 'CREATE TABLE tag_parents ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER );' )
+ c.execute( 'CREATE UNIQUE INDEX tag_parents_all_index ON tag_parents ( service_id, old_namespace_id, old_tag_id, new_namespace_id, new_tag_id );' )
+
+ #
+
+ c.execute( 'CREATE VIRTUAL TABLE tags_fts4 USING fts4( tag );' )
+ c.execute( 'INSERT INTO tags_fts4 ( docid, tag ) SELECT tag_id, tag FROM tags;' )
+
+
+ if version == 70:
+
+ init_service_identifiers = [ HC.COMBINED_FILE_SERVICE_IDENTIFIER, HC.COMBINED_TAG_SERVICE_IDENTIFIER ]
+
+ for init_service_identifier in init_service_identifiers:
+
+ ( service_key, service_type, service_name ) = init_service_identifier.GetInfo()
+
+ c.execute( 'INSERT INTO services ( service_key, type, name ) VALUES ( ?, ?, ? );', ( sqlite3.Binary( service_key ), service_type, service_name ) )
+
+
+ c.execute( 'ALTER TABLE mappings ADD COLUMN status INTEGER;' )
+ c.execute( 'UPDATE mappings SET status = ?;', ( HC.CURRENT, ) )
+
+ c.execute( 'CREATE INDEX mappings_service_id_status_index ON mappings ( service_id, status );' )
+ c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
+
+ c.execute( 'ANALYZE' )
+
+ deleted_mappings = set( c.execute( 'SELECT service_id, namespace_id, tag_id, hash_id FROM deleted_mappings;' ).fetchall() )
+ pending_mappings = set( c.execute( 'SELECT service_id, namespace_id, tag_id, hash_id FROM pending_mappings;' ).fetchall() )
+
+ deleted_pending_mappings = pending_mappings.intersection( deleted_mappings )
+
+ deleted_mappings.difference_update( deleted_pending_mappings )
+ pending_mappings.difference_update( deleted_pending_mappings )
+
+ c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.DELETED_PENDING ) for ( service_id, namespace_id, tag_id, hash_id ) in deleted_pending_mappings ) )
+ c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.DELETED ) for ( service_id, namespace_id, tag_id, hash_id ) in deleted_mappings ) )
+ c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.PENDING ) for ( service_id, namespace_id, tag_id, hash_id ) in pending_mappings ) )
+
+ c.execute( 'DROP TABLE deleted_mappings;' )
+ c.execute( 'DROP TABLE pending_mappings;' )
+ c.execute( 'DROP TABLE active_mappings;' )
+ c.execute( 'DROP TABLE active_pending_mappings;' )
+
+ #
+
+ c.execute( 'DELETE FROM service_info;' )
+
+ #
+
+ c.execute( 'DELETE FROM autocomplete_tags_cache;' )
+
+ #
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ HC.options[ 'play_dumper_noises' ] = True
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
+ if version == 71:
+
+ c.execute( 'ALTER TABLE tag_siblings ADD COLUMN status INTEGER;' )
+ c.execute( 'UPDATE tag_siblings SET status = ?;', ( HC.CURRENT, ) )
+
+ c.execute( 'ALTER TABLE tag_parents ADD COLUMN status INTEGER;' )
+ c.execute( 'UPDATE tag_parents SET status = ?;', ( HC.CURRENT, ) )
+
+ tag_siblings = c.execute( 'SELECT * FROM tag_siblings;' ).fetchall()
+ tag_parents = c.execute( 'SELECT * FROM tag_parents;' ).fetchall()
+
+ c.execute( 'DROP TABLE tag_siblings;' )
+ c.execute( 'DROP TABLE tag_parents;' )
+
+ c.execute( 'CREATE TABLE tag_parents ( service_id INTEGER REFERENCES services ON DELETE CASCADE, child_namespace_id INTEGER, child_tag_id INTEGER, parent_namespace_id INTEGER, parent_tag_id INTEGER, status INTEGER, PRIMARY KEY ( service_id, child_namespace_id, child_tag_id, parent_namespace_id, parent_tag_id, status ) );' )
+ c.execute( 'CREATE INDEX tag_parents_service_id_status_index ON tag_parents ( service_id, status );' )
+ c.execute( 'CREATE INDEX tag_parents_status_index ON tag_parents ( status );' )
+
+ c.execute( 'CREATE TABLE tag_parent_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, child_namespace_id INTEGER, child_tag_id INTEGER, parent_namespace_id INTEGER, parent_tag_id INTEGER, status INTEGER, reason_id INTEGER, PRIMARY KEY ( service_id, child_namespace_id, child_tag_id, parent_namespace_id, parent_tag_id, status ) );' )
+
+ c.execute( 'CREATE TABLE tag_siblings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER, status INTEGER, PRIMARY KEY ( service_id, old_namespace_id, old_tag_id, status ) );' )
+ c.execute( 'CREATE INDEX tag_siblings_service_id_status_index ON tag_siblings ( service_id, status );' )
+ c.execute( 'CREATE INDEX tag_siblings_status_index ON tag_siblings ( status );' )
+
+ c.execute( 'CREATE TABLE tag_sibling_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, old_namespace_id INTEGER, old_tag_id INTEGER, new_namespace_id INTEGER, new_tag_id INTEGER, status INTEGER, reason_id INTEGER, PRIMARY KEY ( service_id, old_namespace_id, old_tag_id, status ) );' )
+
+ c.executemany( 'INSERT INTO tag_siblings VALUES ( ?, ?, ?, ?, ?, ? );', tag_siblings )
+ c.executemany( 'INSERT INTO tag_parents VALUES ( ?, ?, ?, ?, ?, ? );', tag_parents )
+
+ #
+
+ c.execute( 'ALTER TABLE mappings RENAME TO mappings_old;' )
+
+ c.execute( 'DROP INDEX mappings_hash_id_index;' )
+ c.execute( 'DROP INDEX mappings_service_id_tag_id_index;' )
+ c.execute( 'DROP INDEX mappings_service_id_hash_id_index;' )
+ c.execute( 'DROP INDEX mappings_service_id_status_index;' )
+ c.execute( 'DROP INDEX mappings_status_index;' )
+
+ c.execute( 'CREATE TABLE mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, status INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, status ) );' )
+ c.execute( 'CREATE INDEX mappings_hash_id_index ON mappings ( hash_id );' )
+ c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' )
+ c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' )
+ c.execute( 'CREATE INDEX mappings_service_id_status_index ON mappings ( service_id, status );' )
+ c.execute( 'CREATE INDEX mappings_status_index ON mappings ( status );' )
+
+ c.execute( 'INSERT INTO mappings SELECT * FROM mappings_old;' )
+
+ c.execute( 'DROP TABLE mappings_old;' )
+
+ #
+
+ download_data = c.execute( 'SELECT service_id_to, hash_id FROM file_transfers;' ).fetchall()
+
+ c.execute( 'DROP TABLE file_transfers;' )
+
+ c.execute( 'CREATE TABLE file_transfers ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
+ c.execute( 'CREATE INDEX file_transfers_hash_id ON file_transfers ( hash_id );' )
+
+ c.executemany( 'INSERT OR IGNORE INTO file_transfers ( service_id, hash_id ) VALUES ( ?, ? );', download_data )
+
+ #
+
+ c.execute( 'DELETE FROM service_info;' )
+
+
+ if version == 72:
+
+ inserts = c.execute( 'SELECT service_id, namespace_id, tag_id, hash_id FROM mappings WHERE status = ?;', ( HC.DELETED_PENDING, ) ).fetchall()
+
+ c.execute( 'DELETE FROM mappings WHERE status = ?;', ( HC.DELETED_PENDING, ) )
+
+ c.executemany( 'INSERT INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.DELETED ) for ( service_id, namespace_id, tag_id, hash_id ) in inserts ) )
+ c.executemany( 'INSERT INTO mappings ( service_id, namespace_id, tag_id, hash_id, status ) VALUES ( ?, ?, ?, ?, ? );', ( ( service_id, namespace_id, tag_id, hash_id, HC.PENDING ) for ( service_id, namespace_id, tag_id, hash_id ) in inserts ) )
+
+ #
+
+ c.execute( 'DELETE FROM autocomplete_tags_cache;' )
+
+
+ if version == 73:
+
+ fourchan_imageboards = []
+
+ fourchan_imageboards.append( CC.Imageboard( '/asp/', 'https://sys.4chan.org/asp/post', 75, CC.fourchan_typical_form_fields, CC.fourchan_typical_restrictions ) )
+ fourchan_imageboards.append( CC.Imageboard( '/gd/', 'https://sys.4chan.org/gd/post', 75, CC.fourchan_typical_form_fields, { CC.RESTRICTION_MAX_FILE_SIZE : 8388608, CC.RESTRICTION_ALLOWED_MIMES : [ HC.IMAGE_GIF, HC.IMAGE_PNG, HC.IMAGE_JPEG, HC.APPLICATION_PDF ] } ) )
+ fourchan_imageboards.append( CC.Imageboard( '/lgbt/', 'https://sys.4chan.org/lgbt/post', 75, CC.fourchan_typical_form_fields, CC.fourchan_typical_restrictions ) )
+ fourchan_imageboards.append( CC.Imageboard( '/vr/', 'https://sys.4chan.org/vr/post', 75, CC.fourchan_typical_form_fields, CC.fourchan_typical_restrictions ) )
+ fourchan_imageboards.append( CC.Imageboard( '/wsg/', 'https://sys.4chan.org/wsg/post', 75, CC.fourchan_typical_form_fields, { CC.RESTRICTION_MAX_FILE_SIZE : 4194304, CC.RESTRICTION_ALLOWED_MIMES : [ HC.IMAGE_GIF ] } ) )
+
+ new_imageboards = []
+
+ new_imageboards.append( ( '4chan', fourchan_imageboards ) )
+
+ def old_get_site_id( c, name ):
+
+ result = c.execute( 'SELECT site_id FROM imageboard_sites WHERE name = ?;', ( name, ) ).fetchone()
+
+ if result is None:
+
+ c.execute( 'INSERT INTO imageboard_sites ( name ) VALUES ( ? );', ( name, ) )
+
+ site_id = c.lastrowid
+
+ else: ( site_id, ) = result
+
+ return site_id
+
+
+ for ( site_name, imageboards ) in new_imageboards:
+
+ site_id = old_get_site_id( c, site_name )
+
+ try: c.executemany( 'INSERT INTO imageboards VALUES ( ?, ?, ? );', [ ( site_id, imageboard.GetName(), imageboard ) for imageboard in imageboards ] )
+ except: pass
+
+
+ c.execute( 'DELETE FROM autocomplete_tags_cache;' )
+
+
+ if version == 76:
+
+ c.execute( 'CREATE TABLE import_folders ( path TEXT, details TEXT_YAML );' )
+
+
+ if version == 78:
+
+ c.execute( 'DELETE FROM import_folders;' )
+
+
+ if version == 79:
+
+ boorus = []
+
+ name = 'e621'
+ search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%'
+ search_separator = '%20'
+ advance_by_page_num = True
+ thumb_classname = 'thumb'
+ image_id = None
+ image_data = 'Download'
+ tag_classnames_to_namespaces = { 'tag-type-general categorized-tag' : '', 'tag-type-character categorized-tag' : 'character', 'tag-type-copyright categorized-tag' : 'series', 'tag-type-artist categorized-tag' : 'creator', 'tag-type-species categorized-tag' : 'species' }
+
+ boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
+
+ for booru in boorus:
+
+ name = booru.GetName()
+
+ c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
+
+
+
+ if version == 84:
+
+ boorus = []
+
+ name = 'e621'
+ search_url = 'https://e621.net/post/index?page=%index%&tags=%tags%'
+ search_separator = '%20'
+ advance_by_page_num = True
+ thumb_classname = 'thumb'
+ image_id = None
+ image_data = 'Download'
+ tag_classnames_to_namespaces = { 'tag-type-general categorized-tag' : '', 'tag-type-character categorized-tag' : 'character', 'tag-type-copyright categorized-tag' : 'series', 'tag-type-artist categorized-tag' : 'creator', 'tag-type-species categorized-tag' : 'species' }
+
+ boorus.append( CC.Booru( name, search_url, search_separator, advance_by_page_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) )
+
+ for booru in boorus:
+
+ name = booru.GetName()
+
+ c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) )
+
+ c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( name, booru ) )
+
+
+
+ if version == 87:
+
+ c.execute( 'CREATE TABLE namespace_blacklists ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, blacklist INTEGER_BOOLEAN, namespaces TEXT_YAML );' )
+
+
+ if version == 90:
+
+ ( HC.options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
+
+ shortcuts = HC.options[ 'shortcuts' ]
+
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'Z' ) ] = 'undo'
+ shortcuts[ wx.ACCEL_CTRL ][ ord( 'Y' ) ] = 'redo'
+
+ HC.options[ 'shortcuts' ] = shortcuts
+
+ c.execute( 'UPDATE options SET options = ?;', ( HC.options, ) )
+
+
def _Vacuum( self ):
@@ -6624,7 +6613,7 @@ class DB( ServiceDB ):
c.execute( 'REPLACE INTO shutdown_timestamps ( shutdown_type, timestamp ) VALUES ( ?, ? );', ( CC.SHUTDOWN_TIMESTAMP_VACUUM, HC.GetNow() ) )
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'vacuumed successfully' ) )
+ HC.ShowText( 'vacuumed successfully' )
def GetLoopFinished( self ): return self._loop_finished
@@ -6722,7 +6711,6 @@ class DB( ServiceDB ):
elif action == 'gui_session': result = self._SetGUISession( c, *args, **kwargs )
elif action == 'hydrus_session': result = self._AddHydrusSession( c, *args, **kwargs )
elif action == 'import_file': result = self._ImportFile( c, *args, **kwargs )
- elif action == 'import_file_from_page': result = self._ImportFilePage( c, *args, **kwargs )
elif action == 'import_folder': result = self._UpdateImportFolder( c, *args, **kwargs )
elif action == 'import_folders': result = self._SetImportFolders( c, *args, **kwargs )
elif action == 'inbox_conversation': result = self._InboxConversation( c, *args, **kwargs )
@@ -6782,20 +6770,14 @@ class DB( ServiceDB ):
if job_type != 'write_special': c.execute( 'ROLLBACK' )
- if type( e ) == MemoryError: HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'The client is running out of memory! Restart it asap!' ) )
+ if type( e ) == MemoryError: HC.ShowText( 'The client is running out of memory! Restart it asap!' )
( etype, value, tb ) = sys.exc_info()
new_e = type( e )( os.linesep.join( traceback.format_exception( etype, value, tb ) ) )
- if job.IsSynchronous():
-
- job.PutResult( new_e )
-
- else:
-
- if action != 'import_file_from_page': HC.ShowException( new_e )
-
+ if job.IsSynchronous(): job.PutResult( new_e )
+ else: HC.ShowException( new_e )
HC.pubsub.pub( 'db_locked_status', '' )
@@ -6839,7 +6821,7 @@ class DB( ServiceDB ):
synchronous = True
- job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
+ job = HC.JobDatabase( action, job_type, synchronous, *args, **kwargs )
if HC.shutdown: raise Exception( 'Application has shutdown!' )
@@ -6905,7 +6887,7 @@ class DB( ServiceDB ):
if action == 'vacuum': job_type = 'write_special'
else: job_type = 'write'
- job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
+ job = HC.JobDatabase( action, job_type, synchronous, *args, **kwargs )
if HC.shutdown: raise Exception( 'Application has shutdown!' )
@@ -7088,7 +7070,7 @@ def DAEMONDownloadFiles():
message = 'Error downloading file:' + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
@@ -7187,13 +7169,13 @@ def DAEMONResizeThumbnails():
message = 'Thumbnail rendering error:' + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
except Exception as e:
message = 'Thumbnail rendering error:' + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
if i % 10 == 0: time.sleep( 2 )
@@ -7247,7 +7229,7 @@ def DAEMONSynchroniseAccounts():
message = 'Failed to refresh account for ' + name + ':' + os.linesep + os.linesep + HC.u( e )
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
@@ -7293,7 +7275,7 @@ def DAEMONSynchroniseMessages():
contact = service.GetContact()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'associated public key with account at ' + service_identifier.GetName() ) )
+ HC.ShowText( 'associated public key with account at ' + service_identifier.GetName() )
except:
@@ -7319,7 +7301,7 @@ def DAEMONSynchroniseMessages():
HC.app.WriteSynchronous( 'message_info_since', service_identifier, message_keys, decrypted_statuses, new_last_check )
- if len( message_keys ) > 0: HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'checked ' + service_identifier.GetName() + ' up to ' + HC.ConvertTimestampToPrettyTime( new_last_check ) + ', finding ' + HC.u( len( message_keys ) ) + ' new messages' ) )
+ if len( message_keys ) > 0: HC.ShowText( 'checked ' + service_identifier.GetName() + ' up to ' + HC.ConvertTimestampToPrettyTime( new_last_check ) + ', finding ' + HC.u( len( message_keys ) ) + ' new messages' )
# try to download any messages that still need downloading
@@ -7356,7 +7338,7 @@ def DAEMONSynchroniseMessages():
if num_processed > 0:
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'downloaded and parsed ' + HC.u( num_processed ) + ' messages from ' + service_identifier.GetName() ) )
+ HC.ShowText( 'downloaded and parsed ' + HC.u( num_processed ) + ' messages from ' + service_identifier.GetName() )
@@ -7365,7 +7347,7 @@ def DAEMONSynchroniseMessages():
message = 'Failed to check ' + name + ':' + os.linesep + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
@@ -7415,7 +7397,7 @@ def DAEMONSynchroniseMessages():
message = 'Sending a message failed: ' + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
status = 'failed'
@@ -7554,7 +7536,7 @@ def DAEMONSynchroniseRepositoriesAndSubscriptions():
message = 'Failed to update ' + name + ':' + os.linesep + os.linesep + HC.u( e )
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
time.sleep( 3 )
@@ -7784,7 +7766,7 @@ def DAEMONSynchroniseRepositoriesAndSubscriptions():
message = 'While trying to execute a subscription, the url ' + url + ' caused this problem:' + os.linesep + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
i += 1
@@ -7819,7 +7801,7 @@ def DAEMONSynchroniseRepositoriesAndSubscriptions():
message = 'Problem with ' + name + ' ' + traceback.format_exc()
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
time.sleep( 3 )
diff --git a/include/ClientGUI.py b/include/ClientGUI.py
index 68043bd9..c0318895 100755
--- a/include/ClientGUI.py
+++ b/include/ClientGUI.py
@@ -81,7 +81,6 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
HC.pubsub.sub( self, 'ClearClosedPages', 'clear_closed_pages' )
HC.pubsub.sub( self, 'NewCompose', 'new_compose_frame' )
- HC.pubsub.sub( self, 'NewPageImportBooru', 'new_page_import_booru' )
HC.pubsub.sub( self, 'NewPageImportGallery', 'new_page_import_gallery' )
HC.pubsub.sub( self, 'NewPageImportHDD', 'new_hdd_import' )
HC.pubsub.sub( self, 'NewPageImportThreadWatcher', 'new_page_import_thread_watcher' )
@@ -335,7 +334,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
HC.app.Write( 'update_services', edit_log )
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, 'Auto repo setup done!' ) )
+ HC.ShowText( 'Auto repo setup done!' )
@@ -501,14 +500,6 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
- def _CloseAllPages( self ):
-
- while self._notebook.GetPageCount() > 0:
-
- self._CloseCurrentPage( polite = False )
-
-
-
def _CloseCurrentPage( self, polite = True ):
selection = self._notebook.GetSelection()
@@ -525,7 +516,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
if polite:
- try: page.TryToClose()
+ try: page.TestAbleToClose()
except: return
@@ -544,9 +535,9 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
self.RefreshMenuBar()
- def _DeleteAllPages( self ):
+ def _DeleteAllClosedPages( self ):
- for ( time_closed, selection, name, page ) in self._closed_pages: wx.CallAfter( page.Destroy )
+ for ( time_closed, selection, name, page ) in self._closed_pages: self._DestroyPage( page )
self._closed_pages = []
@@ -563,6 +554,15 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
+ def _DestroyPage( self, page ):
+
+ page.Hide()
+
+ page.CleanBeforeDestroy()
+
+ page.Destroy()
+
+
def _FetchIP( self, service_identifier ):
with wx.TextEntryDialog( self, 'File Hash' ) as dlg:
@@ -610,11 +610,14 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
- try: page.TryToClose()
+ try: page.TestAbleToClose()
except: return
- self._CloseAllPages()
+ while self._notebook.GetPageCount() > 0:
+
+ self._CloseCurrentPage( polite = False )
+
for ( page_name, c_text, args, kwargs ) in info:
@@ -758,30 +761,15 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
- def _NewPageImportBooru( self ):
+ def _NewPageImportGallery( self, name, type ):
- with ClientGUIDialogs.DialogSelectBooru( self ) as dlg:
-
- if dlg.ShowModal() == wx.ID_OK:
-
- booru = dlg.GetBooru()
-
- new_page = ClientGUIPages.PageImportBooru( self._notebook, booru )
-
- self._notebook.AddPage( new_page, booru.GetName(), select = True )
-
- self._notebook.SetSelection( self._notebook.GetPageCount() - 1 )
-
- new_page.SetSearchFocus()
-
-
+ new_page = ClientGUIPages.PageImportGallery( self._notebook, name, type )
-
- def _NewPageImportGallery( self, name ):
+ if name == 'booru': page_name = type.GetName()
+ elif type is None: page_name = name
+ else: page_name = name + ' by ' + type
- new_page = ClientGUIPages.PageImportGallery( self._notebook, name )
-
- self._notebook.AddPage( new_page, name, select = True )
+ self._notebook.AddPage( new_page, page_name, select = True )
self._notebook.SetSelection( self._notebook.GetPageCount() - 1 )
@@ -1176,7 +1164,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
for ( time_closed, index, name, page ) in self._closed_pages:
- if time_closed + timeout < now: page.Destroy()
+ if time_closed + timeout < now: self._DestroyPage( page )
else: new_closed_pages.append( ( time_closed, index, name, page ) )
@@ -1226,14 +1214,18 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
- try: page.TryToClose()
+ try: page.TestAbleToClose()
+ except Exception as e: return
+
+
+ for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
+
+ try: page.CleanBeforeDestroy()
except: return
- self._DeleteAllPages()
-
- self._message_manager.CleanUp()
- self._message_manager.Destroy()
+ self._message_manager.CleanBeforeDestroy()
+ self._message_manager.Hide()
self.Hide()
@@ -1302,7 +1294,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
print( 'garbage: ' + HC.u( gc.garbage ) )
- elif command == 'delete_all_pages': self._DeleteAllPages()
+ elif command == 'delete_all_closed_pages': self._DeleteAllClosedPages()
elif command == 'delete_gui_session': HC.app.Write( 'gui_session', data, None )
elif command == 'delete_pending': self._DeletePending( data )
elif command == 'exit': self.EventExit( event )
@@ -1330,7 +1322,6 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
elif command == 'manage_upnp': self._ManageUPnP( data )
elif command == 'modify_account': self._ModifyAccount( data )
elif command == 'new_accounts': self._GenerateNewAccounts( data )
- elif command == 'new_import_booru': self._NewPageImportBooru()
elif command == 'new_import_thread_watcher': self._NewPageImportThreadWatcher()
elif command == 'new_import_url': self._NewPageImportURL()
elif command == 'new_log_page': self._NewPageLog()
@@ -1418,9 +1409,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
FrameComposeMessage( empty_draft_message )
- def NewPageImportBooru( self ): self._NewPageImportBooru()
-
- def NewPageImportGallery( self, name ): self._NewPageImportGallery( name )
+ def NewPageImportGallery( self, gallery_name, gallery_type ): self._NewPageImportGallery( gallery_name, gallery_type )
def NewPageImportHDD( self, paths_info, advanced_import_options = {}, paths_to_tags = {}, delete_after_success = False ):
@@ -1578,7 +1567,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
undo_pages = wx.Menu()
- undo_pages.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete_all_pages' ), 'clear all' )
+ undo_pages.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete_all_closed_pages' ), 'clear all' )
undo_pages.AppendSeparator()
@@ -1882,12 +1871,12 @@ class FrameComposeMessage( ClientGUICommon.Frame ):
def DeleteConversation( self, conversation_key ):
- if self._draft_panel.GetConversationKey() == conversation_key: self.Destroy()
+ if self._draft_panel.GetConversationKey() == conversation_key: self.Close()
def DeleteDraft( self, draft_key ):
- if draft_key == self._draft_panel.GetDraftKey(): self.Destroy()
+ if draft_key == self._draft_panel.GetDraftKey(): self.Close()
class FramePageChooser( ClientGUICommon.Frame ):
@@ -1993,7 +1982,13 @@ class FramePageChooser( ClientGUICommon.Frame ):
button.SetLabel( name )
elif entry_type == 'page_import_booru': button.SetLabel( 'booru' )
- elif entry_type == 'page_import_gallery': button.SetLabel( obj )
+ elif entry_type == 'page_import_gallery':
+
+ ( name, type ) = obj
+
+ if type is None: button.SetLabel( name )
+ else: button.SetLabel( name + ' by ' + type )
+
elif entry_type == 'page_import_thread_watcher': button.SetLabel( 'thread watcher' )
elif entry_type == 'page_import_url': button.SetLabel( 'url' )
@@ -2019,16 +2014,16 @@ class FramePageChooser( ClientGUICommon.Frame ):
elif menu_keyword == 'download': entries = [ ( 'page_import_url', None ), ( 'page_import_thread_watcher', None ), ( 'menu', 'gallery' ) ]
elif menu_keyword == 'gallery':
- entries = [ ( 'page_import_booru', None ), ( 'page_import_gallery', 'giphy' ), ( 'page_import_gallery', 'deviant art by artist' ), ( 'menu', 'hentai foundry' ), ( 'page_import_gallery', 'newgrounds' ) ]
+ entries = [ ( 'page_import_booru', None ), ( 'page_import_gallery', ( 'giphy', None ) ), ( 'page_import_gallery', ( 'deviant art', 'artist' ) ), ( 'menu', 'hentai foundry' ), ( 'page_import_gallery', ( 'newgrounds', None ) ) ]
( id, password ) = HC.app.Read( 'pixiv_account' )
if id != '' and password != '': entries.append( ( 'menu', 'pixiv' ) )
- entries.extend( [ ( 'page_import_gallery', 'tumblr' ) ] )
+ entries.extend( [ ( 'page_import_gallery', ( 'tumblr', None ) ) ] )
- elif menu_keyword == 'hentai foundry': entries = [ ( 'page_import_gallery', 'hentai foundry by artist' ), ( 'page_import_gallery', 'hentai foundry by tags' ) ]
- elif menu_keyword == 'pixiv': entries = [ ( 'page_import_gallery', 'pixiv by artist' ), ( 'page_import_gallery', 'pixiv by tag' ) ]
+ elif menu_keyword == 'hentai foundry': entries = [ ( 'page_import_gallery', ( 'hentai foundry', 'artist' ) ), ( 'page_import_gallery', ( 'hentai foundry', 'tags' ) ) ]
+ elif menu_keyword == 'pixiv': entries = [ ( 'page_import_gallery', ( 'pixiv', 'artist' ) ), ( 'page_import_gallery', ( 'pixiv', 'tag' ) ) ]
elif menu_keyword == 'petitions': entries = [ ( 'page_petitions', service_identifier ) for service_identifier in self._petition_service_identifiers ]
if len( entries ) <= 4:
@@ -2067,13 +2062,29 @@ class FramePageChooser( ClientGUICommon.Frame ):
else:
if entry_type == 'page_query': HC.pubsub.pub( 'new_page_query', obj )
- elif entry_type == 'page_import_booru': HC.pubsub.pub( 'new_page_import_booru' )
- elif entry_type == 'page_import_gallery': HC.pubsub.pub( 'new_page_import_gallery', obj )
+ elif entry_type == 'page_import_booru':
+
+ with ClientGUIDialogs.DialogSelectBooru( self ) as dlg:
+
+ if dlg.ShowModal() == wx.ID_OK:
+
+ booru = dlg.GetBooru()
+
+ HC.pubsub.pub( 'new_page_import_gallery', 'booru', booru )
+
+
+
+ elif entry_type == 'page_import_gallery':
+
+ ( gallery_name, gallery_type ) = obj
+
+ HC.pubsub.pub( 'new_page_import_gallery', gallery_name, gallery_type )
+
elif entry_type == 'page_import_thread_watcher': HC.pubsub.pub( 'new_page_import_thread_watcher' )
elif entry_type == 'page_import_url': HC.pubsub.pub( 'new_page_import_url' )
elif entry_type == 'page_petitions': HC.pubsub.pub( 'new_page_petitions', obj )
- self.Destroy()
+ self.Close()
@@ -2090,7 +2101,7 @@ class FramePageChooser( ClientGUICommon.Frame ):
self.ProcessEvent( new_event )
- elif event.KeyCode == wx.WXK_ESCAPE: self.Destroy()
+ elif event.KeyCode == wx.WXK_ESCAPE: self.Close()
else: event.Skip()
@@ -2218,7 +2229,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
HC.options[ 'pause_repo_sync' ] = original_pause_status
- def EventOk( self, event ): self.Destroy()
+ def EventOk( self, event ): self.Close()
def RefreshServices( self ): self._InitialiseServices()
diff --git a/include/ClientGUICanvas.py b/include/ClientGUICanvas.py
index e06e36f7..1aee0e3a 100755
--- a/include/ClientGUICanvas.py
+++ b/include/ClientGUICanvas.py
@@ -585,7 +585,7 @@ class Canvas():
self._media_container.Hide()
- wx.CallAfter( self._media_container.Destroy )
+ self._media_container.Close()
self._media_container = None
diff --git a/include/ClientGUICommon.py b/include/ClientGUICommon.py
index 109701a7..59ffe808 100755
--- a/include/ClientGUICommon.py
+++ b/include/ClientGUICommon.py
@@ -281,7 +281,7 @@ class AutoCompleteDropdown( wx.TextCtrl ):
def EventText( self, event ):
if len( self.GetValue() ) == 0: self._UpdateList()
- else: self._lag_timer.Start( 150, wx.TIMER_ONE_SHOT )
+ else: self._lag_timer.Start( 250, wx.TIMER_ONE_SHOT )
class AutoCompleteDropdownContacts( AutoCompleteDropdown ):
@@ -2248,9 +2248,12 @@ class PopupMessageError( PopupMessage ):
error = wx.StaticText( self, label = HC.u( etype.__name__ ), style = wx.ALIGN_CENTER )
error.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
- text = wx.StaticText( self, label = HC.u( value ) )
- text.Wrap( 380 )
- text.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
+ if len( HC.u( value ) ) > 0:
+
+ text = wx.StaticText( self, label = HC.u( value ) )
+ text.Wrap( 380 )
+ text.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
+
self._show_tb_button = wx.Button( self, label = 'show traceback' )
self._show_tb_button.Bind( wx.EVT_BUTTON, self.EventShowButton )
@@ -2267,7 +2270,7 @@ class PopupMessageError( PopupMessage ):
self._copy_tb_button.Hide()
vbox.AddF( error, FLAGS_EXPAND_PERPENDICULAR )
- vbox.AddF( text, FLAGS_EXPAND_PERPENDICULAR )
+ if len( HC.u( value ) ) > 0: vbox.AddF( text, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._show_tb_button, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._tb_text, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._copy_tb_button, FLAGS_EXPAND_PERPENDICULAR )
@@ -2469,7 +2472,7 @@ class PopupMessageText( PopupMessage ):
vbox = wx.BoxSizer( wx.VERTICAL )
- text = wx.StaticText( self, label = message_string ) # make this multi-line. There's an easy way to do that, right? A func that takes a pixel width, I think
+ text = wx.StaticText( self, label = message_string )
text.Wrap( 380 )
text.Bind( wx.EVT_RIGHT_DOWN, self.EventDismiss )
@@ -2515,6 +2518,7 @@ class PopupMessageManager( wx.Frame ):
sys.excepthook = CC.CatchExceptionClient
HC.ShowException = CC.ShowExceptionClient
+ HC.ShowText = CC.ShowTextClient
def _CheckPending( self ):
@@ -2636,22 +2640,18 @@ class PopupMessageManager( wx.Frame ):
self._CheckPending()
- def CleanUp( self ):
+ def CleanBeforeDestroy( self ):
sys.excepthook = self._old_excepthook
HC.ShowException = self._old_show_exception
- self.DismissAll()
-
- self.Hide()
-
def Dismiss( self, window ):
self._message_vbox.Detach( window )
- wx.CallAfter( window.Destroy )
+ window.Destroy()
self._SizeAndPositionAndShow()
@@ -3607,10 +3607,7 @@ class ShowKeys( Frame ):
self.Show( True )
- def EventDone( self, event ):
-
- self.Destroy()
-
+ def EventDone( self, event ): self.Close()
def EventSaveToFile( self, event ):
diff --git a/include/ClientGUIDialogs.py b/include/ClientGUIDialogs.py
index 1baf3743..eac1ad3f 100755
--- a/include/ClientGUIDialogs.py
+++ b/include/ClientGUIDialogs.py
@@ -2279,11 +2279,11 @@ class DialogInputLocalFiles( Dialog ):
message = 'Tried to read a key, but did not understand it.'
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
- job = HC.Job()
+ job_key = HC.JobKey()
def WXTHREADGetAESKey():
@@ -2301,11 +2301,11 @@ class DialogInputLocalFiles( Dialog ):
( aes_key, iv ) = HydrusEncryption.AESTextToKey( key_text )
- job.PutResult( ( aes_key, iv ) )
+ job_key.SetVariable( 'result', ( aes_key, iv ) )
except: wx.MessageBox( 'Did not understand that key!' )
- elif result == wx.ID_CANCEL: job.PutResult( ( None, None ) )
+ elif result == wx.ID_CANCEL: job_key.SetVariable( 'result', ( None, None ) )
@@ -2314,7 +2314,14 @@ class DialogInputLocalFiles( Dialog ):
wx.CallAfter( WXTHREADGetAESKey )
- ( aes_key, iv ) = job.GetResult()
+ while not job_key.HasVariable( 'result' ):
+
+ if job_key.IsCancelled(): return
+
+ time.sleep( 0.1 )
+
+
+ ( aes_key, iv ) = job_key.GetVariable( 'result' )
if aes_key is not None:
diff --git a/include/ClientGUIDialogsManage.py b/include/ClientGUIDialogsManage.py
index 90f88abe..8a7b2bd1 100644
--- a/include/ClientGUIDialogsManage.py
+++ b/include/ClientGUIDialogsManage.py
@@ -20,7 +20,6 @@ import traceback
import urllib
import wx
import yaml
-import zipfile
# Option Enums
diff --git a/include/ClientGUIManagement.py b/include/ClientGUIManagement.py
index 413550f1..60d3fb69 100755
--- a/include/ClientGUIManagement.py
+++ b/include/ClientGUIManagement.py
@@ -20,15 +20,12 @@ import urllib
import urlparse
import wx
import wx.lib.scrolledpanel
-import zipfile
CAPTCHA_FETCH_EVENT_TYPE = wx.NewEventType()
CAPTCHA_FETCH_EVENT = wx.PyEventBinder( CAPTCHA_FETCH_EVENT_TYPE )
ID_TIMER_CAPTCHA = wx.NewId()
ID_TIMER_DUMP = wx.NewId()
-ID_TIMER_PROCESS_IMPORT_QUEUE = wx.NewId()
-ID_TIMER_PROCESS_FEED_QUEUE = wx.NewId()
ID_TIMER_UPDATE = wx.NewId()
# Sizer Flags
@@ -400,6 +397,8 @@ class ManagementPanel( wx.lib.scrolledpanel.ScrolledPanel ):
sizer.AddF( self._sort_by, FLAGS_EXPAND_PERPENDICULAR )
+ def CleanBeforeDestroy( self ): pass
+
def Pause( self, page_key ):
if page_key == self._page_key: self._paused = True
@@ -412,7 +411,7 @@ class ManagementPanel( wx.lib.scrolledpanel.ScrolledPanel ):
def SetSearchFocus( self, page_key ): pass
- def TryToClose( self ): pass
+ def TestAbleToClose( self ): pass
class ManagementPanelDumper( ManagementPanel ):
@@ -450,19 +449,19 @@ class ManagementPanelDumper( ManagementPanel ):
# progress
- self._processing_panel = ClientGUICommon.StaticBox( self, 'processing' )
+ self._import_queue_panel = ClientGUICommon.StaticBox( self, 'import queue' )
- self._progress_info = wx.StaticText( self._processing_panel )
+ self._progress_info = wx.StaticText( self._import_queue_panel )
- self._progress_gauge = ClientGUICommon.Gauge( self._processing_panel )
+ self._progress_gauge = ClientGUICommon.Gauge( self._import_queue_panel )
self._progress_gauge.SetRange( len( media_results ) )
- self._start_button = wx.Button( self._processing_panel, label = 'start' )
+ self._start_button = wx.Button( self._import_queue_panel, label = 'start' )
self._start_button.Bind( wx.EVT_BUTTON, self.EventStartButton )
- self._processing_panel.AddF( self._progress_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._start_button, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_queue_panel.AddF( self._progress_info, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_queue_panel.AddF( self._progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_queue_panel.AddF( self._start_button, FLAGS_EXPAND_PERPENDICULAR )
# thread options
@@ -558,7 +557,7 @@ class ManagementPanelDumper( ManagementPanel ):
self._MakeSort( vbox )
- vbox.AddF( self._processing_panel, FLAGS_EXPAND_PERPENDICULAR )
+ vbox.AddF( self._import_queue_panel, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._thread_panel, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._post_panel, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR )
@@ -835,7 +834,7 @@ class ManagementPanelDumper( ManagementPanel ):
dump_status_enum = CC.DUMPER_UNRECOVERABLE_ERROR
dump_status_string = ''
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, phrase ) )
+ HC.ShowText( phrase )
self._progress_info.SetLabel( 'error: ' + phrase )
@@ -1143,7 +1142,7 @@ class ManagementPanelDumper( ManagementPanel ):
- def TryToClose( self ):
+ def TestAbleToClose( self ):
if self._dumping:
@@ -1156,77 +1155,114 @@ class ManagementPanelDumper( ManagementPanel ):
class ManagementPanelImport( ManagementPanel ):
- def __init__( self, parent, page, page_key, starting_from_session = False ):
+ def __init__( self, parent, page, page_key, import_controller, starting_from_session = False ):
ManagementPanel.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
- self._successful = 0
- self._failed = 0
- self._deleted = 0
- self._redundant = 0
+ self._import_controller = import_controller
- self._import_queue = []
- self._import_queue_position = 0
+ self._import_panel = ClientGUICommon.StaticBox( self, 'current file' )
- self._import_job_key = HC.JobKey()
- self._import_queue_job_key = HC.JobKey()
- self._feed_queue_job_key = HC.JobKey()
+ self._import_current_info = wx.StaticText( self._import_panel )
+ self._import_gauge = ClientGUICommon.Gauge( self._import_panel )
- self._processing_panel = ClientGUICommon.StaticBox( self, 'progress' )
+ self._import_queue_panel = ClientGUICommon.StaticBox( self, 'import queue' )
- self._import_overall_info = wx.StaticText( self._processing_panel )
- self._import_current_info_string = ''
- self._import_current_info = wx.StaticText( self._processing_panel )
- self._import_gauge = ClientGUICommon.Gauge( self._processing_panel )
+ self._import_overall_info = wx.StaticText( self._import_queue_panel )
+ self._import_queue_info = wx.StaticText( self._import_queue_panel )
+ self._import_queue_gauge = ClientGUICommon.Gauge( self._import_queue_panel )
- self._import_pause_button = wx.Button( self._processing_panel, label = 'pause' )
+ self._import_pause_button = wx.Button( self._import_queue_panel, label = 'pause' )
self._import_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseImportQueue )
self._import_pause_button.Disable()
- self._timer_process_import_queue = wx.Timer( self, id = ID_TIMER_PROCESS_IMPORT_QUEUE )
+ self._import_cancel_button = wx.Button( self._import_queue_panel, label = 'that\'s enough' )
+ self._import_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelImportQueue )
+ self._import_cancel_button.SetForegroundColour( ( 128, 0, 0 ) )
+ self._import_cancel_button.Disable()
- self.Bind( wx.EVT_TIMER, self.TIMEREventProcessImportQueue, id = ID_TIMER_PROCESS_IMPORT_QUEUE )
+ #
- self._timer_process_import_queue.Start( 1000, wx.TIMER_ONE_SHOT )
+ vbox = wx.BoxSizer( wx.VERTICAL )
+
+ self._MakeSort( vbox )
+
+ self._import_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
+
+ vbox.AddF( self._import_panel, FLAGS_EXPAND_PERPENDICULAR )
+
+ c_p_hbox = wx.BoxSizer( wx.HORIZONTAL )
+
+ c_p_hbox.AddF( self._import_pause_button, FLAGS_EXPAND_BOTH_WAYS )
+ c_p_hbox.AddF( self._import_cancel_button, FLAGS_EXPAND_BOTH_WAYS )
+
+ self._import_queue_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_queue_panel.AddF( self._import_queue_info, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_queue_panel.AddF( self._import_queue_gauge, FLAGS_EXPAND_PERPENDICULAR )
+ self._import_queue_panel.AddF( c_p_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
+
+ vbox.AddF( self._import_queue_panel, FLAGS_EXPAND_PERPENDICULAR )
+
+ self._InitExtraVboxElements( vbox )
+
+ self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self )
+
+ vbox.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR )
+
+ self._MakeCurrentSelectionTagsBox( vbox )
+
+ self.SetSizer( vbox )
+
+ #
self.Bind( wx.EVT_TIMER, self.TIMEREventUpdate, id = ID_TIMER_UPDATE )
self._timer_update = wx.Timer( self, id = ID_TIMER_UPDATE )
self._timer_update.Start( 100, wx.TIMER_CONTINUOUS )
- HC.pubsub.sub( self, 'ImportDone', 'import_done' )
- HC.pubsub.sub( self, 'SetImportInfo', 'set_import_info' )
-
- def _GetPreimportStatus( self ):
+ def _InitExtraVboxElements( self, vbox ):
- status = 'importing ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) )
+ pass
- return status
-
-
- def _GetPreprocessStatus( self ): pass
def _UpdateGUI( self ):
+ import_controller_job_key = self._import_controller.GetJobKey( 'controller' )
+ import_job_key = self._import_controller.GetJobKey( 'import' )
+ import_queue_position_job_key = self._import_controller.GetJobKey( 'import_queue_position' )
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
+
# info
status_strings = []
- if self._successful > 0: status_strings.append( HC.u( self._successful ) + ' successful' )
- if self._failed > 0: status_strings.append( HC.u( self._failed ) + ' failed' )
- if self._deleted > 0: status_strings.append( HC.u( self._deleted ) + ' already deleted' )
- if self._redundant > 0: status_strings.append( HC.u( self._redundant ) + ' already in db' )
+ num_successful = import_controller_job_key.GetVariable( 'num_successful' )
+ num_failed = import_controller_job_key.GetVariable( 'num_failed' )
+ num_deleted = import_controller_job_key.GetVariable( 'num_deleted' )
+ num_redundant = import_controller_job_key.GetVariable( 'num_redundant' )
+
+ if num_successful > 0: status_strings.append( HC.u( num_successful ) + ' successful' )
+ if num_failed > 0: status_strings.append( HC.u( num_failed ) + ' failed' )
+ if num_deleted > 0: status_strings.append( HC.u( num_deleted ) + ' already deleted' )
+ if num_redundant > 0: status_strings.append( HC.u( num_redundant ) + ' already in db' )
overall_info = ', '.join( status_strings )
if overall_info != self._import_overall_info.GetLabel(): self._import_overall_info.SetLabel( overall_info )
- if self._import_current_info_string != self._import_current_info.GetLabel(): self._import_current_info.SetLabel( self._import_current_info_string )
+ import_status = import_job_key.GetVariable( 'status' )
+
+ if import_status != self._import_current_info.GetLabel(): self._import_current_info.SetLabel( import_status )
+
+ import_queue_status = import_queue_position_job_key.GetVariable( 'status' )
+
+ if import_queue_status != self._import_queue_info.GetLabel(): self._import_queue_info.SetLabel( import_queue_status )
# buttons
- if self._import_queue_job_key.IsPaused():
+ if import_queue_position_job_key.IsPaused():
if self._import_pause_button.GetLabel() != 'resume':
@@ -1243,148 +1279,79 @@ class ManagementPanelImport( ManagementPanel ):
- if self._import_queue_job_key.IsWorking() and not self._import_queue_job_key.IsCancelled(): self._import_pause_button.Enable()
- else: self._import_pause_button.Disable()
-
-
- def CALLBACKAddToImportQueue( self, items ):
-
- if self._import_queue_job_key.IsWorking(): self._import_queue.extend( items )
- else:
+ if import_queue_position_job_key.IsWorking() and not import_queue_position_job_key.IsCancelled():
- self._import_queue = items
- self._import_queue_position = 0
-
- self._timer_process_import_queue.Start( 10, wx.TIMER_ONE_SHOT )
-
- self._import_queue_job_key = HC.JobKey()
-
- self._import_queue_job_key.Begin()
-
- self._UpdateGUI()
-
-
- self._import_gauge.SetRange( len( self._import_queue ) )
-
-
- def CALLBACKImportArgs( self, path, advanced_import_options, service_identifiers_to_tags, url = None, exception = None ):
-
- if exception is None:
-
- self._import_current_info_string = self._GetPreimportStatus()
-
- HC.app.Write( 'import_file_from_page', self._page_key, path, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, url = url )
+ self._import_pause_button.Enable()
+ self._import_cancel_button.Enable()
else:
- self._import_job_key.Cancel()
- self._import_current_info_string = HC.u( exception )
- self._import_gauge.SetValue( self._import_queue_position + 1 )
- self._import_queue_position += 1
+ self._import_pause_button.Disable()
+ self._import_cancel_button.Disable()
- self._timer_process_import_queue.Start( 2000, wx.TIMER_ONE_SHOT )
+
+ # gauges
+
+ range = import_job_key.GetVariable( 'range' )
+
+ if range is None: self._import_gauge.Pulse()
+ else:
+
+ value = import_job_key.GetVariable( 'value' )
+
+ self._import_gauge.SetRange( range )
+ self._import_gauge.SetValue( value )
+
+
+ queue = import_queue_job_key.GetVariable( 'queue' )
+
+ if len( queue ) == 0:
+
+ if import_queue_job_key.IsWorking(): self._import_queue_gauge.Pulse()
+ else:
+
+ self._import_queue_gauge.SetRange( 1 )
+ self._import_queue_gauge.SetValue( 0 )
+
+
+ else:
+
+ queue_position = import_queue_position_job_key.GetVariable( 'queue_position' )
+
+ self._import_queue_gauge.SetRange( len( queue ) )
+ self._import_queue_gauge.SetValue( queue_position )
- def EventPauseImportQueue( self, event ):
+ def EventCancelImportQueue( self, event ):
- self._import_queue_job_key.PauseResume()
+ import_queue_position_job_key = self._import_controller.GetJobKey( 'import_queue_position' )
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
+
+ import_queue_position_job_key.Cancel()
+ import_queue_job_key.Cancel()
self._UpdateGUI()
- def ImportDone( self, page_key, result, exception = None ):
+ def EventPauseImportQueue( self, event ):
- if page_key == self._page_key:
-
- if result == 'successful': self._successful += 1
- elif result == 'failed': self._failed += 1
- elif result == 'deleted': self._deleted += 1
- elif result == 'redundant': self._redundant += 1
-
- self._import_job_key.Finish()
- self._import_gauge.SetValue( self._import_queue_position + 1 )
- self._import_queue_position += 1
-
- if exception is None: self._timer_process_import_queue.Start( 10, wx.TIMER_ONE_SHOT )
- else:
-
- message = os.linesep + 'Had trouble importing ' + HC.u( self._import_queue[ self._import_queue_position - 1 ] ) + ':' + os.linesep + HC.u( exception )
-
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
-
- self._import_current_info_string = HC.u( exception )
-
- self._timer_process_import_queue.Start( 2000, wx.TIMER_ONE_SHOT )
-
-
+ import_queue_position_job_key = self._import_controller.GetJobKey( 'import_queue_position' )
+
+ import_queue_position_job_key.PauseResume()
+
+ self._UpdateGUI()
- def SetImportInfo( self, page_key, info ):
-
- if self._page_key == page_key: self._import_current_info_string = info
-
-
- def TIMEREventProcessImportQueue( self, event ):
-
- if self._import_queue_job_key.IsPaused() or self._paused: self._import_current_info_string = 'paused'
- else:
-
- if self._import_queue_job_key.IsCancelled(): self._import_queue = self._import_queue[ : self._import_queue_position ] # cut excess queue
-
- if len( self._import_queue ) == 0: self._import_current_info_string = ''
- else:
-
- if not self._import_job_key.IsWorking():
-
- if self._import_queue_position < len( self._import_queue ):
-
- self._import_job_key = HC.JobKey()
-
- self._import_job_key.Begin()
-
- self._import_current_info_string = self._GetPreprocessStatus()
-
- item = self._import_queue[ self._import_queue_position ]
-
- threading.Thread( target = self._THREADGetImportArgs, args = ( item, ), name = 'Generate Import Args' ).start()
-
- else:
-
- if self._feed_queue_job_key.IsWorking(): self._import_current_info_string = 'waiting for more items'
- else:
-
- num_files_processed = 0
-
- num_files_processed += self._successful
- num_files_processed += self._failed
- num_files_processed += self._deleted
- num_files_processed += self._redundant
-
- if num_files_processed > 0: status = 'import done'
- else: status = 'import abandoned'
-
- self._import_current_info_string = status
-
- self._import_queue_job_key.Finish()
-
- self._feed_queue_job_key.Finish()
-
- self._UpdateGUI()
-
-
-
-
-
-
- self._timer_process_import_queue.Start( 1000, wx.TIMER_ONE_SHOT )
-
+ def GetAdvancedImportOptions( self ): return self._advanced_import_options.GetInfo()
def TIMEREventUpdate( self, event ): self._UpdateGUI()
- def TryToClose( self ):
+ def TestAbleToClose( self ):
- if self._import_queue_job_key.IsWorking() and not self._import_queue_job_key.IsPaused():
+ import_queue_position_job_key = self._import_controller.GetJobKey( 'import_queue_position' )
+
+ if import_queue_position_job_key.IsWorking() and not import_queue_position_job_key.IsPaused():
with ClientGUIDialogs.DialogYesNo( self, 'This page is still importing. Are you sure you want to close it?' ) as dlg:
@@ -1393,175 +1360,148 @@ class ManagementPanelImport( ManagementPanel ):
-class ManagementPanelImportHDD( ManagementPanelImport ):
+class ManagementPanelImports( ManagementPanelImport ):
- def __init__( self, parent, page, page_key, paths_info, advanced_import_options = {}, paths_to_tags = {}, delete_after_success = False, starting_from_session = False ):
+ def _InitExtraVboxElements( self, vbox ):
- self._advanced_import_options = advanced_import_options
- self._paths_to_tags = paths_to_tags
- self._delete_after_success = delete_after_success
+ ManagementPanelImport._InitExtraVboxElements( self, vbox )
- ManagementPanelImport.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
+ #
- vbox = wx.BoxSizer( wx.VERTICAL )
+ self._building_import_queue_panel = ClientGUICommon.StaticBox( self, 'building import queue' )
- self._MakeSort( vbox )
+ self._building_import_queue_info = wx.StaticText( self._building_import_queue_panel )
- self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_pause_button, FLAGS_EXPAND_PERPENDICULAR )
+ self._building_import_queue_pause_button = wx.Button( self._building_import_queue_panel, label = 'pause' )
+ self._building_import_queue_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseBuildImportQueue )
+ self._building_import_queue_pause_button.Disable()
- vbox.AddF( self._processing_panel, FLAGS_EXPAND_PERPENDICULAR )
+ self._building_import_queue_cancel_button = wx.Button( self._building_import_queue_panel, label = 'that\'s enough' )
+ self._building_import_queue_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelBuildImportQueue )
+ self._building_import_queue_cancel_button.SetForegroundColour( ( 128, 0, 0 ) )
+ self._building_import_queue_cancel_button.Disable()
- self._MakeCurrentSelectionTagsBox( vbox )
+ queue_pause_buttons_hbox = wx.BoxSizer( wx.HORIZONTAL )
- self.SetSizer( vbox )
+ queue_pause_buttons_hbox.AddF( self._building_import_queue_pause_button, FLAGS_EXPAND_BOTH_WAYS )
+ queue_pause_buttons_hbox.AddF( self._building_import_queue_cancel_button, FLAGS_EXPAND_BOTH_WAYS )
- if self._starting_from_session: self._processing_panel.Hide()
+ self._building_import_queue_panel.AddF( self._building_import_queue_info, FLAGS_EXPAND_PERPENDICULAR )
+ self._building_import_queue_panel.AddF( queue_pause_buttons_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
- self.CALLBACKAddToImportQueue( paths_info )
+ vbox.AddF( self._building_import_queue_panel, FLAGS_EXPAND_PERPENDICULAR )
-
- def _THREADGetImportArgs( self, queue_object ):
+ #
- try:
-
- self._last_queue_object = queue_object
-
- ( path_type, path_info ) = queue_object
-
- service_identifiers_to_tags = {}
-
- if path_type == 'path':
-
- path = path_info
-
- if path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ path ]
-
- elif path_type == 'zip':
-
- ( zip_path, name ) = path_info
-
- pretty_path = zip_path + os.path.sep + name
-
- if pretty_path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ pretty_path ]
-
- path = HC.GetTempPath()
-
- with open( path, 'wb' ) as f:
-
- with zipfile.ZipFile( zip_path, 'r' ) as z: f.write( z.read( name ) )
-
-
-
- wx.CallAfter( self.CALLBACKImportArgs, path, self._advanced_import_options, service_identifiers_to_tags )
-
- except Exception as e:
-
- wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e )
-
- raise
-
+ self._pending_import_queues_panel = ClientGUICommon.StaticBox( self, 'pending imports' )
-
- def _GetPreprocessStatus( self ):
+ self._pending_import_queues_listbox = wx.ListBox( self._pending_import_queues_panel, size = ( -1, 200 ) )
- status = 'reading ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) )
-
- return status
-
-
- def ImportDone( self, page_key, result, exception = None ):
-
- if page_key == self._page_key:
-
- ManagementPanelImport.ImportDone( self, page_key, result, exception = exception )
-
- if self._delete_after_success and result in ( 'successful', 'redundant' ):
-
- ( path_type, path_info ) = self._last_queue_object
-
- if path_type == 'path':
-
- path = path_info
-
- try: os.remove( path )
- except: pass
-
-
-
-
-
-class ManagementPanelImportWithQueue( ManagementPanelImport ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- ManagementPanelImport.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
-
- self._download_progress_gauge = ClientGUICommon.Gauge( self._processing_panel )
-
- self._import_cancel_button = wx.Button( self._processing_panel, label = 'that\'s enough' )
- self._import_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelImportQueue )
- self._import_cancel_button.SetForegroundColour( ( 128, 0, 0 ) )
- self._import_cancel_button.Disable()
-
- self._feed_queue_panel = ClientGUICommon.StaticBox( self, 'queue' )
-
- self._feed_queue_info_string = ''
- self._feed_queue_info = wx.StaticText( self._feed_queue_panel )
-
- self._feed_queue = wx.ListBox( self._feed_queue_panel, size = ( -1, 200 ) )
-
- self._new_queue_input = wx.TextCtrl( self._feed_queue_panel, style=wx.TE_PROCESS_ENTER )
+ self._new_queue_input = wx.TextCtrl( self._pending_import_queues_panel, style = wx.TE_PROCESS_ENTER )
self._new_queue_input.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown )
- self._up = wx.Button( self._feed_queue_panel, label = u'\u2191' )
+ self._up = wx.Button( self._pending_import_queues_panel, label = u'\u2191' )
self._up.Bind( wx.EVT_BUTTON, self.EventUp )
- self._remove = wx.Button( self._feed_queue_panel, label = 'X' )
+ self._remove = wx.Button( self._pending_import_queues_panel, label = 'X' )
self._remove.Bind( wx.EVT_BUTTON, self.EventRemove )
- self._down = wx.Button( self._feed_queue_panel, label = u'\u2193' )
+ self._down = wx.Button( self._pending_import_queues_panel, label = u'\u2193' )
self._down.Bind( wx.EVT_BUTTON, self.EventDown )
- self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self )
+ queue_buttons_vbox = wx.BoxSizer( wx.VERTICAL )
- self._feed_queue_timer = wx.Timer( self, id = ID_TIMER_PROCESS_FEED_QUEUE )
+ queue_buttons_vbox.AddF( self._up, FLAGS_MIXED )
+ queue_buttons_vbox.AddF( self._remove, FLAGS_MIXED )
+ queue_buttons_vbox.AddF( self._down, FLAGS_MIXED )
- self.Bind( wx.EVT_TIMER, self.TIMEREventProcessFeedQueue, id = ID_TIMER_PROCESS_FEED_QUEUE )
+ queue_hbox = wx.BoxSizer( wx.HORIZONTAL )
- self._feed_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT )
+ queue_hbox.AddF( self._pending_import_queues_listbox, FLAGS_EXPAND_BOTH_WAYS )
+ queue_hbox.AddF( queue_buttons_vbox, FLAGS_MIXED )
- HC.pubsub.sub( self, 'SetFeedQueueInfo', 'set_feed_queue_info' )
- HC.pubsub.sub( self, 'SetDownloadProgress', 'set_download_progress' )
+ self._pending_import_queues_panel.AddF( queue_hbox, FLAGS_EXPAND_SIZER_BOTH_WAYS )
+ self._pending_import_queues_panel.AddF( self._new_queue_input, FLAGS_EXPAND_PERPENDICULAR )
-
- def _GetPreprocessStatus( self ):
+ vbox.AddF( self._pending_import_queues_panel, FLAGS_EXPAND_BOTH_WAYS )
- status = 'checking url status ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) )
-
- return status
+ wx.CallAfter( self._new_queue_input.SelectAll ) # to select the 'artist username' init gumpf
def _UpdateGUI( self ):
- super( ManagementPanelImportWithQueue, self )._UpdateGUI()
+ ManagementPanelImport._UpdateGUI( self )
+
+ import_job_key = self._import_controller.GetJobKey( 'import' )
+ import_queue_position_job_key = self._import_controller.GetJobKey( 'import_queue_position' )
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
# info
- if self._feed_queue_info_string != self._feed_queue_info.GetLabel(): self._feed_queue_info.SetLabel( self._feed_queue_info_string )
+ extend_import_queue_status = import_queue_job_key.GetVariable( 'status' )
+
+ if extend_import_queue_status != self._building_import_queue_info.GetLabel(): self._building_import_queue_info.SetLabel( extend_import_queue_status )
# buttons
- if self._import_queue_job_key.IsWorking() and not self._import_queue_job_key.IsCancelled(): self._import_cancel_button.Enable()
- else: self._import_cancel_button.Disable()
+ #
+
+ if import_queue_job_key.IsPaused():
+
+ if self._building_import_queue_pause_button.GetLabel() != 'resume':
+
+ self._building_import_queue_pause_button.SetLabel( 'resume' )
+ self._building_import_queue_pause_button.SetForegroundColour( ( 0, 128, 0 ) )
+
+
+ else:
+
+ if self._building_import_queue_pause_button.GetLabel() != 'pause':
+
+ self._building_import_queue_pause_button.SetLabel( 'pause' )
+ self._building_import_queue_pause_button.SetForegroundColour( ( 0, 0, 0 ) )
+
+
+
+ if import_queue_job_key.IsWorking() and not import_queue_job_key.IsCancelled():
+
+ self._building_import_queue_pause_button.Enable()
+ self._building_import_queue_cancel_button.Enable()
+
+ else:
+
+ self._building_import_queue_pause_button.Disable()
+ self._building_import_queue_cancel_button.Disable()
+
+
+ # gauge
+
+ range = import_job_key.GetVariable( 'range' )
+
+ if range is None: self._import_gauge.Pulse()
+ else:
+
+ value = import_job_key.GetVariable( 'value' )
+
+ self._import_gauge.SetRange( range )
+ self._import_gauge.SetValue( value )
+
+
+ # pending import queues
+
+ import_queues = self._import_controller.GetPendingImportQueues()
+
+ if import_queues != self._pending_import_queues_listbox.GetItems():
+
+ self._pending_import_queues_listbox.SetItems( import_queues )
+
- def EventCancelImportQueue( self, event ):
+ def EventCancelBuildImportQueue( self, event ):
- self._import_queue_job_key.Cancel()
- self._feed_queue_job_key.Cancel()
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
+
+ import_queue_job_key.Cancel()
self._UpdateGUI()
@@ -1570,13 +1510,13 @@ class ManagementPanelImportWithQueue( ManagementPanelImport ):
if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ):
- url = self._new_queue_input.GetValue()
+ s = self._new_queue_input.GetValue()
- if url != '':
+ if s != '':
- self._feed_queue.Append( url, url )
+ self._import_controller.PendImportQueue( s )
- self._feed_queue_timer.Start( 10, wx.TIMER_ONE_SHOT )
+ self._UpdateGUI()
self._new_queue_input.SetValue( '' )
@@ -1584,648 +1524,163 @@ class ManagementPanelImportWithQueue( ManagementPanelImport ):
else: event.Skip()
+ def EventPauseBuildImportQueue( self, event ):
+
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
+
+ import_queue_job_key.PauseResume()
+
+ self._UpdateGUI()
+
+
def EventUp( self, event ):
- selection = self._feed_queue.GetSelection()
+ selection = self._pending_import_queues_listbox.GetSelection()
if selection != wx.NOT_FOUND:
if selection > 0:
- url = self._feed_queue.GetClientData( selection )
+ s = self._pending_import_queues_listbox.GetString( selection )
- self._feed_queue.Delete( selection )
+ self._import_controller.MovePendingImportQueueUp( s )
- self._feed_queue.Insert( url, selection - 1, url )
+ self._UpdateGUI()
- self._feed_queue.Select( selection - 1 )
+ self._pending_import_queues_listbox.Select( selection - 1 )
def EventRemove( self, event ):
- selection = self._feed_queue.GetSelection()
+ selection = self._pending_import_queues_listbox.GetSelection()
- if selection != wx.NOT_FOUND: self._feed_queue.Delete( selection )
+ if selection != wx.NOT_FOUND:
+
+ s = self._pending_import_queues_listbox.GetString( selection )
+
+ self._import_controller.RemovePendingImportQueue( s )
+
+ self._UpdateGUI()
+
def EventDown( self, event ):
- selection = self._feed_queue.GetSelection()
+ selection = self._pending_import_queues_listbox.GetSelection()
if selection != wx.NOT_FOUND:
- if selection + 1 < self._feed_queue.GetCount():
+ if selection + 1 < self._pending_import_queues_listbox.GetCount():
- url = self._feed_queue.GetClientData( selection )
+ s = self._pending_import_queues_listbox.GetString( selection )
- self._feed_queue.Delete( selection )
+ self._import_controller.MovePendingImportQueueDown( s )
- self._feed_queue.Insert( url, selection + 1, url )
+ self._UpdateGUI()
- self._feed_queue.Select( selection + 1 )
+ self._pending_import_queues_listbox.Select( selection + 1 )
- def SetDownloadProgress( self, range, value ):
-
- if range is None: self._download_progress_gauge.Pulse()
- else:
-
- self._download_progress_gauge.SetRange( range )
- self._download_progress_gauge.SetValue( value )
-
-
-
- def SetFeedQueueInfo( self, page_key, info ):
-
- if self._page_key == page_key: self._feed_queue_info_string = info
-
-
- def SetImportInfo( self, page_key, info ):
-
- if self._page_key == page_key: self._import_current_info_string = info
-
-
def SetSearchFocus( self, page_key ):
if page_key == self._page_key: self._new_queue_input.SetFocus()
- def TIMEREventProcessFeedQueue( self, event ):
-
- if self._feed_queue_job_key.IsPaused(): self._feed_queue_info_string = 'paused'
- else:
-
- if self._feed_queue.GetCount() > 0 and not self._import_queue_job_key.IsWorking() and not self._feed_queue_job_key.IsWorking():
-
- self._feed_queue_job_key = HC.JobKey()
-
- self._feed_queue_job_key.Begin()
-
- item = self._feed_queue.GetClientData( 0 )
-
- self._feed_queue.Delete( 0 )
-
- threading.Thread( target = self._THREADDownloadImportItems, args = ( item, ), name = 'Generate Import Items' ).start()
-
-
-
- self._feed_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT )
-
+class ManagementPanelImportsGallery( ManagementPanelImports ):
-class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
-
- def __init__( self, parent, page, page_key, name, namespaces, starting_from_session = False ):
+ def __init__( self, parent, page, page_key, import_controller, name, namespaces, initial_search_value, starting_from_session = False ):
- ManagementPanelImportWithQueue.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
+ self._name = name
+ self._namespaces = namespaces
+ self._initial_search_value = initial_search_value
- self._advanced_tag_options = ClientGUICommon.AdvancedTagOptions( self, 'send ' + name + ' tags to ', namespaces )
-
- self._feed_queue_pause_button = wx.Button( self._feed_queue_panel, label = 'pause' )
- self._feed_queue_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseFeedQueue )
- self._feed_queue_pause_button.Disable()
-
- self._feed_queue_cancel_button = wx.Button( self._feed_queue_panel, label = 'that\'s enough' )
- self._feed_queue_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelFeedQueue )
- self._feed_queue_cancel_button.SetForegroundColour( ( 128, 0, 0 ) )
- self._feed_queue_cancel_button.Disable()
-
- c_p_hbox = wx.BoxSizer( wx.HORIZONTAL )
-
- c_p_hbox.AddF( self._import_pause_button, FLAGS_EXPAND_BOTH_WAYS )
- c_p_hbox.AddF( self._import_cancel_button, FLAGS_EXPAND_BOTH_WAYS )
-
- self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._download_progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( c_p_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
-
- queue_buttons_vbox = wx.BoxSizer( wx.VERTICAL )
-
- queue_buttons_vbox.AddF( self._up, FLAGS_MIXED )
- queue_buttons_vbox.AddF( self._remove, FLAGS_MIXED )
- queue_buttons_vbox.AddF( self._down, FLAGS_MIXED )
-
- queue_pause_buttons_hbox = wx.BoxSizer( wx.HORIZONTAL )
-
- queue_pause_buttons_hbox.AddF( self._feed_queue_pause_button, FLAGS_EXPAND_BOTH_WAYS )
- queue_pause_buttons_hbox.AddF( self._feed_queue_cancel_button, FLAGS_EXPAND_BOTH_WAYS )
-
- queue_hbox = wx.BoxSizer( wx.HORIZONTAL )
-
- queue_hbox.AddF( self._feed_queue, FLAGS_EXPAND_BOTH_WAYS )
- queue_hbox.AddF( queue_buttons_vbox, FLAGS_MIXED )
-
- self._feed_queue_panel.AddF( queue_pause_buttons_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
- self._feed_queue_panel.AddF( self._feed_queue_info, FLAGS_EXPAND_PERPENDICULAR )
- self._feed_queue_panel.AddF( queue_hbox, FLAGS_EXPAND_SIZER_BOTH_WAYS )
- self._feed_queue_panel.AddF( self._new_queue_input, FLAGS_EXPAND_PERPENDICULAR )
-
- vbox = wx.BoxSizer( wx.VERTICAL )
-
- self._MakeSort( vbox )
-
- vbox.AddF( self._processing_panel, FLAGS_EXPAND_PERPENDICULAR )
- vbox.AddF( self._feed_queue_panel, FLAGS_EXPAND_BOTH_WAYS )
- self._InitExtraVboxElements( vbox )
- vbox.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR )
- vbox.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR )
-
- self._MakeCurrentSelectionTagsBox( vbox )
-
- self.SetSizer( vbox )
-
- wx.CallAfter( self._new_queue_input.SelectAll )
-
-
- def _InitExtraVboxElements( self, vbox ): pass
-
- def _THREADGetImportArgs( self, url_args ):
-
- try:
-
- downloader = self._GetDownloaders( 'example' )[0]
-
- advanced_tag_options = self._advanced_tag_options.GetInfo()
-
- do_tags = len( advanced_tag_options ) > 0
-
- url = url_args[0]
-
- ( status, hash ) = HC.app.Read( 'url_status', url )
-
- if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new'
-
- if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
- elif status == 'redundant':
-
- ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
-
- HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
-
- if do_tags:
-
- tags = downloader.GetTags( *url_args )
-
- service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
-
- service_identifiers_to_content_updates = HydrusDownloading.ConvertServiceIdentifiersToTagsToServiceIdentifiersToContentUpdates( hash, service_identifiers_to_tags )
-
- HC.app.Write( 'content_updates', service_identifiers_to_content_updates )
-
-
- HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
-
- else:
-
- HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) ) )
-
- def hook( range, value ): wx.CallAfter( self.SetDownloadProgress, range, value )
-
- downloader.AddReportHook( hook )
-
- if do_tags: ( temp_path, tags ) = downloader.GetFileAndTags( *url_args )
- else:
-
- temp_path = downloader.GetFile( *url_args )
-
- tags = []
-
-
- downloader.ClearReportHooks()
-
- service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
-
- advanced_import_options = self._advanced_import_options.GetInfo()
-
- wx.CallAfter( self.CALLBACKImportArgs, temp_path, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, url = url )
-
-
- except Exception as e:
-
- wx.CallAfter( self.CALLBACKImportArgs, self._page_key, '', {}, {}, exception = e )
-
- raise
-
-
-
- def _THREADDownloadImportItems( self, raw_query ):
-
- # this is important, because we'll instantiate new objects in the eventcancel
-
- feed_queue_job_key = self._feed_queue_job_key
-
- try:
-
- downloaders = list( self._GetDownloaders( raw_query ) )
-
- downloaders[0].SetupGallerySearch() # for now this is cookie-based for hf, so only have to do it on one
-
- total_urls_found = 0
-
- while True:
-
- downloaders_to_remove = []
-
- for downloader in downloaders:
-
- HC.pubsub.pub( 'set_feed_queue_info', self._page_key, 'found ' + HC.u( total_urls_found ) + ' urls' )
-
- self._feed_queue_job_key.WaitOnPause()
-
- if feed_queue_job_key.IsCancelled(): break
-
- page_of_url_args = downloader.GetAnotherPage()
-
- total_urls_found += len( page_of_url_args )
-
- if len( page_of_url_args ) == 0: downloaders_to_remove.append( downloader )
- else: wx.CallAfter( self.CALLBACKAddToImportQueue, page_of_url_args )
-
-
- if feed_queue_job_key.IsCancelled(): break
-
- for downloader in downloaders_to_remove: downloaders.remove( downloader )
-
- if len( downloaders ) == 0: break
-
-
- HC.pubsub.pub( 'set_feed_queue_info', self._page_key, '' )
-
- finally: self._feed_queue_job_key.Finish()
-
-
- def _UpdateGUI( self ):
-
- super( ManagementPanelImportWithQueueAdvanced, self )._UpdateGUI()
-
- # info
-
- # buttons
-
- if self._feed_queue_job_key.IsPaused():
-
- if self._feed_queue_pause_button.GetLabel() != 'resume':
-
- self._feed_queue_pause_button.SetLabel( 'resume' )
- self._feed_queue_pause_button.SetForegroundColour( ( 0, 128, 0 ) )
-
-
- else:
-
- if self._feed_queue_pause_button.GetLabel() != 'pause':
-
- self._feed_queue_pause_button.SetLabel( 'pause' )
- self._feed_queue_pause_button.SetForegroundColour( ( 0, 0, 0 ) )
-
-
-
- if self._feed_queue_job_key.IsWorking() and not self._feed_queue_job_key.IsCancelled():
-
- self._feed_queue_pause_button.Enable()
- self._feed_queue_cancel_button.Enable()
-
- else:
-
- self._feed_queue_pause_button.Disable()
- self._feed_queue_cancel_button.Disable()
-
-
-
- def EventCancelFeedQueue( self, event ):
-
- self._feed_queue_job_key.Cancel()
-
- self._UpdateGUI()
-
-
- def EventPauseFeedQueue( self, event ):
-
- self._feed_queue_job_key.PauseResume()
-
- self._UpdateGUI()
-
-
-class ManagementPanelImportWithQueueAdvancedBooru( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, booru, starting_from_session = False ):
-
- self._booru = booru
-
- name = self._booru.GetName()
- namespaces = booru.GetNamespaces()
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
-
-
- def _GetDownloaders( self, raw_tags ):
-
- tags = raw_tags.split( ' ' )
-
- return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_BOORU, self._booru, tags ), )
-
-
-class ManagementPanelImportWithQueueAdvancedDeviantArt( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- name = 'deviant art'
- namespaces = [ 'creator', 'title', '' ]
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
-
- self._new_queue_input.SetValue( 'artist username' )
-
-
- def _GetDownloaders( self, artist ): return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_DEVIANT_ART, artist ), )
-
-class ManagementPanelImportWithQueueAdvancedGiphy( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- name = 'giphy'
- namespaces = [ '' ]
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
-
- self._new_queue_input.SetValue( 'tag' )
-
-
- def _GetDownloaders( self, tag ): return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_GIPHY, tag ), )
-
-class ManagementPanelImportWithQueueAdvancedHentaiFoundry( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- name = 'hentai foundry'
- namespaces = [ 'creator', 'title', '' ]
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
+ ManagementPanelImports.__init__( self, parent, page, page_key, import_controller, starting_from_session = starting_from_session )
def _InitExtraVboxElements( self, vbox ):
+ ManagementPanelImports._InitExtraVboxElements( self, vbox )
+
+ #
+
+ self._new_queue_input.SetValue( self._initial_search_value )
+
+ #
+
+ self._advanced_tag_options = ClientGUICommon.AdvancedTagOptions( self, 'send ' + self._name + ' tags to ', self._namespaces )
+
+ vbox.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR )
+
+
+ def GetAdvancedTagOptions( self ): return self._advanced_tag_options.GetInfo()
+
+class ManagementPanelImportsGalleryHentaiFoundry( ManagementPanelImportsGallery ):
+
+ def _InitExtraVboxElements( self, vbox ):
+
+ ManagementPanelImportsGallery._InitExtraVboxElements( self, vbox )
+
self._advanced_hentai_foundry_options = ClientGUICommon.AdvancedHentaiFoundryOptions( self )
vbox.AddF( self._advanced_hentai_foundry_options, FLAGS_EXPAND_PERPENDICULAR )
-class ManagementPanelImportWithQueueAdvancedHentaiFoundryArtist( ManagementPanelImportWithQueueAdvancedHentaiFoundry ):
+ def GetAdvancedHentaiFoundryOptions( self ): return self._advanced_hentai_foundry_options.GetInfo()
- def __init__( self, parent, page, page_key, starting_from_session = False ):
+class ManagementPanelImportsURL( ManagementPanelImports ):
+
+ def _InitExtraVboxElements( self, vbox ):
- ManagementPanelImportWithQueueAdvancedHentaiFoundry.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
+ ManagementPanelImports._InitExtraVboxElements( self, vbox )
- self._new_queue_input.SetValue( 'artist username' )
+ self._building_import_queue_pause_button.Hide()
+ self._building_import_queue_cancel_button.Hide()
- def _GetDownloaders( self, artist ):
+class ManagementPanelImportHDD( ManagementPanelImport ):
+
+ def __init__( self, *args, **kwargs ):
- pictures_downloader = HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_HENTAI_FOUNDRY, 'artist pictures', artist, self._advanced_hentai_foundry_options.GetInfo() )
- scraps_downloader = HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_HENTAI_FOUNDRY, 'artist scraps', artist, self._advanced_hentai_foundry_options.GetInfo() )
+ ManagementPanelImport.__init__( self, *args, **kwargs )
- return ( pictures_downloader, scraps_downloader )
+ self._advanced_import_options.Hide()
-class ManagementPanelImportWithQueueAdvancedHentaiFoundryTags( ManagementPanelImportWithQueueAdvancedHentaiFoundry ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
+ def _InitExtraVboxElements( self, vbox ):
- ManagementPanelImportWithQueueAdvancedHentaiFoundry.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
+ ManagementPanelImport._InitExtraVboxElements( self, vbox )
- self._new_queue_input.SetValue( 'search tags' )
-
-
- def _GetDownloaders( self, tags_string ):
-
- tags = tags_string.split( ' ' )
-
- return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_HENTAI_FOUNDRY, 'tags', tags, self._advanced_hentai_foundry_options.GetInfo() ), )
-
-
-class ManagementPanelImportWithQueueAdvancedNewgrounds( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- name = 'newgrounds'
- namespaces = [ 'creator', 'title', '' ]
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
-
- self._new_queue_input.SetValue( 'artist' )
-
-
- def _GetDownloaders( self, artist ): return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_NEWGROUNDS, artist ), )
-
-class ManagementPanelImportWithQueueAdvancedPixiv( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- name = 'pixiv'
- namespaces = [ 'creator', 'title', '' ]
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
-
-
-class ManagementPanelImportWithQueueAdvancedPixivArtist( ManagementPanelImportWithQueueAdvancedPixiv ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- ManagementPanelImportWithQueueAdvancedPixiv.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
-
- self._new_queue_input.SetValue( 'artist id number' )
-
-
- def _GetDownloaders( self, query ): return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_PIXIV, 'artist', query ), )
-
-class ManagementPanelImportWithQueueAdvancedPixivTag( ManagementPanelImportWithQueueAdvancedPixiv ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- ManagementPanelImportWithQueueAdvancedPixiv.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
-
- self._new_queue_input.SetValue( 'search tag' )
-
-
- def _GetDownloaders( self, query ): return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_PIXIV, 'tag', query ), )
-
-class ManagementPanelImportWithQueueAdvancedTumblr( ManagementPanelImportWithQueueAdvanced ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- name = 'tumblr'
- namespaces = [ '' ]
-
- ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces, starting_from_session = starting_from_session )
-
- self._new_queue_input.SetValue( 'username' )
-
-
- def _GetDownloaders( self, username ): return ( HydrusDownloading.GetDownloader( HC.SITE_DOWNLOAD_TYPE_TUMBLR, username ), )
-
-class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ):
-
- def __init__( self, parent, page, page_key, starting_from_session = False ):
-
- ManagementPanelImportWithQueue.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
-
- self._connections = {}
-
- c_p_hbox = wx.BoxSizer( wx.HORIZONTAL )
-
- c_p_hbox.AddF( self._import_pause_button, FLAGS_EXPAND_BOTH_WAYS )
- c_p_hbox.AddF( self._import_cancel_button, FLAGS_EXPAND_BOTH_WAYS )
-
- self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._download_progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( c_p_hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
-
- queue_buttons_vbox = wx.BoxSizer( wx.VERTICAL )
-
- queue_buttons_vbox.AddF( self._up, FLAGS_MIXED )
- queue_buttons_vbox.AddF( self._remove, FLAGS_MIXED )
- queue_buttons_vbox.AddF( self._down, FLAGS_MIXED )
-
- queue_hbox = wx.BoxSizer( wx.HORIZONTAL )
-
- queue_hbox.AddF( self._feed_queue, FLAGS_EXPAND_BOTH_WAYS )
- queue_hbox.AddF( queue_buttons_vbox, FLAGS_MIXED )
-
- self._feed_queue_panel.AddF( self._feed_queue_info, FLAGS_EXPAND_PERPENDICULAR )
- self._feed_queue_panel.AddF( queue_hbox, FLAGS_EXPAND_SIZER_BOTH_WAYS )
- self._feed_queue_panel.AddF( self._new_queue_input, FLAGS_EXPAND_PERPENDICULAR )
-
- vbox = wx.BoxSizer( wx.VERTICAL )
-
- self._MakeSort( vbox )
-
- vbox.AddF( self._processing_panel, FLAGS_EXPAND_PERPENDICULAR )
- vbox.AddF( self._feed_queue_panel, FLAGS_EXPAND_BOTH_WAYS )
- vbox.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR )
-
- self._MakeCurrentSelectionTagsBox( vbox )
-
- self.SetSizer( vbox )
-
-
- def _THREADGetImportArgs( self, queue_object ):
-
- url = queue_object
-
- ( status, hash ) = HC.app.Read( 'url_status', url )
-
- if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new'
-
- if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
- elif status == 'redundant':
-
- ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
-
- HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
- HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
-
- else:
-
- HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) ) )
-
- parse_result = urlparse.urlparse( url )
-
- ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
-
- if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.get_connection( scheme = scheme, host = host, port = port )
-
- connection = self._connections[ ( scheme, host, port ) ]
-
- def hook( range, value ): wx.CallAfter( self.SetDownloadProgress, range, value )
-
- connection.AddReportHook( hook )
-
- temp_path = connection.geturl( url, response_to_path = True )
-
- connection.ClearReportHooks()
-
- advanced_import_options = self._advanced_import_options.GetInfo()
-
- service_identifiers_to_tags = {}
-
- wx.CallAfter( self.CALLBACKImportArgs, temp_path, advanced_import_options, service_identifiers_to_tags, url = url )
-
-
-
- def _THREADDownloadImportItems( self, url ):
-
- try:
-
- HC.pubsub.pub( 'set_feed_queue_info', self._page_key, 'parsing url' )
-
- try:
-
- parse_result = urlparse.urlparse( url )
-
- ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
-
- except: raise Exception( 'Could not parse that URL' )
-
- HC.pubsub.pub( 'set_feed_queue_info', self._page_key, 'Connecting to address' )
-
- try: connection = HC.get_connection( scheme = scheme, host = host, port = port )
- except: raise Exception( 'Could not connect to server' )
-
- try: html = connection.geturl( url )
- except: raise Exception( 'Could not download that url' )
-
- HC.pubsub.pub( 'set_feed_queue_info', self._page_key, 'parsing html' )
-
- try: urls = ClientParsers.ParsePage( html, url )
- except: raise Exception( 'Could not parse that URL\'s html' )
-
- wx.CallAfter( self.CALLBACKAddToImportQueue, urls )
-
- except Exception as e: HC.pubsub.pub( 'set_feed_queue_info', self._page_key, HC.u( e ) )
-
- self._feed_queue_job_key.Finish()
+ self._import_gauge.Hide()
+ self._import_cancel_button.Hide()
class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
- def __init__( self, parent, page, page_key, starting_from_session = False ):
+ def _InitExtraVboxElements( self, vbox ):
- ManagementPanelImport.__init__( self, parent, page, page_key, starting_from_session = starting_from_session )
+ ManagementPanelImport._InitExtraVboxElements( self, vbox )
- self._download_progress_gauge = ClientGUICommon.Gauge( self._processing_panel )
+ self._import_cancel_button.Hide()
- self._connections = {}
-
- vbox = wx.BoxSizer( wx.VERTICAL )
-
- self._MakeSort( vbox )
-
- self._processing_panel.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._download_progress_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR )
- self._processing_panel.AddF( self._import_pause_button, FLAGS_EXPAND_PERPENDICULAR )
+ #
self._thread_panel = ClientGUICommon.StaticBox( self, 'thread checker' )
- self._thread_info = wx.StaticText( self._thread_panel, label = '' )
+ self._thread_info = wx.StaticText( self._thread_panel, label = 'enter a 4chan thread url' )
self._thread_time = wx.SpinCtrl( self._thread_panel, min = 30, max = 1800 )
self._thread_time.SetValue( 180 )
+ self._thread_time.Bind( wx.EVT_SPINCTRL, self.EventThreadTime )
self._thread_input = wx.TextCtrl( self._thread_panel, style = wx.TE_PROCESS_ENTER )
self._thread_input.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown )
self._thread_pause_button = wx.Button( self._thread_panel, label = 'pause' )
- self._thread_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseChecker )
- self._thread_pause_button.SetForegroundColour( ( 128, 0, 0 ) )
- self._thread_pause_button.Disable()
+ self._thread_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseBuildImportQueue )
hbox = wx.BoxSizer( wx.HORIZONTAL )
@@ -2238,151 +1693,55 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
self._thread_panel.AddF( hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
self._thread_panel.AddF( self._thread_pause_button, FLAGS_EXPAND_PERPENDICULAR )
+ vbox.AddF( self._thread_panel, FLAGS_EXPAND_SIZER_PERPENDICULAR )
+
+ #
+
self._advanced_tag_options = ClientGUICommon.AdvancedTagOptions( self, 'send to ', [ 'filename' ] )
- self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self )
-
- vbox.AddF( self._processing_panel, FLAGS_EXPAND_PERPENDICULAR )
- vbox.AddF( self._thread_panel, FLAGS_EXPAND_SIZER_PERPENDICULAR )
vbox.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR )
- vbox.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR )
-
- self._MakeCurrentSelectionTagsBox( vbox )
-
- self.SetSizer( vbox )
-
- self._last_thread_check = None
- self._4chan_board = None
- self._thread_id = None
- self._currently_checking_thread = False
- self._currently_paused = False
- self._image_infos_already_added = set()
-
- self._feed_queue_timer = wx.Timer( self, id = ID_TIMER_PROCESS_FEED_QUEUE )
-
- self.Bind( wx.EVT_TIMER, self.TIMEREventProcessFeedQueue, id = ID_TIMER_PROCESS_FEED_QUEUE )
-
- self._feed_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT )
-
- HC.pubsub.sub( self, 'SetThreadInfo', 'set_thread_info' )
- def _THREADFetchThread( self ):
+ def _SetThreadTime( self ):
- HC.pubsub.pub( 'set_thread_info', self._page_key, 'checking thread' )
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
- url = 'http://api.4chan.org/' + self._4chan_board + '/res/' + self._thread_id + '.json'
+ thread_time = self._thread_time.GetValue()
- try:
-
- connection = HC.get_connection( url = url )
-
- raw_json = connection.geturl( url )
-
- json_dict = json.loads( raw_json )
-
- posts_list = json_dict[ 'posts' ]
-
- image_infos = [ ( post[ 'md5' ].decode( 'base64' ), HC.u( post[ 'tim' ] ), post[ 'ext' ], post[ 'filename' ] ) for post in posts_list if 'md5' in post ]
-
- image_infos_i_can_add = [ image_info for image_info in image_infos if image_info not in self._image_infos_already_added ]
-
- self._image_infos_already_added.update( image_infos_i_can_add )
-
- if len( image_infos_i_can_add ) > 0: wx.CallAfter( self.CALLBACKAddToImportQueue, image_infos_i_can_add )
-
- except HydrusExceptions.NotFoundException:
-
- HC.pubsub.pub( 'set_thread_info', self._page_key, 'Thread 404' )
-
- wx.CallAfter( self._thread_pause_button.Disable )
-
- return
-
- except Exception as e:
-
- HC.pubsub.pub( 'set_thread_info', self._page_key, HC.u( e ) )
-
- wx.CallAfter( self._thread_pause_button.Disable )
-
- return
-
-
- self._last_thread_check = HC.GetNow()
-
- self._currently_checking_thread = False
+ import_queue_job_key.SetVariable( 'thread_time', thread_time )
- def _THREADGetImportArgs( self, queue_object ):
+ def _UpdateGUI( self ):
- ( md5, image_name, ext, filename ) = queue_object
+ ManagementPanelImport._UpdateGUI( self )
- ( status, hash ) = HC.app.Read( 'md5_status', md5 )
+ import_job_key = self._import_controller.GetJobKey( 'import' )
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
- if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new'
+ # thread_info
- if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
- elif status == 'redundant':
+ status = import_queue_job_key.GetVariable( 'status' )
+
+ if status != self._thread_info.GetLabel(): self._thread_info.SetLabel( status )
+
+ # button
+
+ if import_queue_job_key.IsWorking():
- ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
+ self._thread_pause_button.Enable()
- HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
- HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
-
- else:
-
- url = 'http://images.4chan.org/' + self._4chan_board + '/src/' + image_name + ext
-
- ( status, hash ) = HC.app.Read( 'url_status', url )
-
- if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new'
-
- if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
- elif status == 'redundant':
+ if import_queue_job_key.IsPaused():
- ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
-
- HC.pubsub.pub( 'add_media_results', self._page_key, ( media_result, ) )
- HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
+ self._thread_pause_button.SetLabel( 'resume' )
+ self._thread_pause_button.SetForegroundColour( ( 0, 128, 0 ) )
else:
- HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) ) )
-
- parse_result = urlparse.urlparse( url )
-
- ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
-
- if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.get_connection( scheme = scheme, host = host, port = port )
-
- connection = self._connections[ ( scheme, host, port ) ]
-
- def hook( range, value ): wx.CallAfter( self.SetDownloadProgress, range, value )
-
- connection.AddReportHook( hook )
-
- temp_path = connection.geturl( url, response_to_path = True )
-
- connection.ClearReportHooks()
-
- advanced_import_options = self._advanced_import_options.GetInfo()
-
- advanced_tag_options = self._advanced_tag_options.GetInfo()
-
- tags = [ 'filename:' + filename + ext ]
-
- service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
-
- wx.CallAfter( self.CALLBACKImportArgs, temp_path, advanced_import_options, service_identifiers_to_tags, url = url )
+ self._thread_pause_button.SetLabel( 'pause' )
+ self._thread_pause_button.SetForegroundColour( ( 0, 0, 0 ) )
-
-
- def _GetPreprocessStatus( self ):
-
- status = 'checking url/hash status ' + HC.u( self._import_queue_position + 1 ) + '/' + HC.u( len( self._import_queue ) )
-
- return status
+ else: self._thread_pause_button.Disable()
def EventKeyDown( self, event ):
@@ -2419,79 +1778,33 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
return
- self._4chan_board = board
- self._thread_id = thread_id
-
- self._last_thread_check = 0
-
self._thread_input.Disable()
- self._thread_pause_button.Enable()
+
+ self._SetThreadTime()
+
+ self._import_controller.PendImportQueue( ( board, thread_id ) )
else: event.Skip()
- def EventPauseChecker( self, event ):
+ def EventPauseBuildImportQueue( self, event ):
- if self._currently_paused:
-
- self._currently_paused = False
-
- self._thread_pause_button.SetLabel( 'pause' )
- self._thread_pause_button.SetForegroundColour( ( 0, 0, 0 ) )
-
- else:
-
- self._currently_paused = True
-
- self._thread_pause_button.SetLabel( 'resume' )
- self._thread_pause_button.SetForegroundColour( ( 0, 128, 0 ) )
-
+ import_queue_job_key = self._import_controller.GetJobKey( 'import_queue' )
+
+ import_queue_job_key.PauseResume()
+
+ self._UpdateGUI()
- def SetDownloadProgress( self, range, value ):
-
- if range is None: self._download_progress_gauge.Pulse()
- else:
-
- self._download_progress_gauge.SetRange( range )
- self._download_progress_gauge.SetValue( value )
-
-
+ def EventThreadTime( self, event ): self._SetThreadTime()
+
+ def GetAdvancedTagOptions( self ): return self._advanced_tag_options.GetInfo()
def SetSearchFocus( self, page_key ):
if page_key == self._page_key: self._thread_input.SetFocus()
- def SetThreadInfo( self, page_key, info ):
-
- if self._page_key == page_key: self._thread_info.SetLabel( info )
-
-
- def TIMEREventProcessFeedQueue( self, event ):
-
- if self._4chan_board is None: self._thread_info.SetLabel( 'enter a 4chan thread url' )
- elif self._currently_paused: self._thread_info.SetLabel( 'paused' )
- elif not self._currently_checking_thread:
-
- thread_time = self._thread_time.GetValue()
-
- if thread_time < 30: thread_time = 30
-
- next_thread_check = self._last_thread_check + thread_time
-
- if next_thread_check < HC.GetNow():
-
- self._currently_checking_thread = True
-
- threading.Thread( target = self._THREADFetchThread, name = 'Fetch Thread' ).start()
-
- else: self._thread_info.SetLabel( 'rechecking thread ' + HC.ConvertTimestampToPrettyPending( next_thread_check ) )
-
-
- self._feed_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT )
-
-
class ManagementPanelPetitions( ManagementPanel ):
def __init__( self, parent, page, page_key, file_service_identifier, petition_service_identifier, starting_from_session = False ):
@@ -2828,6 +2141,13 @@ class ManagementPanelQuery( ManagementPanel ):
+ def CleanBeforeDestroy( self ):
+
+ ManagementPanel.CleanBeforeDestroy( self )
+
+ self._query_key.Cancel()
+
+
def GetPredicates( self ):
if hasattr( self, '_current_predicates_box' ): return self._current_predicates_box.GetPredicates()
@@ -3080,7 +2400,7 @@ class ManagementPanelMessages( wx.ScrolledWindow ):
except: wx.MessageBox( traceback.format_exc() )
- def TryToClose( self ):
+ def TestAbleToClose( self ):
pass
diff --git a/include/ClientGUIMedia.py b/include/ClientGUIMedia.py
index e2e5d587..fabae628 100755
--- a/include/ClientGUIMedia.py
+++ b/include/ClientGUIMedia.py
@@ -863,10 +863,12 @@ class MediaPanelLoading( MediaPanel ):
class MediaPanelThumbnails( MediaPanel ):
- def __init__( self, parent, page_key, file_service_identifier, media_results ):
+ def __init__( self, parent, page_key, file_service_identifier, media_results, refreshable = True ):
MediaPanel.__init__( self, parent, page_key, file_service_identifier, media_results )
+ self._refreshable = refreshable
+
self._num_columns = 1
self._num_rows_in_client_height = 0
self._drawn_index_bounds = None
@@ -1516,27 +1518,44 @@ class MediaPanelThumbnails( MediaPanel ):
thumbnail = self._GetThumbnailUnderMouse( event )
+ if thumbnail is not None: self._HitMedia( thumbnail, event.CmdDown(), event.ShiftDown() )
+
+ all_service_identifiers = [ media.GetFileServiceIdentifiersCDPP() for media in self._selected_media ]
+
+ selection_has_local = True in ( s_is.HasLocal() for s_is in all_service_identifiers )
+ selection_has_inbox = True in ( media.HasInbox() for media in self._selected_media )
+ selection_has_archive = True in ( media.HasArchive() for media in self._selected_media )
+
menu = wx.Menu()
if thumbnail is None:
- menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
+ if self._refreshable:
+
+ menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
+
- menu.AppendSeparator()
-
- select_menu = wx.Menu()
-
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
-
- menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
+ if len( self._sorted_media ) > 0:
+
+ if menu.GetMenuItemCount() > 0: menu.AppendSeparator()
+
+ select_menu = wx.Menu()
+
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
+
+ if selection_has_archive and selection_has_inbox:
+
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
+
+
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
+
+ menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
+
else:
- self._HitMedia( thumbnail, event.CmdDown(), event.ShiftDown() )
-
if self._focussed_media is not None:
# variables
@@ -1562,12 +1581,6 @@ class MediaPanelThumbnails( MediaPanel ):
user_manageable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.MANAGE_USERS ) }
admin_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.GENERAL_ADMIN ) }
- all_service_identifiers = [ media.GetFileServiceIdentifiersCDPP() for media in self._selected_media ]
-
- selection_has_local = True in ( s_is.HasLocal() for s_is in all_service_identifiers )
- selection_has_inbox = True in ( media.HasInbox() for media in self._selected_media )
- selection_has_archive = True in ( media.HasArchive() for media in self._selected_media )
-
if multiple_selected:
uploaded_phrase = 'all uploaded to'
@@ -1826,20 +1839,31 @@ class MediaPanelThumbnails( MediaPanel ):
#
- menu.AppendSeparator()
+ if self._refreshable:
+
+ menu.AppendSeparator()
+
+ menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
+
- menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), 'refresh' )
-
- menu.AppendSeparator()
-
- select_menu = wx.Menu()
-
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
- select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
-
- menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
+ if len( self._sorted_media ) > 0:
+
+ menu.AppendSeparator()
+
+ select_menu = wx.Menu()
+
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'all' ), 'all' )
+
+ if selection_has_archive and selection_has_inbox:
+
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'inbox' ), 'inbox' )
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
+
+
+ select_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
+
+ menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
+
menu.AppendSeparator()
@@ -1854,7 +1878,7 @@ class MediaPanelThumbnails( MediaPanel ):
- self.PopupMenu( menu )
+ if menu.GetMenuItemCount() > 0: self.PopupMenu( menu )
menu.Destroy()
diff --git a/include/ClientGUIMessages.py b/include/ClientGUIMessages.py
index e3c21cac..9a3a2e2c 100755
--- a/include/ClientGUIMessages.py
+++ b/include/ClientGUIMessages.py
@@ -433,7 +433,7 @@ class ConversationPanel( wx.Panel ):
self._drafts_vbox.Detach( draft_panel )
- draft_panel.Destroy()
+ draft_panel.Close()
self._scrolling_messages_window.FitInside()
@@ -522,7 +522,7 @@ class ConversationSplitter( wx.SplitterWindow ):
self.ReplaceWindow( self._conversation_panel, new_panel )
- self._conversation_panel.Destroy()
+ self._conversation_panel.Close()
self._conversation_panel = new_panel
@@ -694,7 +694,7 @@ class DestinationPanel( wx.Panel ):
self._hbox.Replace( self._status_panel, new_status_panel )
- self._status_panel.Destroy()
+ self._status_panel.Close()
self._status_panel = new_status_panel
@@ -1285,7 +1285,7 @@ class DraftPanel( wx.Panel ):
message = 'The hydrus client could not connect to your message depot, so the message could not be sent!'
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
return
@@ -1413,7 +1413,7 @@ class MessagePanel( wx.Panel ):
self._hbox.Replace( self._body_panel, body_panel )
- self._body_panel.Destroy()
+ self._body_panel.Close()
self._body_panel = body_panel
diff --git a/include/ClientGUIPages.py b/include/ClientGUIPages.py
index 6aca7b07..966134e1 100755
--- a/include/ClientGUIPages.py
+++ b/include/ClientGUIPages.py
@@ -6,6 +6,7 @@ import ClientGUIManagement
import ClientGUIMedia
import ClientGUIMessages
import ClientGUICanvas
+import HydrusDownloading
import inspect
import os
import sys
@@ -39,11 +40,21 @@ class PageBase():
self._starting_from_session = starting_from_session
self._page_key = os.urandom( 32 )
+ self._InitControllers()
+
self._pretty_status = ''
HC.pubsub.sub( self, 'SetPrettyStatus', 'new_page_status' )
+ def _InitControllers( self ): pass
+
+ def _PauseControllers( self ): pass
+
+ def _ResumeControllers( self ): pass
+
+ def CleanBeforeDestroy( self ): pass
+
def GetPrettyStatus( self ): return self._pretty_status
def GetSashPositions( self ):
@@ -61,6 +72,8 @@ class PageBase():
def Pause( self ):
+ self._PauseControllers()
+
HC.pubsub.pub( 'pause', self._page_key )
HC.pubsub.pub( 'set_focus', self._page_key, None )
@@ -86,9 +99,14 @@ class PageBase():
def ShowHideSplit( self ): pass
- def TryToClose( self ): pass
+ def TestAbleToClose( self ): pass
- def Resume( self ): HC.pubsub.pub( 'resume', self._page_key )
+ def Resume( self ):
+
+ self._ResumeControllers()
+
+ HC.pubsub.pub( 'resume', self._page_key )
+
class PageLog( PageBase, wx.Panel ):
@@ -204,7 +222,7 @@ class PageMessages( PageBase, wx.SplitterWindow ):
else: self._search_preview_split.SplitHorizontally( self._management_panel, self._preview_panel, HC.options[ 'vpos' ] )
- def TryToClose( self ): self._management_panel.TryToClose()
+ def TestAbleToClose( self ): self._management_panel.TestAbleToClose()
class PageWithMedia( PageBase, wx.SplitterWindow ):
@@ -243,6 +261,8 @@ class PageWithMedia( PageBase, wx.SplitterWindow ):
HC.pubsub.sub( self, 'SwapMediaPanel', 'swap_media_panel' )
+ def CleanBeforeDestroy( self ): self._management_panel.CleanBeforeDestroy()
+
def EventPreviewUnsplit( self, event ): self._search_preview_split.Unsplit( self._preview_panel )
def EventUnsplit( self, event ): self.Unsplit( self._search_preview_split )
@@ -285,18 +305,71 @@ class PageWithMedia( PageBase, wx.SplitterWindow ):
self.ReplaceWindow( self._media_panel, new_panel )
- self._media_panel.Destroy()
+ self._media_panel.Close()
self._media_panel = new_panel
- def TryToClose( self ): self._management_panel.TryToClose()
+ def TestAbleToClose( self ): self._management_panel.TestAbleToClose()
class PageImport( PageWithMedia ):
+ def _GenerateImportArgsGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ advanced_import_options = self._management_panel.GetAdvancedImportOptions()
+
+ return HydrusDownloading.ImportArgsGenerator( job_key, item, advanced_import_options )
+
+
+ return factory
+
+
+ def _GenerateImportQueueGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ return HydrusDownloading.ImportQueueGenerator( job_key, item )
+
+
+ return factory
+
+
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_media_results )
+ def _InitControllers( self ):
+
+ import_args_generator_factory = self._GenerateImportArgsGeneratorFactory()
+ import_queue_generator_factory = self._GenerateImportQueueGeneratorFactory()
+
+ self._import_controller = HydrusDownloading.ImportController( import_args_generator_factory, import_queue_generator_factory, page_key = self._page_key )
+
+ self._import_controller.StartThread()
+
+
+ def _PauseControllers( self ):
+
+ controller_job_key = self._import_controller.GetJobKey( 'controller' )
+
+ controller_job_key.Pause()
+
+
+ def _ResumeControllers( self ):
+
+ controller_job_key = self._import_controller.GetJobKey( 'controller' )
+
+ controller_job_key.Resume()
+
+
+ def CleanBeforeDestroy( self ):
+
+ PageWithMedia.CleanBeforeDestroy( self )
+
+ self._import_controller.CleanBeforeDestroy()
+
+
def GetSessionArgs( self ):
hashes = [ media.GetHash() for media in self._media_panel.GetFlatMedia() ]
@@ -307,55 +380,201 @@ class PageImport( PageWithMedia ):
return ( args, kwargs )
-class PageImportBooru( PageImport ):
-
- def __init__( self, parent, booru, initial_hashes = [], starting_from_session = False ):
-
- self._booru = booru
-
- PageImport.__init__( self, parent, initial_hashes = initial_hashes, starting_from_session = starting_from_session )
-
-
- def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedBooru( self._search_preview_split, self, self._page_key, self._booru, starting_from_session = self._starting_from_session )
-
- def GetSessionArgs( self ):
-
- hashes = [ media.GetHash() for media in self._media_panel.GetFlatMedia() ]
-
- args = ( self._booru, )
- kwargs = { 'initial_hashes' : hashes }
-
- return ( args, kwargs )
-
-
class PageImportGallery( PageImport ):
- def __init__( self, parent, name, initial_hashes = [], starting_from_session = False ):
+ def __init__( self, parent, gallery_name, gallery_type, initial_hashes = [], starting_from_session = False ):
- self._name = name
+ self._gallery_name = gallery_name
+ self._gallery_type = gallery_type
PageImport.__init__( self, parent, initial_hashes = initial_hashes, starting_from_session = starting_from_session )
+ def _GenerateImportArgsGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ advanced_import_options = self._management_panel.GetAdvancedImportOptions()
+ advanced_tag_options = self._management_panel.GetAdvancedTagOptions()
+
+ downloaders_factory = self._GetDownloadersFactory()
+
+ return HydrusDownloading.ImportArgsGeneratorGallery( job_key, item, advanced_import_options, advanced_tag_options, downloaders_factory )
+
+
+ return factory
+
+
+ def _GenerateImportQueueGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ downloaders_factory = self._GetDownloadersFactory()
+
+ return HydrusDownloading.ImportQueueGeneratorGallery( job_key, item, downloaders_factory )
+
+
+ return factory
+
+
+ def _GetDownloadersFactory( self ):
+
+ if self._gallery_name == 'booru':
+
+ def downloaders_factory( raw_tags ):
+
+ booru = self._gallery_type
+ tags = raw_tags.split( ' ' )
+
+ return ( HydrusDownloading.DownloaderBooru( booru, tags ), )
+
+
+ elif self._gallery_name == 'deviant art':
+
+ if self._gallery_type == 'artist':
+
+ def downloaders_factory( artist ):
+
+ return ( HydrusDownloading.DownloaderDeviantArt( artist ), )
+
+
+
+ elif self._gallery_name == 'giphy':
+
+ def downloaders_factory( tag ):
+
+ return ( HydrusDownloading.DownloaderGiphy( tag ), )
+
+
+ elif self._gallery_name == 'hentai foundry':
+
+ if self._gallery_type == 'artist':
+
+ def downloaders_factory( artist ):
+
+ advanced_hentai_foundry_options = self._management_panel.GetAdvancedHentaiFoundryOptions()
+
+ pictures_downloader = HydrusDownloading.DownloaderHentaiFoundry( 'artist pictures', artist, advanced_hentai_foundry_options )
+ scraps_downloader = HydrusDownloading.DownloaderHentaiFoundry( 'artist scraps', artist, advanced_hentai_foundry_options )
+
+ return ( pictures_downloader, scraps_downloader )
+
+
+ elif self._gallery_type == 'tags':
+
+ def downloaders_factory( raw_tags ):
+
+ advanced_hentai_foundry_options = self._management_panel.GetAdvancedHentaiFoundryOptions()
+
+ tags = raw_tags.split( ' ' )
+
+ return ( HydrusDownloading.DownloaderHentaiFoundry( 'tags', tags, advanced_hentai_foundry_options ), )
+
+
+
+ elif self._gallery_name == 'newgrounds':
+
+ def downloaders_factory( artist ):
+
+ return ( HydrusDownloading.DownloaderNewgrounds( artist ), )
+
+
+ elif self._gallery_name == 'pixiv':
+
+ if self._gallery_type == 'artist':
+
+ def downloaders_factory( artist ):
+
+ return ( HydrusDownloading.DownloaderPixiv( 'artist', artist ), )
+
+
+ elif self._gallery_type == 'tag':
+
+ def downloaders_factory( tag ):
+
+ return ( HydrusDownloading.DownloaderPixiv( 'tag', tag ), )
+
+
+
+ elif self._gallery_name == 'tumblr':
+
+ def downloaders_factory( username ):
+
+ return ( HydrusDownloading.DownloaderTumblr( username ), )
+
+
+
+ return downloaders_factory
+
+
def _InitManagementPanel( self ):
- if self._name == 'deviant art by artist': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedDeviantArt
- elif self._name == 'hentai foundry by artist': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedHentaiFoundryArtist
- elif self._name == 'hentai foundry by tags': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedHentaiFoundryTags
- elif self._name == 'giphy': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedGiphy
- elif self._name == 'newgrounds': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedNewgrounds
- elif self._name == 'pixiv by artist': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedPixivArtist
- elif self._name == 'pixiv by tag': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedPixivTag
- elif self._name == 'tumblr': c = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedTumblr
+ if self._gallery_name == 'hentai foundry':
+
+ name = 'hentai foundry'
+ namespaces = [ 'creator', 'title', '' ]
+
+ if self._gallery_type == 'artist': initial_search_value = 'artist username'
+ elif self._gallery_type == 'tags': initial_search_value = 'search tags'
+
+ self._management_panel = ClientGUIManagement.ManagementPanelImportsGalleryHentaiFoundry( self._search_preview_split, self, self._page_key, self._import_controller, name, namespaces, initial_search_value, starting_from_session = self._starting_from_session )
+
+ else:
+
+ if self._gallery_name == 'booru':
+
+ booru = self._gallery_type
+
+ name = booru.GetName()
+ namespaces = booru.GetNamespaces()
+ initial_search_value = 'search tags'
+
+ elif self._gallery_name == 'deviant art':
+
+ if self._gallery_type == 'artist':
+
+ name = 'deviant art'
+ namespaces = [ 'creator', 'title', '' ]
+ initial_search_value = 'artist username'
+
+
+ elif self._gallery_name == 'giphy':
+
+ name = 'giphy'
+ namespaces = [ '' ]
- self._management_panel = c( self._search_preview_split, self, self._page_key, starting_from_session = self._starting_from_session )
+ initial_search_value = 'search tag'
+
+ elif self._gallery_name == 'newgrounds':
+
+ name = 'newgrounds'
+ namespaces = [ 'creator', 'title', '' ]
+ initial_search_value = 'artist username'
+
+ elif self._gallery_name == 'pixiv':
+
+ name = 'pixiv'
+ namespaces = [ 'creator', 'title', '' ]
+
+ if self._gallery_type == 'artist': initial_search_value = 'artist username'
+ elif self._gallery_type == 'tag': initial_search_value = 'search tag'
+
+ elif self._gallery_name == 'tumblr':
+
+ name = 'tumblr'
+ namespaces = [ '' ]
+ initial_search_value = 'username'
+
+
+ self._management_panel = ClientGUIManagement.ManagementPanelImportsGallery( self._search_preview_split, self, self._page_key, self._import_controller, name, namespaces, initial_search_value, starting_from_session = self._starting_from_session )
+
def GetSessionArgs( self ):
hashes = [ media.GetHash() for media in self._media_panel.GetFlatMedia() ]
- args = ( self._name, )
+ args = ( self._gallery_name, self._gallery_type )
kwargs = { 'initial_hashes' : hashes }
return ( args, kwargs )
@@ -372,8 +591,20 @@ class PageImportHDD( PageImport ):
PageImport.__init__( self, parent, initial_hashes = initial_hashes, starting_from_session = starting_from_session )
+ self._import_controller.PendImportQueue( self._paths_info )
+
- def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportHDD( self._search_preview_split, self, self._page_key, self._paths_info, advanced_import_options = self._advanced_import_options, paths_to_tags = self._paths_to_tags, delete_after_success = self._delete_after_success, starting_from_session = self._starting_from_session )
+ def _GenerateImportArgsGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ return HydrusDownloading.ImportArgsGeneratorHDD( job_key, item, self._advanced_import_options, self._paths_to_tags, self._delete_after_success )
+
+
+ return factory
+
+
+ def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportHDD( self._search_preview_split, self, self._page_key, self._import_controller, starting_from_session = self._starting_from_session )
def GetSessionArgs( self ):
@@ -387,11 +618,60 @@ class PageImportHDD( PageImport ):
class PageImportThreadWatcher( PageImport ):
- def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportThreadWatcher( self._search_preview_split, self, self._page_key, starting_from_session = self._starting_from_session )
+ def _GenerateImportArgsGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ advanced_import_options = self._management_panel.GetAdvancedImportOptions()
+ advanced_tag_options = self._management_panel.GetAdvancedTagOptions()
+
+ # fourchan_board should be on the job_key or whatever. it is stuck on initial queue generation
+ # we should not be getting it from the management_panel
+ # we should have access to this info from the job_key or w/e
+
+ return HydrusDownloading.ImportArgsGeneratorThread( job_key, item, advanced_import_options, advanced_tag_options )
+
+
+ return factory
+
+
+ def _GenerateImportQueueGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ return HydrusDownloading.ImportQueueGeneratorThread( job_key, item )
+
+
+ return factory
+
+
+ def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportThreadWatcher( self._search_preview_split, self, self._page_key, self._import_controller, starting_from_session = self._starting_from_session )
class PageImportURL( PageImport ):
- def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueURL( self._search_preview_split, self, self._page_key, starting_from_session = self._starting_from_session )
+ def _GenerateImportArgsGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ advanced_import_options = self._management_panel.GetAdvancedImportOptions()
+
+ return HydrusDownloading.ImportArgsGeneratorURLs( job_key, item, advanced_import_options )
+
+
+ return factory
+
+
+ def _GenerateImportQueueGeneratorFactory( self ):
+
+ def factory( job_key, item ):
+
+ return HydrusDownloading.ImportQueueGeneratorURLs( job_key, item )
+
+
+ return factory
+
+
+ def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportsURL( self._search_preview_split, self, self._page_key, self._import_controller, starting_from_session = self._starting_from_session )
class PagePetitions( PageWithMedia ):
@@ -430,7 +710,12 @@ class PageQuery( PageWithMedia ):
def _InitMediaPanel( self ):
if len( self._initial_media_results ) == 0: self._media_panel = ClientGUIMedia.MediaPanelNoQuery( self, self._page_key, self._file_service_identifier )
- else: self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_media_results )
+ else:
+
+ refreshable = len( self._initial_predicates ) > 0 or len( self._initial_media_results ) == 0
+
+ self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, self._initial_media_results, refreshable = refreshable )
+
def GetSessionArgs( self ):
diff --git a/include/ClientParsers.py b/include/ClientParsers.py
index 04261072..0bfce1e5 100755
--- a/include/ClientParsers.py
+++ b/include/ClientParsers.py
@@ -16,7 +16,7 @@ def Parse4chanPostScreen( html ):
message = 'You are banned from this board! html written to log.'
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
return ( 'big error', message )
@@ -33,7 +33,7 @@ def Parse4chanPostScreen( html ):
message = 'Unknown problem; html written to log.'
- HC.pubsub.pub( 'message', HC.Message( HC.MESSAGE_TYPE_TEXT, message ) )
+ HC.ShowText( message )
return ( 'error', message )
diff --git a/include/HydrusConstants.py b/include/HydrusConstants.py
index a11d8e6a..00bce3e4 100755
--- a/include/HydrusConstants.py
+++ b/include/HydrusConstants.py
@@ -45,7 +45,7 @@ TEMP_DIR = BASE_DIR + os.path.sep + 'temp'
# Misc
NETWORK_VERSION = 13
-SOFTWARE_VERSION = 99
+SOFTWARE_VERSION = 100
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@@ -1037,23 +1037,6 @@ def ConvertZoomToPercentage( zoom ):
return pretty_zoom
-def ShowExceptionDefault( e ):
-
- etype = type( e )
-
- value = u( e )
-
- trace_list = traceback.format_stack()
-
- trace = ''.join( trace_list )
-
- message = u( etype.__name__ ) + ': ' + u( value ) + os.linesep + u( trace )
-
- try: print( message )
- except: print( repr( message ) )
-
-ShowException = ShowExceptionDefault
-
def GetEmptyDataDict():
data = collections.defaultdict( default_dict_list )
@@ -1114,20 +1097,6 @@ def MergeKeyToListDicts( key_to_list_dicts ):
return result
-
-def u( text_producing_object ):
-
- if type( text_producing_object ) in ( str, unicode, bs4.element.NavigableString ): text = text_producing_object
- else:
- try: text = str( text_producing_object ) # dealing with exceptions, etc...
- except: text = repr( text_producing_object )
-
- try: return unicode( text )
- except:
-
- try: return text.decode( locale.getpreferredencoding() )
- except: return str( text )
-
def SearchEntryMatchesPredicate( search_entry, predicate ):
@@ -1168,6 +1137,29 @@ def SearchEntryMatchesTag( search_entry, tag, search_siblings = True ):
return False
+def ShowExceptionDefault( e ):
+
+ etype = type( e )
+
+ value = u( e )
+
+ trace_list = traceback.format_stack()
+
+ trace = ''.join( trace_list )
+
+ message = u( etype.__name__ ) + ': ' + u( value ) + os.linesep + u( trace )
+
+ try: print( message )
+ except: print( repr( message ) )
+
+ShowException = ShowExceptionDefault
+
+def ShowTextDefault( text ):
+
+ print( text )
+
+ShowText = ShowTextDefault
+
def SplayListForDB( xs ): return '(' + ','.join( [ '"' + u( x ) + '"' for x in xs ] ) + ')'
def SplayTupleListForDB( first_column_name, second_column_name, xys ): return ' OR '.join( [ '( ' + first_column_name + '=' + u( x ) + ' AND ' + second_column_name + ' IN ' + SplayListForDB( ys ) + ' )' for ( x, ys ) in xys ] )
@@ -1190,7 +1182,21 @@ def ThumbnailResolution( original_resolution, target_resolution ):
return ( int( round( original_width ) ), int( round( original_height ) ) )
-
+
+def u( text_producing_object ):
+
+ if type( text_producing_object ) in ( str, unicode, bs4.element.NavigableString ): text = text_producing_object
+ else:
+ try: text = str( text_producing_object ) # dealing with exceptions, etc...
+ except: text = repr( text_producing_object )
+
+ try: return unicode( text )
+ except:
+
+ try: return text.decode( locale.getpreferredencoding() )
+ except: return str( text )
+
+
class AdvancedHTTPConnection():
def __init__( self, url = '', scheme = 'http', host = '', port = None, service_identifier = None, accept_cookies = False ):
@@ -2043,16 +2049,28 @@ class DAEMONWorker( DAEMON ):
def set( self, *args, **kwargs ): self._event.set()
-class Job():
+class JobDatabase():
- yaml_tag = u'!Job'
+ yaml_tag = u'!JobDatabase'
- def __init__( self ):
+ def __init__( self, action, type, synchronous, *args, **kwargs ):
+
+ self._action = action
+ self._type = type
+ self._synchronous = synchronous
+ self._args = args
+ self._kwargs = kwargs
self._result = None
self._result_ready = threading.Event()
+ def GetAction( self ): return self._action
+
+ def GetArgs( self ): return self._args
+
+ def GetKWArgs( self ): return self._kwargs
+
def GetResult( self ):
while True:
@@ -2069,13 +2087,19 @@ class Job():
trace_list = traceback.format_stack()
- my_trace = ''.join( trace_list )
+ my_trace = 'Stack Trace (most recent call last):' + os.linesep + os.linesep + os.linesep.join( trace_list )
- raise etype( my_trace + os.linesep + db_traceback )
+ full_message = os.linesep.join( ( 'GUI Thread:', my_trace, 'DB Thread:', db_traceback ) )
+
+ raise HydrusExceptions.DBException( full_message )
else: return self._result
+ def GetType( self ): return self._type
+
+ def IsSynchronous( self ): return self._synchronous
+
def PutResult( self, result ):
self._result = result
@@ -2083,31 +2107,6 @@ class Job():
self._result_ready.set()
-class JobInternal( Job ):
-
- yaml_tag = u'!JobInternal'
-
- def __init__( self, action, type, synchronous, *args, **kwargs ):
-
- Job.__init__( self )
-
- self._action = action
- self._type = type
- self._synchronous = synchronous
- self._args = args
- self._kwargs = kwargs
-
-
- def GetAction( self ): return self._action
-
- def GetArgs( self ): return self._args
-
- def GetKWArgs( self ): return self._kwargs
-
- def GetType( self ): return self._type
-
- def IsSynchronous( self ): return self._synchronous
-
class JobKey():
def __init__( self ):
@@ -2119,6 +2118,9 @@ class JobKey():
self._cancelled = threading.Event()
self._paused = threading.Event()
+ self._variable_lock = threading.Lock()
+ self._variables = dict()
+
def __eq__( self, other ): return self.__hash__() == other.__hash__()
@@ -2139,13 +2141,23 @@ class JobKey():
def GetKey( self ): return self._key
+ def GetVariable( self, name ):
+
+ with self._variable_lock: return self._variables[ name ]
+
+
+ def HasVariable( self, name ):
+
+ with self._variable_lock: return name in self._variables
+
+
def IsBegun( self ): return self._begun.is_set()
- def IsCancelled( self ): return self._cancelled.is_set()
+ def IsCancelled( self ): return shutdown or self._cancelled.is_set()
- def IsDone( self ): return self._done.is_set()
+ def IsDone( self ): return shutdown or self._done.is_set()
- def IsPaused( self ): return self.IsWorking() and self._paused.is_set()
+ def IsPaused( self ): return self._paused.is_set()
def IsWorking( self ): return self.IsBegun() and not self.IsDone()
@@ -2159,16 +2171,73 @@ class JobKey():
def Resume( self ): self._paused.clear()
+ def SetVariable( self, name, value ):
+
+ with self._variable_lock: self._variables[ name ] = value
+
+
def WaitOnPause( self ):
while self._paused.is_set():
time.sleep( 0.1 )
- if shutdown or self.IsCancelled() or self.IsDone(): return
+ if shutdown or self.IsDone(): return
+class JobNetwork():
+
+ yaml_tag = u'!JobNetwork'
+
+ def __init__( self, request_type, request, headers = {}, body = None, response_to_path = False, redirects_permitted = 4, service_identifier = None ):
+
+ self._request_type = request_type
+ self._request = request
+ self._headers = headers
+ self._body = body
+ self._response_to_path = response_to_path
+ self._redirects_permitted = redirects_permitted
+ self._service_identifier = service_identifier
+
+ self._result = None
+ self._result_ready = threading.Event()
+
+
+ def ToTuple( self ): return ( self._request_type, self._request, self._headers, self._body, self._response_to_path, self._redirects_permitted, self._service_identifier )
+
+ def GetResult( self ):
+
+ while True:
+
+ if self._result_ready.wait( 5 ) == True: break
+ elif shutdown: raise Exception( 'Application quit before network could serve result!' )
+
+
+ if issubclass( type( self._result ), Exception ):
+
+ etype = type( self._result )
+
+ network_traceback = unicode( self._result )
+
+ trace_list = traceback.format_stack()
+
+ my_trace = 'Stack Trace (most recent call last):' + os.linesep + os.linesep + os.linesep.join( trace_list )
+
+ full_message = os.linesep.join( ( 'Calling Thread:', my_trace, 'Network Thread:', network_traceback ) )
+
+ raise etype( full_message )
+
+ else: return self._result
+
+
+ def PutResult( self, result ):
+
+ self._result = result
+
+ self._result_ready.set()
+
+
class Message():
def __init__( self, message_type, info ):
diff --git a/include/HydrusDownloading.py b/include/HydrusDownloading.py
index e5aa27a7..0cfc2317 100644
--- a/include/HydrusDownloading.py
+++ b/include/HydrusDownloading.py
@@ -1,15 +1,20 @@
import bs4
+import ClientParsers
import collections
import httplib
import HydrusConstants as HC
+import HydrusExceptions
import json
import lxml
+import os
import pafy
import threading
+import time
import traceback
import urllib
import urlparse
import wx
+import zipfile
def ConvertServiceIdentifiersToTagsToServiceIdentifiersToContentUpdates( hash, service_identifiers_to_tags ):
@@ -1054,110 +1059,779 @@ class DownloaderTumblr( Downloader ):
def GetTags( self, url, tags ): return tags
-class DownloaderEngine(): # rename this to something more import related
+class ImportArgsGenerator():
- # this should be a yamlable thing
-
- def __init__( self, page_key, import_queue_generator ):
+ def __init__( self, job_key, item, advanced_import_options ):
- self._page_key = page_key
- self._import_queue_generator = import_queue_generator
-
- self._current_queue_processor = None
-
- self._pending_queue_jobs = []
+ self._job_key = job_key
+ self._item = item
+ self._advanced_import_options = advanced_import_options
- def GetCurrentQueueProcessor( self ): return self._current_queue_processor
-
- def ToTuple( self ): return ( self._pending_queue_jobs, )
-
- def PendQueueJob( self, job ):
+ def __call__( self ):
- self._pending_queue_jobs.append( job )
-
-
- def THREADProcessJobs( self ):
-
- while True:
+ try:
- if len( self._pending_queue_jobs ) > 0:
+ ( result, media_result ) = self._CheckCurrentStatus()
+
+ if result == 'new':
- job = self._pending_queue_jobs.pop( 0 )
+ ( temp_path, service_identifiers_to_tags, url ) = self._GetArgs()
- self._current_queue_processor = self._import_queue_generator( job )
+ self._job_key.SetVariable( 'status', 'importing' )
- self._current_queue_processor.ProcessQueue()
+ ( result, media_result ) = HC.app.WriteSynchronous( 'import_file', temp_path, advanced_import_options = self._advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, generate_media_result = True, url = url )
- else: time.sleep( 0.1 )
+
+ self._job_key.SetVariable( 'result', result )
+
+ if result in ( 'successful', 'redundant' ):
+
+ page_key = self._job_key.GetVariable( 'page_key' )
+
+ if media_result is not None and page_key is not None:
+
+ HC.pubsub.pub( 'add_media_results', page_key, ( media_result, ) )
+
+
+
+
+ self._job_key.SetVariable( 'status', '' )
+
+ self._job_key.Finish()
+
+ self._CleanUp() # e.g. possibly delete the file for hdd importargsgenerator
+
+ except Exception as e:
+
+ self._job_key.SetVariable( 'result', 'failed' )
+
+ HC.ShowException( e )
+
+ time.sleep( 2 )
+
+ self._job_key.Cancel()
-class ImportQueueProcessor():
+ def _CleanUp( self ): pass
- def __init__( self, page_key, import_args_generator ):
+ def _CheckCurrentStatus( self ): return ( 'new', None )
+
+class ImportArgsGeneratorGallery( ImportArgsGenerator ):
+
+ def __init__( self, job_key, item, advanced_import_options, advanced_tag_options, downloaders_factory ):
+ ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
+
+ self._advanced_tag_options = advanced_tag_options
+ self._downloaders_factory = downloaders_factory
+
+
+ def _GetArgs( self ):
+
+ url_args = self._item
+
+ url = url_args[0]
+
+ self._job_key.SetVariable( 'status', 'downloading' )
+
+ downloader = self._downloaders_factory( 'example' )[0]
+
+ def hook( range, value ):
+
+ self._job_key.SetVariable( 'range', range )
+ self._job_key.SetVariable( 'value', value )
+
+
+ downloader.AddReportHook( hook )
+
+ do_tags = len( self._advanced_tag_options ) > 0
+
+ if do_tags: ( temp_path, tags ) = downloader.GetFileAndTags( *url_args )
+ else:
+
+ temp_path = downloader.GetFile( *url_args )
+
+ tags = []
+
+
+ downloader.ClearReportHooks()
+
+ service_identifiers_to_tags = ConvertTagsToServiceIdentifiersToTags( tags, self._advanced_tag_options )
+
+ return ( temp_path, service_identifiers_to_tags, url )
+
+
+ def _CheckCurrentStatus( self ):
+
+ url_args = self._item
+
+ url = url_args[0]
+
+ self._job_key.SetVariable( 'status', 'checking url status' )
+
+ downloader = self._downloaders_factory( 'example' )[0]
+
+ ( status, hash ) = HC.app.Read( 'url_status', url )
+
+ if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options: status = 'new'
+
+ if status == 'redundant':
+
+ ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
+
+ do_tags = len( self._advanced_tag_options ) > 0
+
+ if do_tags:
+
+ tags = downloader.GetTags( *url_args )
+
+ service_identifiers_to_tags = ConvertTagsToServiceIdentifiersToTags( tags, self._advanced_tag_options )
+
+ service_identifiers_to_content_updates = ConvertServiceIdentifiersToTagsToServiceIdentifiersToContentUpdates( hash, service_identifiers_to_tags )
+
+ HC.app.Write( 'content_updates', service_identifiers_to_content_updates )
+
+
+ return ( status, media_result )
+
+ else: return ( status, None )
+
+
+class ImportArgsGeneratorHDD( ImportArgsGenerator ):
+
+ def __init__( self, job_key, item, advanced_import_options, paths_to_tags, delete_after_success ):
+
+ ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
+
+ self._paths_to_tags = paths_to_tags
+ self._delete_after_success = delete_after_success
+
+
+ def _CleanUp( self ):
+
+ result = self._job_key.GetVariable( 'result' )
+
+ if self._delete_after_success and result in ( 'successful', 'redundant' ):
+
+ ( path_type, path_info ) = self._item
+
+ if path_type == 'path':
+
+ path = path_info
+
+ try: os.remove( path )
+ except: pass
+
+
+
+
+ def _GetArgs( self ):
+
+ self._job_key.SetVariable( 'status', 'reading from hdd' )
+
+ ( path_type, path_info ) = self._item
+
+ service_identifiers_to_tags = {}
+
+ if path_type == 'path':
+
+ path = path_info
+
+ if path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ path ]
+
+ elif path_type == 'zip':
+
+ ( zip_path, name ) = path_info
+
+ path = HC.GetTempPath()
+
+ with open( path, 'wb' ) as f:
+
+ with zipfile.ZipFile( zip_path, 'r' ) as z: f.write( z.read( name ) )
+
+
+ pretty_path = zip_path + os.path.sep + name
+
+ if pretty_path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ pretty_path ]
+
+
+ return ( path, service_identifiers_to_tags, None )
+
+
+class ImportArgsGeneratorThread( ImportArgsGenerator ):
+
+ def __init__( self, job_key, item, advanced_import_options, advanced_tag_options ):
+
+ ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
+
+ self._advanced_tag_options = advanced_tag_options
+
+
+ def _GetArgs( self ):
+
+ self._job_key.SetVariable( 'status', 'downloading' )
+
+ ( md5, board, image_name, ext, filename ) = self._item
+
+ # where do I get 4chan_board from? is it set to the controller_job_key?
+ # that'd prob be the best place, but think about it
+
+ url = 'http://images.4chan.org/' + board + '/src/' + image_name + ext
+
+ parse_result = urlparse.urlparse( url )
+
+ ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
+
+ connection = HC.get_connection( scheme = scheme, host = host, port = port )
+
+ def hook( range, value ):
+
+ self._job_key.SetVariable( 'range', range )
+ self._job_key.SetVariable( 'value', value )
+
+
+ connection.AddReportHook( hook )
+
+ temp_path = connection.geturl( url, response_to_path = True )
+
+ connection.ClearReportHooks()
+
+ tags = [ 'filename:' + filename + ext ]
+
+ service_identifiers_to_tags = ConvertTagsToServiceIdentifiersToTags( tags, self._advanced_tag_options )
+
+ return ( temp_path, service_identifiers_to_tags, url )
+
+
+ def _CheckCurrentStatus( self ):
+
+ self._job_key.SetVariable( 'status', 'checking md5 status' )
+
+ ( md5, board, image_name, ext, filename ) = self._item
+
+ ( status, hash ) = HC.app.Read( 'md5_status', md5 )
+
+ if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options: status = 'new'
+
+ if status == 'redundant':
+
+ ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
+
+ return ( status, media_result )
+
+ else: return ( status, None )
+
+
+class ImportArgsGeneratorURLs( ImportArgsGenerator ):
+
+ def _GetArgs( self ):
+
+ url = self._item
+
+ self._job_key.SetVariable( 'status', 'downloading' )
+
+ parse_result = urlparse.urlparse( url )
+
+ ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
+
+ connection = HC.get_connection( scheme = scheme, host = host, port = port )
+
+ def hook( range, value ):
+
+ self._job_key.SetVariable( 'range', range )
+ self._job_key.SetVariable( 'value', value )
+
+
+ connection.AddReportHook( hook )
+
+ temp_path = connection.geturl( url, response_to_path = True )
+
+ connection.ClearReportHooks()
+
+ service_identifiers_to_tags = {}
+
+ return ( temp_path, service_identifiers_to_tags, url )
+
+
+ def _CheckCurrentStatus( self ):
+
+ url = self._item
+
+ self._job_key.SetVariable( 'status', 'checking url status' )
+
+ ( status, hash ) = HC.app.Read( 'url_status', url )
+
+ if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options: status = 'new'
+
+ if status == 'redundant':
+
+ ( media_result, ) = HC.app.Read( 'media_results', HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) )
+
+ return ( status, media_result )
+
+ else: return ( status, None )
+
+
+class ImportQueueGenerator():
+
+ def __init__( self, job_key, item ):
+
+ self._job_key = job_key
+ self._item = item
+
+
+ def __call__( self ):
+
+ queue = self._item
+
+ self._job_key.SetVariable( 'queue', queue )
+
+ self._job_key.Finish()
+
+
+class ImportQueueGeneratorGallery( ImportQueueGenerator ):
+
+ def __init__( self, job_key, item, downloaders_factory ):
+
+ ImportQueueGenerator.__init__( self, job_key, item )
+
+ self._downloaders_factory = downloaders_factory
+
+
+ def __call__( self ):
+
+ try:
+
+ raw_query = self._item
+
+ downloaders = list( self._downloaders_factory( raw_query ) )
+
+ downloaders[0].SetupGallerySearch() # for now this is cookie-based for hf, so only have to do it on one
+
+ total_urls_found = 0
+
+ while True:
+
+ downloaders_to_remove = []
+
+ for downloader in downloaders:
+
+ if self._job_key.IsPaused():
+
+ self._job_key.SetVariable( 'status', 'paused after ' + HC.u( total_urls_found ) + ' urls' )
+
+ self._job_key.WaitOnPause()
+
+
+ if self._job_key.IsCancelled(): break
+
+ self._job_key.SetVariable( 'status', 'found ' + HC.u( total_urls_found ) + ' urls' )
+
+ page_of_url_args = downloader.GetAnotherPage()
+
+ total_urls_found += len( page_of_url_args )
+
+ if len( page_of_url_args ) == 0: downloaders_to_remove.append( downloader )
+ else:
+
+ queue = self._job_key.GetVariable( 'queue' )
+
+ queue = list( queue )
+
+ queue.extend( page_of_url_args )
+
+ self._job_key.SetVariable( 'queue', queue )
+
+
+
+ for downloader in downloaders_to_remove: downloaders.remove( downloader )
+
+ if len( downloaders ) == 0: break
+
+ if self._job_key.IsPaused():
+
+ self._job_key.SetVariable( 'status', 'paused after ' + HC.u( total_urls_found ) + ' urls' )
+
+ self._job_key.WaitOnPause()
+
+
+ if self._job_key.IsCancelled(): break
+
+
+ self._job_key.SetVariable( 'status', '' )
+
+ except Exception as e:
+
+ self._job_key.SetVariable( 'status', HC.u( e ) )
+
+ HC.ShowException( e )
+
+ time.sleep( 2 )
+
+ finally: self._job_key.Finish()
+
+
+class ImportQueueGeneratorURLs( ImportQueueGenerator ):
+
+ def __call__( self ):
+
+ try:
+
+ url = self._item
+
+ self._job_key.SetVariable( 'status', 'parsing url' )
+
+ try:
+
+ parse_result = urlparse.urlparse( url )
+
+ ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
+
+ except: raise Exception( 'Could not parse that URL' )
+
+ self._job_key.SetVariable( 'status', 'Connecting to address' )
+
+ try: connection = HC.get_connection( scheme = scheme, host = host, port = port )
+ except: raise Exception( 'Could not connect to server' )
+
+ try: html = connection.geturl( url )
+ except: raise Exception( 'Could not download that url' )
+
+ self._job_key.SetVariable( 'status', 'parsing html' )
+
+ try: urls = ClientParsers.ParsePage( html, url )
+ except: raise Exception( 'Could not parse that URL\'s html' )
+
+ queue = urls
+
+ self._job_key.SetVariable( 'queue', queue )
+
+ except Exception as e:
+
+ self._job_key.SetVariable( 'status', HC.u( e ) )
+
+ HC.ShowException( e )
+
+ time.sleep( 2 )
+
+ finally: self._job_key.Finish()
+
+
+class ImportQueueGeneratorThread( ImportQueueGenerator ):
+
+ def __call__( self ):
+
+ try:
+
+ ( board, thread_id ) = self._item
+
+ last_thread_check = 0
+ image_infos_already_added = set()
+
+ while True:
+
+ if self._job_key.IsPaused():
+
+ self._job_key.SetVariable( 'status', 'paused' )
+
+ self._job_key.WaitOnPause()
+
+
+ if self._job_key.IsCancelled(): break
+
+ thread_time = self._job_key.GetVariable( 'thread_time' )
+
+ if thread_time < 30: thread_time = 30
+
+ next_thread_check = last_thread_check + thread_time
+
+ if next_thread_check < HC.GetNow():
+
+ self._job_key.SetVariable( 'status', 'checking thread' )
+
+ url = 'http://api.4chan.org/' + board + '/res/' + thread_id + '.json'
+
+ try:
+
+ connection = HC.get_connection( url = url )
+
+ raw_json = connection.geturl( url )
+
+ json_dict = json.loads( raw_json )
+
+ posts_list = json_dict[ 'posts' ]
+
+ image_infos = [ ( post[ 'md5' ].decode( 'base64' ), board, HC.u( post[ 'tim' ] ), post[ 'ext' ], post[ 'filename' ] ) for post in posts_list if 'md5' in post ]
+
+ image_infos_i_can_add = [ image_info for image_info in image_infos if image_info not in image_infos_already_added ]
+
+ image_infos_already_added.update( image_infos_i_can_add )
+
+ if len( image_infos_i_can_add ) > 0:
+
+ queue = self._job_key.GetVariable( 'queue' )
+
+ queue = list( queue )
+
+ queue.extend( image_infos_i_can_add )
+
+ self._job_key.SetVariable( 'queue', queue )
+
+
+ except HydrusExceptions.NotFoundException: raise Exception( 'Thread 404' )
+
+ last_thread_check = HC.GetNow()
+
+ else: self._job_key.SetVariable( 'status', 'rechecking thread ' + HC.ConvertTimestampToPrettyPending( next_thread_check ) )
+
+
+
+ except Exception as e:
+
+ self._job_key.SetVariable( 'status', HC.u( e ) )
+
+ HC.ShowException( e )
+
+ time.sleep( 2 )
+
+ finally: self._job_key.Finish()
+
+
+
+class ImportController():
+
+ def __init__( self, import_args_generator_factory, import_queue_generator_factory, page_key = None ):
+
+ self._controller_job_key = self._GetNewJobKey( 'controller' )
+
+ self._import_args_generator_factory = import_args_generator_factory
+ self._import_queue_generator_factory = import_queue_generator_factory
self._page_key = page_key
- self._import_args_generator = import_args_generator
- self._queue_is_done = False
-
- self._queue = []
-
- self._paused = False
-
- self._current_position = 0
+ self._import_job_key = self._GetNewJobKey( 'import' )
+ self._import_queue_position_job_key = self._GetNewJobKey( 'import_queue_position' )
+ self._import_queue_job_key = self._GetNewJobKey( 'import_queue' )
+ self._pending_import_queue_jobs = []
self._lock = threading.Lock()
- HC.pubsub.sub( self, 'SetPaused', 'pause_import_queue_processor' )
-
- def AddToQueue( self, queue_objects ):
+ def _GetNewJobKey( self, type ):
- with self._lock: self._queue.extend( queue_objects )
+ job_key = HC.JobKey()
-
- def QueueIsDone( self ): self._queue_is_done = True
-
- def SetPaused( self, status ): self._paused = status
-
- def ToTuple( self ):
-
- with self._lock: return ( self._current_position, len( self._queue ) )
-
-
- def ProcessQueue( self ):
-
- while not self._queue_is_done:
+ if type == 'controller':
- with self._lock: queue_length = len( self._queue )
+ job_key.SetVariable( 'num_successful', 0 )
+ job_key.SetVariable( 'num_failed', 0 )
+ job_key.SetVariable( 'num_deleted', 0 )
+ job_key.SetVariable( 'num_redundant', 0 )
- if not self._paused and self._current_position < queue_length:
+ else:
+
+ job_key.SetVariable( 'status', '' )
+
+ if type == 'import':
- with self._lock: queue_object = self._queue[ self._current_position ]
+ job_key.SetVariable( 'page_key', self._page_key )
+ job_key.SetVariable( 'range', 1 )
+ job_key.SetVariable( 'value', 0 )
- # reorder these params as is best
- ( temp_path, url, tags, anything_else ) = self._path_generator( self._page_key, queue_object )
+ elif type == 'import_queue_position':
- # synchronously write import to db
+ job_key.SetVariable( 'queue_position', 0 )
- self._current_position += 1
+ elif type == 'import_queue':
+
+ job_key.SetVariable( 'queue', [] )
- time.sleep( 1 )
+
+ return job_key
+
+
+ def CleanBeforeDestroy( self ): self._controller_job_key.Cancel()
+
+ def GetJobKey( self, type ):
+
+ with self._lock:
+
+ if type == 'controller': return self._controller_job_key
+ elif type == 'import': return self._import_job_key
+ elif type == 'import_queue_position': return self._import_queue_position_job_key
+ elif type == 'import_queue': return self._import_queue_job_key
-def PathGeneratorBooru( self, page_key, queue_object ):
+ def GetPendingImportQueues( self ):
+
+ with self._lock: return self._pending_import_queue_jobs
+
- # unpack queue_object
- # test url or whatever as appropriate
- # fetch file, possibly with help of downloader or whatever!
- # downloader should write file to path, returning temp_path
- # we should return temp_path
+ def PendImportQueue( self, job ):
+
+ with self._lock: self._pending_import_queue_jobs.append( job )
+
- pass
+ def RemovePendingImportQueue( self, job ):
+
+ with self._lock:
+
+ if job in self._pending_import_queue_jobs: self._pending_import_queue_jobs.remove( job )
+
+
+
+ def MovePendingImportQueueUp( self, job ):
+
+ with self._lock:
+
+ if job in self._pending_import_queue_jobs:
+
+ index = self._pending_import_queue_jobs.index( job )
+
+ if index > 0:
+
+ self._pending_import_queue_jobs.remove( job )
+
+ self._pending_import_queue_jobs.insert( index - 1, job )
+
+
+
+
+
+    def MovePendingImportQueueDown( self, job ):
+        
+        # Demote the given pending queue job one position, if present and not already last.
+        with self._lock:
+            
+            if job in self._pending_import_queue_jobs:
+                
+                index = self._pending_import_queue_jobs.index( job )
+                
+                if index + 1 < len( self._pending_import_queue_jobs ):
+                    
+                    self._pending_import_queue_jobs.remove( job )
+                    
+                    self._pending_import_queue_jobs.insert( index + 1, job )
+                
+            
+        
+
+ def MainLoop( self ):
+
+ try:
+
+ while not self._controller_job_key.IsDone():
+
+ if self._controller_job_key.IsPaused():
+
+ self._import_job_key.Pause()
+ self._import_queue_position_job_key.Pause()
+ self._import_queue_job_key.Pause()
+
+ self._controller_job_key.WaitOnPause()
+
+
+ with self._lock:
+
+ queue_position = self._import_queue_position_job_key.GetVariable( 'queue_position' )
+ queue = self._import_queue_job_key.GetVariable( 'queue' )
+
+ if self._import_job_key.IsDone():
+
+ result = self._import_job_key.GetVariable( 'result' )
+
+ variable_name = 'num_' + result
+
+ num_result = self._controller_job_key.GetVariable( variable_name )
+
+ self._controller_job_key.SetVariable( variable_name, num_result + 1 )
+
+ self._import_job_key = self._GetNewJobKey( 'import' )
+
+ queue_position += 1
+
+ self._import_queue_position_job_key.SetVariable( 'queue_position', queue_position )
+
+
+ position_string = HC.u( queue_position + 1 ) + '/' + HC.u( len( queue ) )
+
+ if self._import_queue_position_job_key.IsPaused(): self._import_queue_position_job_key.SetVariable( 'status', 'paused at ' + position_string )
+ elif self._import_queue_position_job_key.IsWorking():
+
+ if self._import_job_key.IsWorking():
+
+ self._import_queue_position_job_key.SetVariable( 'status', 'processing ' + position_string )
+
+ else:
+
+ if queue_position < len( queue ):
+
+ self._import_queue_position_job_key.SetVariable( 'status', 'preparing ' + position_string )
+
+ self._import_job_key.Begin()
+
+ item = queue[ queue_position ]
+
+ args_generator = self._import_args_generator_factory( self._import_job_key, item )
+
+ threading.Thread( target = args_generator, name = 'Generate Import Args' ).start()
+
+ else:
+
+ if self._import_queue_job_key.IsWorking(): self._import_queue_position_job_key.SetVariable( 'status', 'waiting for more items' )
+ else: self._import_queue_position_job_key.Finish()
+
+
+
+ else:
+
+ if self._import_queue_position_job_key.IsDone():
+
+ if self._import_queue_position_job_key.IsCancelled(): status = 'cancelled at ' + position_string
+ else: status = 'done'
+
+ self._import_queue_position_job_key = self._GetNewJobKey( 'import_queue_position' )
+
+ self._import_queue_job_key = self._GetNewJobKey( 'import_queue' )
+
+ else: status = ''
+
+ self._import_queue_position_job_key.SetVariable( 'status', status )
+
+ if len( self._pending_import_queue_jobs ) > 0:
+
+ self._import_queue_position_job_key.Begin()
+
+ self._import_queue_job_key.Begin()
+
+ item = self._pending_import_queue_jobs.pop( 0 )
+
+ queue_generator = self._import_queue_generator_factory( self._import_queue_job_key, item )
+
+ threading.Thread( target = queue_generator, name = 'Generate Import Items' ).start()
+
+
+
+
+ time.sleep( 0.05 )
+
+
+ except Exception as e: HC.ShowException( e )
+ finally:
+
+ self._import_job_key.Cancel()
+ self._import_queue_position_job_key.Cancel()
+ self._import_queue_job_key.Cancel()
+
+
+
+ def StartThread( self ):
+
+ threading.Thread( target = self.MainLoop ).start()
+
def THREADDownloadURL( job_key, url, message_string ):
diff --git a/include/HydrusExceptions.py b/include/HydrusExceptions.py
index 4eb5436b..071a4168 100644
--- a/include/HydrusExceptions.py
+++ b/include/HydrusExceptions.py
@@ -1,3 +1,4 @@
+class DBException( Exception ): pass
class DBAccessException( Exception ): pass
class MimeException( Exception ): pass
class SizeException( Exception ): pass
diff --git a/include/ServerDB.py b/include/ServerDB.py
index 73ad418b..6cc98bef 100755
--- a/include/ServerDB.py
+++ b/include/ServerDB.py
@@ -2798,7 +2798,7 @@ class DB( ServiceDB ):
synchronous = True
- job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
+ job = HC.JobDatabase( action, job_type, synchronous, *args, **kwargs )
self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first
@@ -2811,7 +2811,7 @@ class DB( ServiceDB ):
synchronous = True
- job = HC.JobInternal( action, job_type, synchronous, *args, **kwargs )
+ job = HC.JobDatabase( action, job_type, synchronous, *args, **kwargs )
self._jobs.put( ( priority, job ) )
diff --git a/include/TestDB.py b/include/TestDB.py
index e32a19ae..04266620 100644
--- a/include/TestDB.py
+++ b/include/TestDB.py
@@ -618,11 +618,15 @@ class TestClientDB( unittest.TestCase ):
generate_media_result = True
- ( written_result, written_hash, written_media_result ) = self._write( 'import_file', path, generate_media_result = True )
+ ( written_result, written_hash ) = self._write( 'import_file', path )
self.assertEqual( written_result, 'successful' )
self.assertEqual( written_hash, hash )
+ ( written_result, written_media_result ) = self._write( 'import_file', path, generate_media_result = True )
+
+ self.assertEqual( written_result, 'redundant' )
+
( mr_hash, mr_inbox, mr_size, mr_mime, mr_timestamp, mr_width, mr_height, mr_duration, mr_num_frames, mr_num_words, mr_tags_manager, mr_file_service_identifiers_cdpp, mr_local_ratings, mr_remote_ratings ) = written_media_result.ToTuple()
now = HC.GetNow()