Version 240

This commit is contained in:
Hydrus Network Developer 2017-01-11 16:31:30 -06:00
parent 0d36699fb7
commit 643586ce04
22 changed files with 650 additions and 333 deletions

9
db/extract_options.sql Normal file
View File

@ -0,0 +1,9 @@
-- Emergency extract script: copies the options out of a (possibly broken) client.db.
-- Usage: put this file, client.db, and the sqlite3 executable in the same folder, then run:
--   sqlite3 < extract_options.sql
-- Feed the generated my_options.sql into a fresh db the same way to import.
.open client.db
.out my_options.sql
-- everything printed/selected from here on goes into my_options.sql;
-- the generated script first reopens client.db and clears any existing options before inserting
.print .open client.db\r\n
.print delete from options;\r\n
.print delete from json_dumps where dump_type = 22;\r\n
-- dump_type 22 is the json-serialised options dump
.mode insert options
select * from options;
.mode insert json_dumps
select * from json_dumps where dump_type = 22;

View File

@ -0,0 +1,6 @@
-- Emergency extract script: copies the subscriptions out of a (possibly broken) client.db.
-- Usage: put this file, client.db, and the sqlite3 executable in the same folder, then run:
--   sqlite3 < extract_subscriptions.sql
-- this warning prints to the console, since no .out redirect is active yet
.print The subscriptions will lose their tag import options, so make sure to check them once they are imported back in.
.open client.db
.out my_subscriptions.sql
-- everything printed/selected from here on goes into my_subscriptions.sql;
-- the generated script reopens client.db when later fed back into sqlite3
.print .open client.db\r\n
-- dump_type 3 is the serialised subscription objects
.mode insert json_dumps_named
select * from json_dumps_named where dump_type = 3;

View File

@ -0,0 +1,9 @@
If your db is completely broken and you need to extract some important data, please check out the emergency extract scripts. To use them, put your old database, the sqlite3 executable, and the script in the same folder and feed the script into sqlite3, like so:
sqlite3 < extract_subscriptions.sql
This will connect to the database and copy your subscriptions to the new file my_subscriptions.sql, which you can then move and import to a new db folder in the same way:
sqlite3 < my_subscriptions.sql
Some things are difficult to copy over at this basic level. Your tag options and anything else service-specific will be lost or reset back to default.

View File

@ -8,6 +8,28 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 240</h3></li>
<ul>
<li>improved how the client analyzes itself, reducing maintenance latency and also overall cpu usage. syncing a big repo will no longer introduce lingering large lag, and huge analyze jobs will run significantly less frequently</li>
<li>the analyze cache will be reset on update, so you will have one big round of analyze the next time you maintain, and then you are good</li>
<li>added data structures to support auto-discovery of duplicate files</li>
<li>improved how some similar files maintenance occurs</li>
<li>flushed out duplicate status reporting</li>
<li>added a new page type, currently under pages->search pages->duplicates, to handle duplicate discovery and filtering</li>
<li>created a gui skeleton for the new duplicates page</li>
<li>started some handles and update code for the new duplicates page</li>
<li>wrote a new txt file in the db dir about the new emergency extract scripts</li>
<li>wrote an emergency extract script to migrate subscriptions to a new db</li>
<li>wrote an emergency extract script to migrate options to a new db</li>
<li>the trash clearing daemon now runs in the foreground, and a little of its code is improved</li>
<li>the trash clearing daemon now makes a popup message when it does work</li>
<li>the server's ssl keys are now set to read-only on Windows and owner-read-only (i.e. chmod 400) on Linux and OS X on creation and update</li>
<li>added an explicitly unicode popup message to the debug test</li>
<li>fixed some network error catching code that was using Windows-only error codes</li>
<li>converted more of the thumbnail right-click menu over to the new system</li>
<li>improved some listctrl code</li>
<li>misc cleanup</li>
</ul>
<li><h3>version 239</h3></li>
<ul>
<li>finished up similar files search data maintenance code</li>

View File

@ -2951,10 +2951,8 @@ class WebSessionManagerClient( object ):
form_fields = {}
form_fields[ 'mode' ] = 'login'
form_fields[ 'pixiv_id' ] = id
form_fields[ 'pass' ] = password
form_fields[ 'skip' ] = '1'
form_fields[ 'password' ] = password
body = urllib.urlencode( form_fields )
@ -2964,7 +2962,7 @@ class WebSessionManagerClient( object ):
( response_gumpf, cookies ) = self._controller.DoHTTP( HC.POST, 'http://www.pixiv.net/login.php', request_headers = headers, body = body, return_cookies = True )
# _ only given to logged in php sessions
if 'PHPSESSID' not in cookies or '_' not in cookies[ 'PHPSESSID' ]: raise Exception( 'Pixiv login credentials not accepted!' )
if 'PHPSESSID' not in cookies: raise Exception( 'Pixiv login credentials not accepted!' )
expires = now + 30 * 86400

View File

@ -636,8 +636,8 @@ class Controller( HydrusController.HydrusController ):
self._daemons.append( HydrusThreading.DAEMONForegroundWorker( self, 'SynchroniseSubscriptions', ClientDaemons.DAEMONSynchroniseSubscriptions, ( 'notify_restart_subs_sync_daemon', 'notify_new_subscriptions' ), init_wait = 60, pre_call_wait = 3 ) )
self._daemons.append( HydrusThreading.DAEMONForegroundWorker( self, 'CheckImportFolders', ClientDaemons.DAEMONCheckImportFolders, ( 'notify_restart_import_folders_daemon', 'notify_new_import_folders' ), period = 180 ) )
self._daemons.append( HydrusThreading.DAEMONForegroundWorker( self, 'CheckExportFolders', ClientDaemons.DAEMONCheckExportFolders, ( 'notify_restart_export_folders_daemon', 'notify_new_export_folders' ), period = 180 ) )
self._daemons.append( HydrusThreading.DAEMONForegroundWorker( self, 'MaintainTrash', ClientDaemons.DAEMONMaintainTrash, init_wait = 120 ) )
self._daemons.append( HydrusThreading.DAEMONBackgroundWorker( self, 'MaintainTrash', ClientDaemons.DAEMONMaintainTrash, init_wait = 60 ) )
self._daemons.append( HydrusThreading.DAEMONBackgroundWorker( self, 'RebalanceClientFiles', ClientDaemons.DAEMONRebalanceClientFiles, period = 3600 ) )
self._daemons.append( HydrusThreading.DAEMONBackgroundWorker( self, 'SynchroniseRepositories', ClientDaemons.DAEMONSynchroniseRepositories, ( 'notify_restart_repo_sync_daemon', 'notify_new_permissions' ), period = 4 * 3600, pre_call_wait = 3 ) )
self._daemons.append( HydrusThreading.DAEMONBackgroundWorker( self, 'UPnP', ClientDaemons.DAEMONUPnP, ( 'notify_new_upnp_mappings', ), init_wait = 120, pre_call_wait = 6 ) )
@ -692,7 +692,7 @@ class Controller( HydrusController.HydrusController ):
loaded_into_disk_cache = HydrusGlobals.client_controller.Read( 'load_into_disk_cache', stop_time = disk_cache_stop_time, caller_limit = disk_cache_maintenance_mb * 1024 * 1024 )
self.WriteInterruptable( 'maintain_similar_files', stop_time = stop_time )
self.WriteInterruptable( 'maintain_similar_files_tree', stop_time = stop_time )
self.WriteInterruptable( 'vacuum', stop_time = stop_time )

View File

@ -1073,10 +1073,10 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM file_transfers WHERE service_id = ? AND hash_id IN ' + splayed_valid_hash_ids + ';', ( service_id, ) )
info = self._c.execute( 'SELECT size, mime FROM files_info WHERE hash_id IN ' + splayed_valid_hash_ids + ';' ).fetchall()
info = self._c.execute( 'SELECT hash_id, size, mime FROM files_info WHERE hash_id IN ' + splayed_valid_hash_ids + ';' ).fetchall()
num_files = len( valid_hash_ids )
delta_size = sum( ( size for ( size, mime ) in info ) )
delta_size = sum( ( size for ( hash_id, size, mime ) in info ) )
num_inbox = len( valid_hash_ids.intersection( self._inbox_hash_ids ) )
service_info_updates = []
@ -1264,31 +1264,9 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'REPLACE INTO web_sessions ( name, cookies, expiry ) VALUES ( ?, ?, ? );', ( name, cookies, expires ) )
def _Analyze( self, stop_time = None, only_when_idle = False, force_reanalyze = False ):
def _AnalyzeStaleBigTables( self, stop_time = None, only_when_idle = False, force_reanalyze = False ):
stale_time_delta = 30 * 86400
existing_names_to_timestamps = dict( self._c.execute( 'SELECT name, timestamp FROM analyze_timestamps;' ).fetchall() )
db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp' ) ]
all_names = set()
for db_name in db_names:
all_names.update( ( name for ( name, ) in self._c.execute( 'SELECT name FROM ' + db_name + '.sqlite_master WHERE type = ?;', ( 'table', ) ) ) )
all_names.discard( 'sqlite_stat1' )
if force_reanalyze:
names_to_analyze = list( all_names )
else:
names_to_analyze = [ name for name in all_names if name not in existing_names_to_timestamps or HydrusData.TimeHasPassed( existing_names_to_timestamps[ name ] + stale_time_delta ) ]
names_to_analyze = self._GetBigTableNamesToAnalyze( force_reanalyze = force_reanalyze )
if len( names_to_analyze ) > 0:
@ -1307,11 +1285,7 @@ class DB( HydrusDB.HydrusDB ):
started = HydrusData.GetNowPrecise()
self._c.execute( 'ANALYZE ' + name + ';' )
self._c.execute( 'DELETE FROM analyze_timestamps WHERE name = ?;', ( name, ) )
self._c.execute( 'INSERT OR IGNORE INTO analyze_timestamps ( name, timestamp ) VALUES ( ?, ? );', ( name, HydrusData.GetNow() ) )
self._AnalyzeTable( name )
time_took = HydrusData.GetNowPrecise() - started
@ -1339,6 +1313,17 @@ class DB( HydrusDB.HydrusDB ):
def _AnalyzeTable( self, name ):
self._c.execute( 'ANALYZE ' + name + ';' )
( num_rows, ) = self._c.execute( 'SELECT COUNT( * ) FROM ' + name + ';' ).fetchone()
self._c.execute( 'DELETE FROM analyze_timestamps WHERE name = ?;', ( name, ) )
self._c.execute( 'INSERT OR IGNORE INTO analyze_timestamps ( name, num_rows, timestamp ) VALUES ( ?, ?, ? );', ( name, num_rows, HydrusData.GetNow() ) )
def _ArchiveFiles( self, hash_ids ):
valid_hash_ids = [ hash_id for hash_id in hash_ids if hash_id in self._inbox_hash_ids ]
@ -1632,6 +1617,9 @@ class DB( HydrusDB.HydrusDB ):
self._c.executemany( 'INSERT OR IGNORE INTO shape_maintenance_branch_regen ( phash_id ) VALUES ( ? );', ( ( phash_id, ) for phash_id in deletee_phash_ids ) )
self._c.execute( 'DELETE FROM shape_search_cache WHERE hash_id = ?;', ( hash_id, ) )
self._c.execute( 'DELETE FROM duplicate_pairs WHERE smaller_hash_id = ? or larger_hash_id = ?;', ( hash_id, hash_id ) )
def _CacheSimilarFilesGenerateBranch( self, job_key, parent_id, phash_id, phash, children ):
@ -1742,17 +1730,13 @@ class DB( HydrusDB.HydrusDB ):
def _CacheSimilarFilesGetMaintenanceStatus( self ):
( num_current_phashes, ) = self._c.execute( 'SELECT COUNT( * ) FROM shape_perceptual_hashes;' ).fetchone()
searched_distances_to_count = collections.Counter( dict( self._c.execute( 'SELECT searched_distance, COUNT( * ) FROM shape_search_cache GROUP BY searched_distance;' ) ) )
duplicate_types_to_count = collections.Counter( dict( self._c.execute( 'SELECT duplicate_type, COUNT( * ) FROM duplicate_pairs GROUP BY duplicate_type;' ) ) )
( num_phashes_to_regen, ) = self._c.execute( 'SELECT COUNT( * ) FROM shape_maintenance_phash_regen;' ).fetchone()
( num_branches_to_regen, ) = self._c.execute( 'SELECT COUNT( * ) FROM shape_maintenance_branch_regen;' ).fetchone()
# gui will present this as a general 'still 100,000 still to go!' and 'completely ready to go!'
# I could stick this on local files review services, I guess, although it better belongs on a new 'all local files' service page.
# can add the arbitrary dupe search cache to this as well
return ( num_current_phashes, num_phashes_to_regen, num_branches_to_regen )
return ( searched_distances_to_count, duplicate_types_to_count, num_phashes_to_regen, num_branches_to_regen )
def _CacheSimilarFilesAssociatePHashes( self, hash_id, phashes ):
@ -1768,42 +1752,35 @@ class DB( HydrusDB.HydrusDB ):
phash_ids.add( phash_id )
self._c.execute( 'REPLACE INTO shape_search_cache ( hash_id, searched_distance ) VALUES ( ?, ? );', ( hash_id, None ) )
return phash_ids
def _CacheSimilarFilesMaintain( self, stop_time ):
def _CacheSimilarFilesMaintainFiles( self, job_key ):
job_key = ClientThreading.JobKey( cancellable = True )
job_key.SetVariable( 'popup_title', 'similar files metadata maintenance' )
job_key_pubbed = False
# this should take a cancellable job_key from the gui filter window
hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM shape_maintenance_phash_regen;' ) ]
# remove hash_id from the pairs cache?
# set its search status to False, but don't remove any existing pairs
client_files_manager = self._controller.GetClientFilesManager()
num_to_do = len( hash_ids )
for ( i, hash_id ) in enumerate( hash_ids ):
if not job_key_pubbed:
self._controller.pub( 'message', job_key )
job_key_pubbed = True
( i_paused, should_quit ) = job_key.WaitIfNeeded()
if should_quit or HydrusData.TimeHasPassed( stop_time ):
if should_quit:
return
text = 'regenerating similar file metadata - ' + HydrusData.ConvertValueRangeToPrettyString( i, num_to_do )
HydrusGlobals.client_controller.pub( 'splash_set_status_text', text )
job_key.SetVariable( 'popup_text_1', text )
job_key.SetVariable( 'popup_gauge_1', ( i, num_to_do ) )
@ -1853,6 +1830,17 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM shape_maintenance_phash_regen WHERE hash_id = ?;', ( hash_id, ) )
job_key.Finish()
def _CacheSimilarFilesMaintainTree( self, stop_time ):
job_key = ClientThreading.JobKey( cancellable = True )
job_key.SetVariable( 'popup_title', 'similar files metadata maintenance' )
job_key_pubbed = False
rebalance_phash_ids = [ phash_id for ( phash_id, ) in self._c.execute( 'SELECT phash_id FROM shape_maintenance_branch_regen;' ) ]
num_to_do = len( rebalance_phash_ids )
@ -1901,13 +1889,6 @@ class DB( HydrusDB.HydrusDB ):
def _CacheSimilarFilesMaintenanceDue( self ):
result = self._c.execute( 'SELECT 1 FROM shape_maintenance_phash_regen;' ).fetchone()
if result is not None:
return True
result = self._c.execute( 'SELECT 1 FROM shape_maintenance_branch_regen;' ).fetchone()
if result is not None:
@ -2685,7 +2666,7 @@ class DB( HydrusDB.HydrusDB ):
# main
self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, timestamp INTEGER );' )
self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, num_rows INTEGER, timestamp INTEGER );' )
self._c.execute( 'CREATE TABLE client_files_locations ( prefix TEXT, location TEXT );' )
@ -2820,6 +2801,11 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE TABLE external_caches.shape_maintenance_phash_regen ( hash_id INTEGER PRIMARY KEY );' )
self._c.execute( 'CREATE TABLE external_caches.shape_maintenance_branch_regen ( phash_id INTEGER PRIMARY KEY );' )
self._c.execute( 'CREATE TABLE external_caches.shape_search_cache ( hash_id INTEGER PRIMARY KEY, searched_distance INTEGER );' )
self._c.execute( 'CREATE TABLE external_caches.duplicate_pairs ( smaller_hash_id INTEGER, larger_hash_id INTEGER, duplicate_type INTEGER, PRIMARY KEY( smaller_hash_id, larger_hash_id ) );' )
self._c.execute( 'CREATE UNIQUE INDEX external_caches.duplicate_pairs_reversed_hash_ids ON duplicate_pairs ( larger_hash_id, smaller_hash_id );' )
# master
self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
@ -3053,7 +3039,7 @@ class DB( HydrusDB.HydrusDB ):
self._controller.CallToThread( client_files_manager.DelayedDeleteFiles, file_hashes )
useful_thumbnail_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id != ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';', ( self._trash_service_id, ) ) }
useful_thumbnail_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';' ) }
deletable_thumbnail_hash_ids = hash_ids.difference( useful_thumbnail_hash_ids )
@ -3560,6 +3546,81 @@ class DB( HydrusDB.HydrusDB ):
return predicates
def _GetBigTableNamesToAnalyze( self, force_reanalyze = False ):
    """Return a list of table names that are due an SQLite ANALYZE.
    
    Walks every attached database (except mem/temp), gathers all table
    names, and decides which are stale based on the row count recorded in
    analyze_timestamps at their last analyze. Small tables are analyzed
    inline here, since that only costs a millisecond or so; only the big
    tables are returned for the caller to schedule as real maintenance work.
    
    If force_reanalyze is True, every table is returned regardless of staleness.
    """
    
    db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp' ) ]
    
    all_names = set()
    
    for db_name in db_names:
        
        all_names.update( ( name for ( name, ) in self._c.execute( 'SELECT name FROM ' + db_name + '.sqlite_master WHERE type = ?;', ( 'table', ) ) ) )
        
    
    # sqlite's own statistics table should never itself be analyzed
    all_names.discard( 'sqlite_stat1' )
    
    if force_reanalyze:
        
        names_to_analyze = list( all_names )
        
    else:
        
        # The idea here is that some tables get huge faster than the normal maintenance cycle (usually after syncing to a big repo).
        # They then search very slowly for a couple of weeks, and after that they rarely need new analyzes.
        # Analyze on a small table takes ~1ms, so instead frequently do the small tables (inline, below) and throttle the big ones down as they grow.
        
        big_table_minimum = 10000
        huge_table_minimum = 1000000
        
        small_table_stale_time_delta = 86400 # one day
        big_table_stale_time_delta = 30 * 86400 # one month
        huge_table_stale_time_delta = 30 * 86400 * 6 # six months
        
        existing_names_to_info = { name : ( num_rows, timestamp ) for ( name, num_rows, timestamp ) in self._c.execute( 'SELECT name, num_rows, timestamp FROM analyze_timestamps;' ) }
        
        names_to_analyze = []
        
        for name in all_names:
            
            if name in existing_names_to_info:
                
                ( num_rows, timestamp ) = existing_names_to_info[ name ]
                
                if num_rows > big_table_minimum:
                    
                    # big and huge tables get progressively longer staleness windows
                    if num_rows > huge_table_minimum:
                        
                        due_time = timestamp + huge_table_stale_time_delta
                        
                    else:
                        
                        due_time = timestamp + big_table_stale_time_delta
                        
                    
                    if HydrusData.TimeHasPassed( due_time ):
                        
                        names_to_analyze.append( name )
                        
                    
                else:
                    
                    # these usually take a couple of milliseconds, so just sneak them in here. no need to bother the user with a prompt
                    if HydrusData.TimeHasPassed( timestamp + small_table_stale_time_delta ):
                        
                        self._AnalyzeTable( name )
                        
                    
                
            else:
                
                # never analyzed before--queue it up
                names_to_analyze.append( name )
                
            
        
    
    return names_to_analyze
def _GetClientFilesLocations( self ):
result = { prefix : HydrusPaths.ConvertPortablePathToAbsPath( location ) for ( prefix, location ) in self._c.execute( 'SELECT prefix, location FROM client_files_locations;' ) }
@ -5777,6 +5838,31 @@ class DB( HydrusDB.HydrusDB ):
hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ?' + age_phrase + limit_phrase + ';', ( self._trash_service_id, ) ) }
if HydrusGlobals.db_report_mode:
message = 'When asked for '
if limit is None:
message += 'all the'
else:
message += 'at most ' + HydrusData.ConvertIntToPrettyString( limit )
message += ' trash files,'
if minimum_age is not None:
message += ' with minimum age ' + HydrusData.ConvertTimestampToPrettyAge( timestamp_cutoff ) + ','
message += ' I found ' + HydrusData.ConvertIntToPrettyString( len( hash_ids ) ) + '.'
HydrusData.ShowText( message )
return self._GetHashes( hash_ids )
@ -6211,22 +6297,7 @@ class DB( HydrusDB.HydrusDB ):
# analyze
stale_time_delta = 30 * 86400
existing_names_to_timestamps = dict( self._c.execute( 'SELECT name, timestamp FROM analyze_timestamps;' ).fetchall() )
db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp' ) ]
all_names = set()
for db_name in db_names:
all_names.update( ( name for ( name, ) in self._c.execute( 'SELECT name FROM ' + db_name + '.sqlite_master WHERE type = ?;', ( 'table', ) ) ) )
names_to_analyze = { name for name in all_names if name not in existing_names_to_timestamps or HydrusData.TimeHasPassed( existing_names_to_timestamps[ name ] + stale_time_delta ) }
names_to_analyze.discard( 'sqlite_stat1' )
names_to_analyze = self._GetBigTableNamesToAnalyze()
if len( names_to_analyze ) > 0:
@ -7119,6 +7190,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'service_filenames': result = self._GetServiceFilenames( *args, **kwargs )
elif action == 'service_info': result = self._GetServiceInfo( *args, **kwargs )
elif action == 'services': result = self._GetServices( *args, **kwargs )
elif action == 'similar_files_maintenance_status': result = self._CacheSimilarFilesGetMaintenanceStatus( *args, **kwargs )
elif action == 'related_tags': result = self._GetRelatedTags( *args, **kwargs )
elif action == 'tag_censorship': result = self._GetTagCensorship( *args, **kwargs )
elif action == 'tag_parents': result = self._GetTagParents( *args, **kwargs )
@ -7506,106 +7578,6 @@ class DB( HydrusDB.HydrusDB ):
self._controller.pub( 'splash_set_title_text', 'updating db to v' + str( version + 1 ) )
if version == 180:
self._c.execute( 'REPLACE INTO yaml_dumps VALUES ( ?, ?, ? );', ( YAML_DUMP_ID_REMOTE_BOORU, 'rule34hentai', ClientDefaults.GetDefaultBoorus()[ 'rule34hentai' ] ) )
#
names_seen = set()
info = self._c.execute( 'SELECT service_id, name FROM services;', ).fetchall()
for ( service_id, name ) in info:
if name in names_seen:
while name in names_seen:
name += str( random.randint( 0, 9 ) )
self._c.execute( 'UPDATE services SET name = ? WHERE service_id = ?;', ( name, service_id ) )
names_seen.add( name )
if version == 182:
self._c.execute( 'DELETE FROM service_info WHERE info_type IN ( ?, ? );', ( HC.SERVICE_INFO_NUM_THUMBNAILS, HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) )
if version == 183:
self._c.execute( 'CREATE TABLE client_files_locations ( prefix TEXT, location TEXT );' )
client_files_default = os.path.join( self._db_dir, 'client_files' )
location = HydrusPaths.ConvertAbsPathToPortablePath( client_files_default, HC.BASE_DIR )
for prefix in HydrusData.IterateHexPrefixes():
self._c.execute( 'INSERT INTO client_files_locations ( prefix, location ) VALUES ( ?, ? );', ( prefix, location ) )
if version == 184:
result = self._c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( '', ) ).fetchone()
if result is not None:
( tag_id, ) = result
self._c.execute( 'DELETE FROM mappings WHERE tag_id = ?;', ( tag_id, ) )
if version == 188:
self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, timestamp INTEGER );' )
if version == 189:
self._controller.pub( 'splash_set_status_text', 'updating file tables' )
#
self._c.execute( 'DROP INDEX file_petitions_hash_id_index;' )
self._c.execute( 'ALTER TABLE file_petitions RENAME TO file_petitions_old;' )
self._c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ) );' )
self._c.execute( 'CREATE INDEX file_petitions_hash_id_index ON file_petitions ( hash_id );' )
self._c.execute( 'INSERT INTO file_petitions SELECT * FROM file_petitions_old;' )
self._c.execute( 'DROP TABLE file_petitions_old;' )
#
self._c.execute( 'ALTER TABLE files_info RENAME TO files_info_old;' )
self._c.execute( 'CREATE TABLE current_files ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE INDEX current_files_timestamp ON current_files ( timestamp );' )
self._c.execute( 'CREATE TABLE files_info ( hash_id INTEGER PRIMARY KEY, size INTEGER, mime INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER );' )
self._c.execute( 'CREATE INDEX files_info_size ON files_info ( size );' )
self._c.execute( 'CREATE INDEX files_info_mime ON files_info ( mime );' )
self._c.execute( 'CREATE INDEX files_info_width ON files_info ( width );' )
self._c.execute( 'CREATE INDEX files_info_height ON files_info ( height );' )
self._c.execute( 'CREATE INDEX files_info_duration ON files_info ( duration );' )
self._c.execute( 'CREATE INDEX files_info_num_frames ON files_info ( num_frames );' )
self._c.execute( 'INSERT INTO current_files SELECT service_id, hash_id, timestamp FROM files_info_old;' )
self._c.execute( 'INSERT OR IGNORE INTO files_info SELECT hash_id, size, mime, width, height, duration, num_frames, num_words FROM files_info_old;' )
self._c.execute( 'DROP TABLE files_info_old;' )
if version == 192:
no_wal_path = os.path.join( self._db_dir, 'no-wal' )
@ -8132,7 +8104,7 @@ class DB( HydrusDB.HydrusDB ):
self._controller.pub( 'splash_set_status_text', 'analyzing new tables' )
self._Analyze()
self._AnalyzeStaleBigTables()
self._c.execute( 'COMMIT;' )
@ -8866,6 +8838,26 @@ class DB( HydrusDB.HydrusDB ):
self.pub_initial_message( message )
if version == 239:
self._c.execute( 'DROP TABLE analyze_timestamps;' )
self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, num_rows INTEGER, timestamp INTEGER );' )
#
self._controller.pub( 'splash_set_status_text', 'setting up next step of similar files stuff' )
self._c.execute( 'CREATE TABLE external_caches.shape_search_cache ( hash_id INTEGER PRIMARY KEY, searched_distance INTEGER );' )
self._c.execute( 'CREATE TABLE external_caches.duplicate_pairs ( smaller_hash_id INTEGER, larger_hash_id INTEGER, duplicate_type INTEGER, PRIMARY KEY( smaller_hash_id, larger_hash_id ) );' )
self._c.execute( 'CREATE UNIQUE INDEX external_caches.duplicate_pairs_reversed_hash_ids ON duplicate_pairs ( larger_hash_id, smaller_hash_id );' )
combined_local_file_service_id = self._GetServiceId( CC.COMBINED_LOCAL_FILE_SERVICE_KEY )
self._c.execute( 'INSERT OR IGNORE INTO shape_search_cache SELECT hash_id, NULL FROM current_files, files_info USING ( hash_id ) WHERE service_id = ? and mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WE_CAN_PHASH ) + ';', ( combined_local_file_service_id, ) )
self._controller.pub( 'splash_set_title_text', 'updated db to v' + str( version + 1 ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
@ -9501,7 +9493,7 @@ class DB( HydrusDB.HydrusDB ):
def _Write( self, action, *args, **kwargs ):
if action == 'analyze': result = self._Analyze( *args, **kwargs )
if action == 'analyze': result = self._AnalyzeStaleBigTables( *args, **kwargs )
elif action == 'backup': result = self._Backup( *args, **kwargs )
elif action == 'content_update_package':result = self._ProcessContentUpdatePackage( *args, **kwargs )
elif action == 'content_updates':result = self._ProcessContentUpdates( *args, **kwargs )
@ -9519,7 +9511,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'imageboard': result = self._SetYAMLDump( YAML_DUMP_ID_IMAGEBOARD, *args, **kwargs )
elif action == 'import_file': result = self._ImportFile( *args, **kwargs )
elif action == 'local_booru_share': result = self._SetYAMLDump( YAML_DUMP_ID_LOCAL_BOORU, *args, **kwargs )
elif action == 'maintain_similar_files': result = self._CacheSimilarFilesMaintain( *args, **kwargs )
elif action == 'maintain_similar_files_tree': result = self._CacheSimilarFilesMaintainTree( *args, **kwargs )
elif action == 'push_recent_tags': result = self._PushRecentTags( *args, **kwargs )
elif action == 'regenerate_ac_cache': result = self._RegenerateACCache( *args, **kwargs )
elif action == 'regenerate_similar_files': result = self._CacheSimilarFilesRegenerateTree( *args, **kwargs )

View File

@ -701,9 +701,20 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
#
job_key = ClientThreading.JobKey( pausable = True, cancellable = True)
job_key = ClientThreading.JobKey()
job_key.SetVariable( 'title', 'test job' )
job_key.SetVariable( 'popup_title', u'\u24c9\u24d7\u24d8\u24e2 \u24d8\u24e2 \u24d0 \u24e3\u24d4\u24e2\u24e3 \u24e4\u24dd\u24d8\u24d2\u24de\u24d3\u24d4 \u24dc\u24d4\u24e2\u24e2\u24d0\u24d6\u24d4' )
job_key.SetVariable( 'popup_text_1', u'\u24b2\u24a0\u24b2 \u24a7\u249c\u249f' )
job_key.SetVariable( 'popup_text_2', u'p\u0250\u05df \u028d\u01dd\u028d' )
self._controller.pub( 'message', job_key )
#
job_key = ClientThreading.JobKey( pausable = True, cancellable = True )
job_key.SetVariable( 'popup_title', 'test job' )
job_key.SetVariable( 'popup_text_1', 'Currently processing test job 5/8' )
job_key.SetVariable( 'popup_gauge_1', ( 5, 8 ) )
@ -1029,6 +1040,10 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
ClientGUIMenus.AppendMenuItem( search_menu, service.GetName(), 'Open a new search tab for ' + service.GetName() + '.', self, self._NewPageQuery, service.GetServiceKey() )
search_menu.AppendSeparator()
ClientGUIMenus.AppendMenuItem( search_menu, 'duplicates (under construction!)', 'Open a new tab to discover and filter duplicate files.', self, self._NewPageDuplicateFilter )
ClientGUIMenus.AppendMenu( menu, search_menu, 'new search page' )
#
@ -1514,9 +1529,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
def _MaintainSimilarFilesData( self ):
text = 'This will do up to ten minutes\' maintenance on the similar files search data.'
text += os.linesep * 2
text += 'It will rebalance the search tree and (re)generate any outstanding file search data.'
text = 'This will rebalance the similar files search data, improving search speed.'
text += os.linesep * 2
text += 'If there is work to do, it will report its status through a popup message. The gui may hang until it is done.'
@ -1528,7 +1541,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
stop_time = HydrusData.GetNow() + 60 * 10
self._controller.Write( 'maintain_similar_files', stop_time )
self._controller.Write( 'maintain_similar_files_tree', stop_time )
@ -1695,6 +1708,15 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
wx.CallAfter( page.SetSearchFocus )
def _NewPageDuplicateFilter( self ):
    """Open a new 'duplicates' page for discovering and filtering duplicate files."""
    
    self._NewPage( 'duplicates', ClientGUIManagement.CreateManagementControllerDuplicateFilter() )
def _NewPageImportBooru( self ):
with ClientGUIDialogs.DialogSelectBooru( self ) as dlg:

View File

@ -5087,20 +5087,6 @@ class SaneListCtrl( wx.ListCtrl, ListCtrlAutoWidthMixin, ColumnSorterMixin ):
return ( self.itemDataMap[ key1 ], self.itemDataMap[ key2 ] )
def HasClientData( self, data, column_index = None ):
try:
index = self.GetIndexFromClientData( data, column_index )
return True
except HydrusExceptions.DataMissing:
return False
def GetListCtrl( self ):
return self
@ -5120,6 +5106,20 @@ class SaneListCtrl( wx.ListCtrl, ListCtrlAutoWidthMixin, ColumnSorterMixin ):
return results
def HasClientData( self, data, column_index = None ):
try:
index = self.GetIndexFromClientData( data, column_index )
return True
except HydrusExceptions.DataMissing:
return False
def OnSortOrderChanged( self ):
self._data_indices_to_sort_indices_dirty = True
@ -5186,24 +5186,6 @@ class SaneListCtrlForSingleObject( SaneListCtrl ):
SaneListCtrl.Append( self, display_tuple, sort_tuple )
def GetClientData( self, index = None ):
if index is None:
data_indicies = [ self._GetDataIndex( index ) for index in range( self.GetItemCount() ) ]
datas = [ self._data_indices_to_objects[ data_index ] for data_index in data_indicies ]
return datas
else:
data_index = self._GetDataIndex( index )
return self._data_indices_to_objects[ data_index ]
def GetIndexFromClientData( self, obj ):
try:
@ -5220,6 +5202,31 @@ class SaneListCtrlForSingleObject( SaneListCtrl ):
def GetObject( self, index ):
data_index = self._GetDataIndex( index )
return self._data_indices_to_objects[ data_index ]
def GetObjects( self, only_selected = False ):
if only_selected:
indicies = self.GetAllSelected()
else:
indicies = range( self.GetItemCount() )
data_indicies = [ self._GetDataIndex( index ) for index in indicies ]
datas = [ self._data_indices_to_objects[ data_index ] for data_index in data_indicies ]
return datas
def HasClientData( self, data ):
try:
@ -5321,7 +5328,7 @@ class SeedCacheControl( SaneListCtrlForSingleObject ):
notes = []
for seed in self.GetSelectedClientData():
for seed in self.GetObjects( only_selected = True ):
( seed, status, added_timestamp, last_modified_timestamp, note ) = self._seed_cache.GetSeedInfo( seed )
@ -5343,7 +5350,7 @@ class SeedCacheControl( SaneListCtrlForSingleObject ):
def _CopySelectedSeeds( self ):
seeds = self.GetSelectedClientData()
seeds = self.GetObjects( only_selected = True )
if len( seeds ) > 0:
@ -5357,7 +5364,7 @@ class SeedCacheControl( SaneListCtrlForSingleObject ):
def _SetSelected( self, status_to_set ):
seeds_to_reset = self.GetSelectedClientData()
seeds_to_reset = self.GetObjects( only_selected = True )
for seed in seeds_to_reset:
@ -5561,6 +5568,31 @@ class StaticBoxSorterForListBoxTags( StaticBox ):
self._tags_box.SetTagsByMedia( media, force_reload = force_reload )
class TextAndGauge( wx.Panel ):
    """A simple panel that stacks a status text line above a progress gauge."""
    
    def __init__( self, parent ):
        
        wx.Panel.__init__( self, parent )
        
        self._st = wx.StaticText( self )
        self._gauge = Gauge( self )
        
        sizer = wx.BoxSizer( wx.VERTICAL )
        
        sizer.AddF( self._st, CC.FLAGS_EXPAND_PERPENDICULAR )
        sizer.AddF( self._gauge, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        self.SetSizer( sizer )
        
    
    def SetValue( self, text, value, range ):
        """Update the status text and set the gauge to value out of range."""
        
        self._gauge.SetRange( range )
        self._gauge.SetValue( value )
        
        self._st.SetLabelText( text )
( TimeDeltaEvent, EVT_TIME_DELTA ) = wx.lib.newevent.NewCommandEvent()
class TimeDeltaButton( wx.Button ):

View File

@ -1515,7 +1515,7 @@ class DialogManageExportFolders( ClientGUIDialogs.Dialog ):
for index in indices:
export_folder = self._export_folders.GetClientData( index )
export_folder = self._export_folders.GetObject( index )
original_name = export_folder.GetName()
@ -1557,7 +1557,7 @@ class DialogManageExportFolders( ClientGUIDialogs.Dialog ):
existing_db_names = set( HydrusGlobals.client_controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER ) )
export_folders = self._export_folders.GetClientData()
export_folders = self._export_folders.GetObjects()
good_names = set()
@ -2592,7 +2592,7 @@ class DialogManageImportFolders( ClientGUIDialogs.Dialog ):
for index in indices:
import_folder = self._import_folders.GetClientData( index )
import_folder = self._import_folders.GetObject( index )
original_name = import_folder.GetName()
@ -2636,7 +2636,7 @@ class DialogManageImportFolders( ClientGUIDialogs.Dialog ):
good_names = set()
import_folders = self._import_folders.GetClientData()
import_folders = self._import_folders.GetObjects()
for import_folder in import_folders:
@ -3171,20 +3171,34 @@ class DialogManagePixivAccount( ClientGUIDialogs.Dialog ):
form_fields = {}
form_fields[ 'mode' ] = 'login'
# this no longer seems to work--they updated to some javascript gubbins for their main form
# I couldn't see where the POST was going in Firefox dev console, so I guess it is some other thing that doesn't pick up
# the old form is still there, but hidden and changed to https://accounts.pixiv.net/login, but even if I do that, it just refreshes in Japanese :/
form_fields[ 'post_key' ] = 'c779b8a16389a7861d584d11d73424a0'
form_fields[ 'lang' ] = 'en'
form_fields[ 'source' ] = 'pc'
form_fields[ 'return_to' ] = 'http://www.pixiv.net'
form_fields[ 'pixiv_id' ] = id
form_fields[ 'pass' ] = password
form_fields[ 'password' ] = password
body = urllib.urlencode( form_fields )
headers = {}
headers[ 'Content-Type' ] = 'application/x-www-form-urlencoded'
( response_gumpf, cookies ) = HydrusGlobals.client_controller.DoHTTP( HC.POST, 'http://www.pixiv.net/login.php', request_headers = headers, body = body, return_cookies = True )
( response_gumpf, cookies ) = HydrusGlobals.client_controller.DoHTTP( HC.POST, 'https://accounts.pixiv.net/login', request_headers = headers, body = body, return_cookies = True )
# _ only given to logged in php sessions
if 'PHPSESSID' in cookies and '_' in cookies[ 'PHPSESSID' ]: self._status.SetLabelText( 'OK!' )
else: self._status.SetLabelText( 'Did not work!' )
# actually, it needs an _ in it to be a logged in session
# posting to the old login form gives you a 301 and a session without an underscore
if 'PHPSESSID' in cookies:
self._status.SetLabelText( 'OK!' )
else:
self._status.SetLabelText( 'Did not work!' )
wx.CallLater( 2000, self._status.SetLabel, '' )

View File

@ -50,6 +50,7 @@ MANAGEMENT_TYPE_IMPORT_THREAD_WATCHER = 4
MANAGEMENT_TYPE_PETITIONS = 5
MANAGEMENT_TYPE_QUERY = 6
MANAGEMENT_TYPE_IMPORT_URLS = 7
MANAGEMENT_TYPE_DUPLICATE_FILTER = 8
management_panel_types_to_classes = {}
@ -57,19 +58,26 @@ def CreateManagementController( management_type, file_service_key = None ):
if file_service_key is None:
file_service_key = CC.LOCAL_FILE_SERVICE_KEY
file_service_key = CC.COMBINED_LOCAL_FILE_SERVICE_KEY
management_controller = ManagementController()
# sort
# collect
# nah, these are only valid for types with regular file lists
management_controller.SetType( management_type )
management_controller.SetKey( 'file_service', file_service_key )
return management_controller
def CreateManagementControllerDuplicateFilter():
    """Return a fresh ManagementController configured for the duplicate filter page."""
    
    return CreateManagementController( MANAGEMENT_TYPE_DUPLICATE_FILTER )
    
def CreateManagementControllerImportGallery( gallery_identifier ):
management_controller = CreateManagementController( MANAGEMENT_TYPE_IMPORT_GALLERY )
@ -1431,6 +1439,208 @@ class ManagementPanelDumper( ManagementPanel ):
management_panel_types_to_classes[ MANAGEMENT_TYPE_DUMPER ] = ManagementPanelDumper
'''
class ManagementPanelDuplicateFilter( ManagementPanel ):
    """Management page panel for the duplicate-file discovery workflow.
    
    Presents three stages as stacked static boxes:
    
    * preparation -- regenerate missing perceptual hashes and rebalance the
      similar-files search tree
    * discovery -- search for potential duplicate pairs at a chosen hamming
      distance
    * filtering -- summary counts of the discovered pairs by duplicate status
    
    State is pulled from the db via the 'similar_files_maintenance_status' read.
    """
    
    def __init__( self, parent, page, controller, management_controller ):
        
        ManagementPanel.__init__( self, parent, page, controller, management_controller )
        
        self._preparing_panel = ClientGUICommon.StaticBox( self, 'preparation' )
        
        # TODO: refresh button that just calls update
        
        self._total_files = wx.StaticText( self._preparing_panel )
        self._num_phashes_to_regen = wx.StaticText( self._preparing_panel )
        self._num_branches_to_regen = wx.StaticText( self._preparing_panel )
        
        self._phashes_button = wx.BitmapButton( self._preparing_panel, bitmap = CC.GlobalBMPs.play )
        self._branches_button = wx.BitmapButton( self._preparing_panel, bitmap = CC.GlobalBMPs.play )
        
        #
        
        self._searching_panel = ClientGUICommon.StaticBox( self, 'discovery' )
        
        # preset hamming-distance choices for the dropdown button
        menu_items = []
        
        menu_items.append( ( 'exact match', 'Search for exact matches.', self._SetSearchDistanceExact ) )
        menu_items.append( ( 'very similar', 'Search for very similar files.', self._SetSearchDistanceVerySimilar ) )
        menu_items.append( ( 'similar', 'Search for similar files.', self._SetSearchDistanceSimilar ) )
        menu_items.append( ( 'speculative', 'Search for files that are probably similar.', self._SetSearchDistanceSpeculative ) )
        
        self._search_distance_button = ClientGUICommon.MenuButton( self._searching_panel, 'similarity', menu_items )
        
        self._search_distance_spinctrl = wx.SpinCtrl( self._searching_panel, min = 0, max = 64, size = ( 50, -1 ) )
        
        self._num_searched = ClientGUICommon.TextAndGauge( self._searching_panel )
        
        self._search_button = wx.BitmapButton( self._searching_panel, bitmap = CC.GlobalBMPs.play )
        
        #
        
        self._filtering_panel = ClientGUICommon.StaticBox( self, 'filtering' )
        
        self._num_unknown_duplicates = wx.StaticText( self._filtering_panel )
        self._num_same_file_duplicates = wx.StaticText( self._filtering_panel )
        self._num_alternate_duplicates = wx.StaticText( self._filtering_panel )
        
        # TODO: bind spinctrl (which should throw another update on every shift, as well
        # TODO: bind the buttons (nah, replace with betterbitmapbutton)
        
        #
        
        # TODO: initialise value of spinctrl, label of distance button (which might be 'custom')
        
        gridbox_1 = wx.FlexGridSizer( 0, 2 )
        
        gridbox_1.AddGrowableCol( 0, 1 )
        
        gridbox_1.AddF( self._num_phashes_to_regen, CC.FLAGS_EXPAND_PERPENDICULAR )
        gridbox_1.AddF( self._phashes_button, CC.FLAGS_VCENTER )
        gridbox_1.AddF( self._num_branches_to_regen, CC.FLAGS_EXPAND_PERPENDICULAR )
        gridbox_1.AddF( self._branches_button, CC.FLAGS_VCENTER )
        
        self._preparing_panel.AddF( self._total_files, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._preparing_panel.AddF( gridbox_1, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        distance_hbox = wx.BoxSizer( wx.HORIZONTAL )
        
        distance_hbox.AddF( wx.StaticText( self._searching_panel, label = 'search similarity: ' ), CC.FLAGS_VCENTER )
        distance_hbox.AddF( self._search_distance_button, CC.FLAGS_EXPAND_BOTH_WAYS )
        distance_hbox.AddF( self._search_distance_spinctrl, CC.FLAGS_VCENTER )
        
        gridbox_2 = wx.FlexGridSizer( 0, 2 )
        
        gridbox_2.AddGrowableCol( 0, 1 )
        
        gridbox_2.AddF( self._num_searched, CC.FLAGS_EXPAND_PERPENDICULAR )
        gridbox_2.AddF( self._search_button, CC.FLAGS_VCENTER )
        
        self._searching_panel.AddF( distance_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._searching_panel.AddF( gridbox_2, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        self._filtering_panel.AddF( self._num_unknown_duplicates, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._filtering_panel.AddF( self._num_same_file_duplicates, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._filtering_panel.AddF( self._num_alternate_duplicates, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.AddF( self._preparing_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( self._searching_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( self._filtering_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        self.SetSizer( vbox )
        
        #
        
        self._RefreshAndUpdate()
        
    
    def _SetSearchDistance( self, value ):
        """Push the given hamming distance into the spinctrl."""
        
        self._search_distance_spinctrl.SetValue( value ) # does this trigger the update event? check it
        
        # TODO: update the label, which prob needs an HC.hamming_str dict or something, which I can then apply everywhere else as well.
        
    
    def _SetSearchDistanceExact( self ):
        
        self._SetSearchDistance( HC.HAMMING_EXACT_MATCH )
        
    
    def _SetSearchDistanceSimilar( self ):
        
        self._SetSearchDistance( HC.HAMMING_SIMILAR )
        
    
    def _SetSearchDistanceSpeculative( self ):
        
        self._SetSearchDistance( HC.HAMMING_SPECULATIVE )
        
    
    def _SetSearchDistanceVerySimilar( self ):
        
        self._SetSearchDistance( HC.HAMMING_VERY_SIMILAR )
        
    
    def _RefreshAndUpdate( self ):
        """Re-read maintenance status from the db, then redraw the panel."""
        
        self._similar_files_maintenance_status = self._controller.Read( 'similar_files_maintenance_status' )
        
        self._Update()
        
    
    def _Update( self ):
        """Redraw all labels, gauges, and button enabled-states from the cached maintenance status."""
        
        ( searched_distances_to_count, duplicate_types_to_count, num_phashes_to_regen, num_branches_to_regen ) = self._similar_files_maintenance_status
        
        if num_phashes_to_regen == 0:
            
            self._num_phashes_to_regen.SetLabelText( 'All files ready!' )
            
            self._phashes_button.Disable()
            
        else:
            
            self._num_phashes_to_regen.SetLabelText( HydrusData.ConvertIntToPrettyString( num_phashes_to_regen ) + ' files to reanalyze.' )
            
            self._phashes_button.Enable()
            
        
        if num_branches_to_regen == 0:
            
            self._num_branches_to_regen.SetLabelText( 'Search tree is fast!' )
            
            self._branches_button.Disable()
            
        else:
            
            self._num_branches_to_regen.SetLabelText( HydrusData.ConvertIntToPrettyString( num_branches_to_regen ) + ' search branches to rebalance.' )
            
            self._branches_button.Enable()
            
        
        total_num_files = sum( searched_distances_to_count.values() )
        
        # fixed typo: 'eligable' -> 'eligible'
        self._total_files.SetLabelText( HydrusData.ConvertIntToPrettyString( total_num_files ) + ' eligible files.' )
        
        search_distance = self._search_distance_spinctrl.GetValue()
        
        # a file searched at distance d covers all distances <= d, so count files
        # whose recorded search distance is at least the currently selected one
        num_searched = sum( ( count for ( value, count ) in searched_distances_to_count.items() if value is not None and value >= search_distance ) )
        
        if num_searched == total_num_files:
            
            self._num_searched.SetValue( 'All potential duplicates found.', total_num_files, total_num_files )
            
            self._search_button.Disable()
            
        else:
            
            if num_searched == 0:
                
                self._num_searched.SetValue( 'Have not yet searched at that distance.', 0, total_num_files )
                
            else:
                
                self._num_searched.SetValue( 'Searched ' + HydrusData.ConvertValueRangeToPrettyString( num_searched, total_num_files ) + ' files.', num_searched, total_num_files )
                
            
            self._search_button.Enable()
            
        
        self._num_unknown_duplicates.SetLabelText( HydrusData.ConvertIntToPrettyString( duplicate_types_to_count[ HC.DUPLICATE_UNKNOWN ] ) + ' potential duplicates found.' )
        self._num_same_file_duplicates.SetLabelText( HydrusData.ConvertIntToPrettyString( duplicate_types_to_count[ HC.DUPLICATE_SAME_FILE ] ) + ' same file pairs filtered.' )
        self._num_alternate_duplicates.SetLabelText( HydrusData.ConvertIntToPrettyString( duplicate_types_to_count[ HC.DUPLICATE_ALTERNATE ] ) + ' alternate file pairs filtered.' )
        
    
management_panel_types_to_classes[ MANAGEMENT_TYPE_DUPLICATE_FILTER ] = ManagementPanelDuplicateFilter
class ManagementPanelGalleryImport( ManagementPanel ):
def __init__( self, parent, page, controller, management_controller ):

View File

@ -2804,7 +2804,7 @@ class MediaPanelThumbnails( MediaPanel ):
if selection_has_local_file_domain:
filter_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'filter' ), 'archive/delete' )
ClientGUIMenus.AppendMenuItem( filter_menu, 'archive/delete', 'Launch a special media viewer that will quickly archive (left-click) and delete (right-click) the selected media.', self, self._Filter )
shortcut_names = HydrusGlobals.client_controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_SHORTCUTS )
@ -2813,41 +2813,48 @@ class MediaPanelThumbnails( MediaPanel ):
custom_shortcuts_menu = wx.Menu()
custom_shortcuts_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'custom_filter' ), 'manage custom filters' )
ClientGUIMenus.AppendMenuItem( custom_shortcuts_menu, 'manage', 'Manage your different custom filters and their shortcuts.', self, self._CustomFilter )
custom_shortcuts_menu.AppendSeparator()
for shortcut_name in shortcut_names:
custom_shortcuts_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'custom_filter', shortcut_name ), shortcut_name )
ClientGUIMenus.AppendMenuItem( custom_shortcuts_menu, shortcut_name, 'Open the ' + shortcut_name + ' custom filter.', self, self._CustomFilter, shortcut_name )
filter_menu.AppendMenu( CC.ID_NULL, 'custom filter', custom_shortcuts_menu )
ClientGUIMenus.AppendMenu( filter_menu, custom_shortcuts_menu, 'custom filters' )
else:
filter_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'custom_filter' ), 'custom filter' )
ClientGUIMenus.AppendMenuItem( filter_menu, 'create a custom filter', 'Create a custom filter that uses non-default shortcuts.', self, self._CustomFilter )
menu.AppendMenu( CC.ID_NULL, 'filter', filter_menu )
ClientGUIMenus.AppendMenu( menu, filter_menu, 'filter' )
menu.AppendSeparator()
if selection_has_inbox: menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'archive' ), archive_phrase )
if selection_has_archive: menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'inbox' ), inbox_phrase )
if selection_has_inbox:
ClientGUIMenus.AppendMenuItem( menu, archive_phrase, 'Archive the selected files.', self, self._Archive )
menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'remove' ), remove_phrase )
if selection_has_archive:
ClientGUIMenus.AppendMenuItem( menu, inbox_phrase, 'Put the selected files back in the inbox.', self, self._Inbox )
ClientGUIMenus.AppendMenuItem( menu, remove_phrase, 'Remove the selected files from the current view.', self, self._Remove )
if selection_has_local_file_domain:
menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'delete', CC.LOCAL_FILE_SERVICE_KEY ), local_delete_phrase )
ClientGUIMenus.AppendMenuItem( menu, local_delete_phrase, 'Delete the selected files from \'my files\'.', self, self._Delete, CC.LOCAL_FILE_SERVICE_KEY )
if selection_has_trash:
menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'delete', CC.TRASH_SERVICE_KEY ), trash_delete_phrase )
menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'undelete' ), undelete_phrase )
ClientGUIMenus.AppendMenuItem( menu, trash_delete_phrase, 'Delete the selected files from the trash, forcing an immediate physical delete from your hard drive.', self, self._Delete, CC.TRASH_SERVICE_KEY )
ClientGUIMenus.AppendMenuItem( menu, undelete_phrase, 'Restore the selected files back to \'my files\'.', self, self._Undelete )
# share
@ -2856,7 +2863,7 @@ class MediaPanelThumbnails( MediaPanel ):
if selection_has_local:
menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'open_externally' ), '&open externally' )
ClientGUIMenus.AppendMenuItem( menu, 'open externally', 'Launch this file with your OS\'s default program for it.', self, self._OpenExternally )
share_menu = wx.Menu()
@ -3026,10 +3033,10 @@ class MediaPanelThumbnails( MediaPanel ):
similar_menu = wx.Menu()
ClientGUIMenus.AppendMenuItem( similar_menu, 'exact match', 'Search the database for files that look precisely like this one.', self, self._GetSimilarTo, 0 )
ClientGUIMenus.AppendMenuItem( similar_menu, 'very similar', 'Search the database for files that look just like this one.', self, self._GetSimilarTo, 2 )
ClientGUIMenus.AppendMenuItem( similar_menu, 'similar', 'Search the database for files that look generally like this one.', self, self._GetSimilarTo, 4 )
ClientGUIMenus.AppendMenuItem( similar_menu, 'speculative', 'Search the database for files that probably look like this one. This is sometimes useful for symbols with sharp edges or lines.', self, self._GetSimilarTo, 8 )
ClientGUIMenus.AppendMenuItem( similar_menu, 'exact match', 'Search the database for files that look precisely like this one.', self, self._GetSimilarTo, HC.HAMMING_EXACT_MATCH )
ClientGUIMenus.AppendMenuItem( similar_menu, 'very similar', 'Search the database for files that look just like this one.', self, self._GetSimilarTo, HC.HAMMING_VERY_SIMILAR )
ClientGUIMenus.AppendMenuItem( similar_menu, 'similar', 'Search the database for files that look generally like this one.', self, self._GetSimilarTo, HC.HAMMING_SIMILAR )
ClientGUIMenus.AppendMenuItem( similar_menu, 'speculative', 'Search the database for files that probably look like this one. This is sometimes useful for symbols with sharp edges or lines.', self, self._GetSimilarTo, HC.HAMMING_SPECULATIVE )
ClientGUIMenus.AppendMenu( menu, similar_menu, 'find similar files' )

View File

@ -204,7 +204,7 @@ class OptionsPanelImportFiles( OptionsPanel ):
self._auto_archive.Bind( wx.EVT_CHECKBOX, self.EventChanged )
self._auto_archive.SetToolTipString( 'If this is set, all successful imports will be automatically archived rather than sent to the inbox.' )
self._exclude_deleted = wx.CheckBox( self, label = 'exclude already deleted files' )
self._exclude_deleted = wx.CheckBox( self, label = 'exclude previously deleted files' )
self._exclude_deleted.Bind( wx.EVT_CHECKBOX, self.EventChanged )
self._exclude_deleted.SetToolTipString( 'If this is set and an incoming file has already been seen and deleted before by this client, the import will be abandoned. This is useful to make sure you do not keep importing and deleting the same bad files over and over. Files currently in the trash count as deleted.' )

View File

@ -460,7 +460,7 @@ class EditNodes( wx.Panel ):
to_export = HydrusSerialisable.SerialisableList()
for node in self._nodes.GetSelectedClientData():
for node in self._nodes.GetObjects( only_selected = True ):
to_export.append( node )
@ -568,7 +568,7 @@ class EditNodes( wx.Panel ):
def Duplicate( self ):
nodes_to_dupe = self._nodes.GetSelectedClientData()
nodes_to_dupe = self._nodes.GetObjects( only_selected = True )
for node in nodes_to_dupe:
@ -584,7 +584,7 @@ class EditNodes( wx.Panel ):
for i in self._nodes.GetAllSelected():
node = self._nodes.GetClientData( i )
node = self._nodes.GetObject( i )
with ClientGUITopLevelWindows.DialogEdit( self, 'edit node' ) as dlg:
@ -619,7 +619,7 @@ class EditNodes( wx.Panel ):
def GetValue( self ):
return self._nodes.GetClientData()
return self._nodes.GetObjects()
def Paste( self ):
@ -1629,7 +1629,7 @@ class ManageParsingScriptsPanel( ClientGUIScrolledPanels.ManagePanel ):
to_export = HydrusSerialisable.SerialisableList()
for script in self._scripts.GetSelectedClientData():
for script in self._scripts.GetObjects( only_selected = True ):
to_export.append( script )
@ -1719,7 +1719,7 @@ class ManageParsingScriptsPanel( ClientGUIScrolledPanels.ManagePanel ):
def CommitChanges( self ):
scripts = self._scripts.GetClientData()
scripts = self._scripts.GetObjects()
HydrusGlobals.client_controller.Write( 'serialisables_overwrite', self.SCRIPT_TYPES, scripts )
@ -1731,7 +1731,7 @@ class ManageParsingScriptsPanel( ClientGUIScrolledPanels.ManagePanel ):
def Duplicate( self ):
scripts_to_dupe = self._scripts.GetSelectedClientData()
scripts_to_dupe = self._scripts.GetObjects( only_selected = True )
for script in scripts_to_dupe:
@ -1749,7 +1749,7 @@ class ManageParsingScriptsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._scripts.GetAllSelected():
script = self._scripts.GetClientData( i )
script = self._scripts.GetObject( i )
if isinstance( script, ClientParsing.ParseRootFileLookup ):

View File

@ -1371,7 +1371,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._media_viewer_options.GetAllSelected():
data = self._media_viewer_options.GetClientData( i )
data = self._media_viewer_options.GetObject( i )
title = 'set media view options information'
@ -1436,7 +1436,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
HydrusData.ShowText( 'Could not parse those zooms, so they were not saved!' )
for data in self._media_viewer_options.GetClientData():
for data in self._media_viewer_options.GetObjects():
data = list( data )
@ -2489,7 +2489,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
to_export = HydrusSerialisable.SerialisableList()
for subscription in self._subscriptions.GetSelectedClientData():
for subscription in self._subscriptions.GetObjects( only_selected = True ):
to_export.append( subscription )
@ -2563,7 +2563,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._subscriptions.GetAllSelected():
subscription = self._subscriptions.GetClientData( i )
subscription = self._subscriptions.GetObject( i )
subscription.CheckNow()
@ -2575,7 +2575,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
def CommitChanges( self ):
subscriptions = self._subscriptions.GetClientData()
subscriptions = self._subscriptions.GetObjects()
HydrusGlobals.client_controller.Write( 'serialisables_overwrite', [ HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION ], subscriptions )
@ -2591,7 +2591,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
subs_to_dupe = []
for subscription in self._subscriptions.GetSelectedClientData():
for subscription in self._subscriptions.GetObjects( only_selected = True ):
subs_to_dupe.append( subscription )
@ -2612,7 +2612,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._subscriptions.GetAllSelected():
subscription = self._subscriptions.GetClientData( i )
subscription = self._subscriptions.GetObject( i )
with ClientGUITopLevelWindows.DialogEdit( self, 'edit subscription' ) as dlg:
@ -2735,7 +2735,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._subscriptions.GetAllSelected():
subscription = self._subscriptions.GetClientData( i )
subscription = self._subscriptions.GetObject( i )
subscription.PauseResume()
@ -2755,7 +2755,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._subscriptions.GetAllSelected():
subscription = self._subscriptions.GetClientData( i )
subscription = self._subscriptions.GetObject( i )
subscription.Reset()
@ -2771,7 +2771,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
for i in self._subscriptions.GetAllSelected():
subscription = self._subscriptions.GetClientData( i )
subscription = self._subscriptions.GetObject( i )
seed_cache = subscription.GetSeedCache()

View File

@ -279,7 +279,7 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ):
if self._service_key == CC.COMBINED_LOCAL_FILE_SERVICE_KEY:
self._delete_local_deleted = wx.Button( self, label = 'clear deleted file record' )
self._delete_local_deleted.SetToolTipString( 'Make the client forget which files it has deleted from local files, resetting all the \'exclude already deleted files\' checks.' )
self._delete_local_deleted.SetToolTipString( 'Command the client to forget which files it has deleted, resetting all the \'exclude previously deleted files\' checks.' )
self._delete_local_deleted.Bind( wx.EVT_BUTTON, self.EventDeleteLocalDeleted )
@ -829,7 +829,7 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ):
def EventDeleteLocalDeleted( self, event ):
message = 'This will clear the client\'s memory of which files it has locally deleted, which affects \'exclude already deleted files\' import tests.'
message = 'This will clear the client\'s memory of which files it has locally deleted, which affects \'exclude previously deleted files\' import tests.'
message += os.linesep * 2
message += 'It will freeze the gui while it works.'
message += os.linesep * 2

View File

@ -1933,7 +1933,7 @@ class SeedCache( HydrusSerialisable.SerialisableBase ):
if num_successful > 0: status_strings.append( str( num_successful ) + ' successful' )
if num_failed > 0: status_strings.append( str( num_failed ) + ' failed' )
if num_deleted > 0: status_strings.append( str( num_deleted ) + ' already deleted' )
if num_deleted > 0: status_strings.append( str( num_deleted ) + ' previously deleted' )
if num_redundant > 0: status_strings.append( str( num_redundant ) + ' already in db' )
status = ', '.join( status_strings )

View File

@ -586,7 +586,18 @@ class HTTPConnection( object ):
except socket.error as e:
if e.errno == errno.WSAEACCES:
if HC.PLATFORM_WINDOWS:
access_errors = [ errno.EACCES, errno.WSAEACCES ]
connection_reset_errors = [ errno.ECONNRESET, errno.WSAECONNRESET ]
else:
access_errors = [ errno.EACCES ]
connection_reset_errors = [ errno.ECONNRESET ]
if e.errno in access_errors:
text = 'The hydrus client did not have permission to make a connection to ' + HydrusData.ToUnicode( self._host )
@ -599,7 +610,7 @@ class HTTPConnection( object ):
raise HydrusExceptions.FirewallException( text )
elif e.errno == errno.WSAECONNRESET:
elif e.errno in connection_reset_errors:
time.sleep( 5 )
@ -671,7 +682,16 @@ class HTTPConnection( object ):
except socket.error as e:
if e.errno == errno.WSAECONNRESET:
if HC.PLATFORM_WINDOWS:
connection_reset_errors = [ errno.ECONNRESET, errno.WSAECONNRESET ]
else:
connection_reset_errors = [ errno.ECONNRESET ]
if e.errno in connection_reset_errors:
raise recoverable_exc( 'Connection reset by remote host.' )

View File

@ -46,7 +46,7 @@ options = {}
# Misc
NETWORK_VERSION = 17
SOFTWARE_VERSION = 239
SOFTWARE_VERSION = 240
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -100,6 +100,11 @@ content_update_string_lookup[ CONTENT_UPDATE_DENY_PEND ] = 'deny pend'
content_update_string_lookup[ CONTENT_UPDATE_DENY_PETITION ] = 'deny petition'
content_update_string_lookup[ CONTENT_UPDATE_UNDELETE ] = 'undelete'
DUPLICATE_UNKNOWN = 0
DUPLICATE_NOT_DUPLICATE = 1
DUPLICATE_SAME_FILE = 2
DUPLICATE_ALTERNATE = 3
ENCODING_RAW = 0
ENCODING_HEX = 1
ENCODING_BASE64 = 2
@ -116,6 +121,11 @@ IMPORT_FOLDER_TYPE_SYNCHRONISE = 1
EXPORT_FOLDER_TYPE_REGULAR = 0
EXPORT_FOLDER_TYPE_SYNCHRONISE = 1
HAMMING_EXACT_MATCH = 0
HAMMING_VERY_SIMILAR = 2
HAMMING_SIMILAR = 4
HAMMING_SPECULATIVE = 8
HYDRUS_CLIENT = 0
HYDRUS_SERVER = 1
HYDRUS_TEST = 2

View File

@ -5,6 +5,7 @@ import HydrusConstants as HC
import OpenSSL
import os
import socket
import stat
import traceback
AES_KEY_LENGTH = 32
@ -163,6 +164,8 @@ def GenerateOpenSSLCertAndKeyFile( cert_path, key_path ):
f.write( cert_text )
os.chmod( cert_path, stat.S_IREAD )
key_text = OpenSSL.crypto.dump_privatekey( OpenSSL.crypto.FILETYPE_PEM, key )
with open( key_path, 'wt' ) as f:
@ -170,6 +173,8 @@ def GenerateOpenSSLCertAndKeyFile( cert_path, key_path ):
f.write( key_text )
os.chmod( key_path, stat.S_IREAD )
def GenerateRSAKeyPair():
private_key = Crypto.PublicKey.RSA.generate( 2048 )

View File

@ -18,6 +18,7 @@ import random
import ServerFiles
import shutil
import sqlite3
import stat
import sys
import threading
import time
@ -2330,56 +2331,6 @@ class DB( HydrusDB.HydrusDB ):
HydrusData.Print( 'The server is updating to version ' + str( version + 1 ) )
if version == 182:
HydrusData.Print( 'generating swf thumbnails' )
mimes = { HC.APPLICATION_FLASH }
mimes.update( HC.VIDEO )
hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE mime IN ' + HydrusData.SplayListForDB( mimes ) + ';' ) }
for hash_id in hash_ids:
hash = self._GetHash( hash_id )
try:
file_path = ServerFiles.GetFilePath( hash )
except HydrusExceptions.NotFoundException:
continue
thumbnail = HydrusFileHandling.GenerateThumbnail( file_path )
thumbnail_path = ServerFiles.GetExpectedThumbnailPath( hash )
with open( thumbnail_path, 'wb' ) as f:
f.write( thumbnail )
if version == 184:
result = self._c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( '', ) ).fetchone()
if result is not None:
( tag_id, ) = result
self._c.execute( 'DELETE FROM mappings WHERE tag_id = ?;', ( tag_id, ) )
if version == 188:
self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, timestamp INTEGER );' )
if version == 198:
HydrusData.Print( 'exporting mappings to external db' )
@ -2598,6 +2549,12 @@ class DB( HydrusDB.HydrusDB ):
HydrusEncryption.GenerateOpenSSLCertAndKeyFile( self._ssl_cert_path, self._ssl_key_path )
if version == 239:
os.chmod( self._ssl_cert_path, stat.S_IREAD )
os.chmod( self._ssl_key_path, stat.S_IREAD )
HydrusData.Print( 'The server has updated to version ' + str( version + 1 ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )

View File

@ -1196,6 +1196,10 @@ class TestServerDB( unittest.TestCase ):
time.sleep( 0.1 )
# so they can be deleted later on no prob
os.chmod( self._db._ssl_cert_path, stat.S_IREAD | stat.S_IWRITE )
os.chmod( self._db._ssl_key_path, stat.S_IREAD | stat.S_IWRITE )
del self._db