hydrus/hydrus/server/ServerDB.py

3511 lines
161 KiB
Python
Raw Normal View History

2019-12-18 22:06:34 +00:00
import collections
2013-02-19 00:11:43 +00:00
import hashlib
2021-04-07 21:26:45 +00:00
import json
2013-02-19 00:11:43 +00:00
import os
2013-06-12 22:53:31 +00:00
import random
2013-02-19 00:11:43 +00:00
import sqlite3
import sys
import time
import traceback
2021-04-07 21:26:45 +00:00
import typing
2020-07-29 20:52:44 +00:00
from hydrus.core import HydrusConstants as HC
2020-04-22 21:00:35 +00:00
from hydrus.core import HydrusData
2020-07-29 20:52:44 +00:00
from hydrus.core import HydrusDB
from hydrus.core import HydrusExceptions
2020-04-22 21:00:35 +00:00
from hydrus.core import HydrusGlobals as HG
2020-07-29 20:52:44 +00:00
from hydrus.core import HydrusPaths
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusTags
2021-04-07 21:26:45 +00:00
from hydrus.core.networking import HydrusNetwork
2020-07-29 20:52:44 +00:00
from hydrus.server import ServerFiles
2017-03-02 02:14:56 +00:00
def GenerateRepositoryMasterMapTableNames( service_id ):
    """Return the per-service master map table names as ( hash_id_map, tag_id_map ), both in the external_master db."""
    
    suffix = str( service_id )
    
    hash_id_map_table_name = f'external_master.repository_hash_id_map_{suffix}'
    tag_id_map_table_name = f'external_master.repository_tag_id_map_{suffix}'
    
    return ( hash_id_map_table_name, tag_id_map_table_name )
def GenerateRepositoryFilesTableNames( service_id ):
    """Return the per-service file table names: ( current, deleted, pending, petitioned, ip_addresses )."""
    
    suffix = str( service_id )
    
    return (
        f'current_files_{suffix}',
        f'deleted_files_{suffix}',
        f'pending_files_{suffix}',
        f'petitioned_files_{suffix}',
        f'ip_addresses_{suffix}'
    )
def GenerateRepositoryMappingsTableNames( service_id ):
    """Return the per-service mappings table names ( current, deleted, pending, petitioned ), all in the external_mappings db."""
    
    suffix = str( service_id )
    
    return (
        f'external_mappings.current_mappings_{suffix}',
        f'external_mappings.deleted_mappings_{suffix}',
        f'external_mappings.pending_mappings_{suffix}',
        f'external_mappings.petitioned_mappings_{suffix}'
    )
def GenerateRepositoryTagParentsTableNames( service_id ):
    """Return the per-service tag parent table names ( current, deleted, pending, petitioned )."""
    
    suffix = str( service_id )
    
    return tuple( f'{status}_tag_parents_{suffix}' for status in ( 'current', 'deleted', 'pending', 'petitioned' ) )
def GenerateRepositoryTagSiblingsTableNames( service_id ):
    """Return the per-service tag sibling table names ( current, deleted, pending, petitioned )."""
    
    suffix = str( service_id )
    
    return tuple( f'{status}_tag_siblings_{suffix}' for status in ( 'current', 'deleted', 'pending', 'petitioned' ) )
def GenerateRepositoryUpdateTableName( service_id ):
    """Return the per-service update-storage table name."""
    
    return f'updates_{service_id}'
class DB( HydrusDB.HydrusDB ):
    """The hydrus server's database layer: manages services, accounts, sessions and repository content tables."""
    
    # write-queue actions that nonetheless return a value to the caller
    # (presumably consumed by the HydrusDB read/write dispatch — confirm in HydrusDB)
    READ_WRITE_ACTIONS = [ 'access_key', 'immediate_content_update', 'registration_keys' ]
    
    # seconds between periodic transaction commits (presumably read by HydrusDB's maintenance loop — confirm)
    TRANSACTION_COMMIT_PERIOD = 120
    def __init__( self, controller, db_dir, db_name ):
        """Set up server-specific state (file storage dir, write-command dispatch table, account-type caches), then run the base HydrusDB init."""
        
        self._files_dir = os.path.join( db_dir, 'server_files' )
        
        # maps write-command names to the bound methods that execute them
        self._write_commands_to_methods = {
            'account_types' : self._ModifyAccountTypes,
            'analyze' : self._Analyze,
            'backup' : self._Backup,
            'create_update' : self._RepositoryCreateUpdate,
            'delete_orphans' : self._DeleteOrphans,
            'dirty_accounts' : self._SaveDirtyAccounts,
            'dirty_services' : self._SaveDirtyServices,
            'file' : self._RepositoryProcessAddFile,
            'modify_account_account_type' : self._ModifyAccountAccountType,
            'modify_account_ban' : self._ModifyAccountBan,
            'modify_account_expires' : self._ModifyAccountExpires,
            'modify_account_set_message' : self._ModifyAccountSetMessage,
            'modify_account_unban' : self._ModifyAccountUnban,
            'services' : self._ModifyServices,
            'session' : self._AddSession,
            'update' : self._RepositoryProcessClientToServerUpdate,
            'vacuum' : self._Vacuum
        }
        
        # in-memory caches of account types per service (refreshed via _RefreshAccountTypeCache)
        self._service_ids_to_account_type_ids = collections.defaultdict( set )
        self._account_type_ids_to_account_types = {}
        self._service_ids_to_account_type_keys_to_account_type_ids = collections.defaultdict( dict )
        
        # base-class init last: it opens the db and may call back into our methods
        HydrusDB.HydrusDB.__init__( self, controller, db_dir, db_name )
def _AddAccountType( self, service_id, account_type: HydrusNetwork.AccountType ):
2013-05-01 17:21:53 +00:00
2021-04-07 21:26:45 +00:00
# this does not update the cache. a parent caller has the responsibility
2013-05-01 17:21:53 +00:00
2021-04-07 21:26:45 +00:00
dump = account_type.DumpToString()
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
self._c.execute( 'INSERT INTO account_types ( service_id, dump ) VALUES ( ?, ? );', ( service_id, dump ) )
2017-03-02 02:14:56 +00:00
account_type_id = self._c.lastrowid
return account_type_id
    def _AddFile( self, file_dict ):
        """Ingest an uploaded file: if unseen, mirror it (and any thumbnail) into server storage and record its metadata. Returns the master_hash_id."""
        
        hash = file_dict[ 'hash' ]
        
        master_hash_id = self._GetMasterHashId( hash )
        
        result = self._c.execute( 'SELECT 1 FROM files_info WHERE master_hash_id = ?;', ( master_hash_id, ) ).fetchone()
        
        if result is None:
            
            # first time we see this file--copy it into place and record its info
            
            size = file_dict[ 'size' ]
            mime = file_dict[ 'mime' ]
            
            # the remaining metadata fields are optional and default to None
            if 'width' in file_dict: width = file_dict[ 'width' ]
            else: width = None
            
            if 'height' in file_dict: height = file_dict[ 'height' ]
            else: height = None
            
            if 'duration' in file_dict: duration = file_dict[ 'duration' ]
            else: duration = None
            
            if 'num_frames' in file_dict: num_frames = file_dict[ 'num_frames' ]
            else: num_frames = None
            
            if 'num_words' in file_dict: num_words = file_dict[ 'num_words' ]
            else: num_words = None
            
            source_path = file_dict[ 'path' ]
            
            dest_path = ServerFiles.GetExpectedFilePath( hash )
            
            HydrusPaths.MirrorFile( source_path, dest_path )
            
            if 'thumbnail' in file_dict:
                
                thumbnail_dest_path = ServerFiles.GetExpectedThumbnailPath( hash )
                
                thumbnail_bytes = file_dict[ 'thumbnail' ]
                
                with open( thumbnail_dest_path, 'wb' ) as f:
                    
                    f.write( thumbnail_bytes )
                    
                
            
            # files are copied before this insert so a crash cannot leave a row without a file
            self._c.execute( 'INSERT OR IGNORE INTO files_info ( master_hash_id, size, mime, width, height, duration, num_frames, num_words ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ? );', ( master_hash_id, size, mime, width, height, duration, num_frames, num_words ) )
            
        
        return master_hash_id
    def _AddService( self, service ):
        """Create a new service row, its admin account type and registration key, and any repository tables. Returns the admin access key."""
        
        ( service_key, service_type, name, port, dictionary ) = service.ToTuple()
        
        dictionary_string = dictionary.DumpToString()
        
        self._c.execute( 'INSERT INTO services ( service_key, service_type, name, port, dictionary_string ) VALUES ( ?, ?, ?, ?, ? );', ( sqlite3.Binary( service_key ), service_type, name, port, dictionary_string ) )
        
        service_id = self._c.lastrowid
        
        service_admin_account_type = HydrusNetwork.AccountType.GenerateAdminAccountType( service_type )
        
        service_admin_account_type_id = self._AddAccountType( service_id, service_admin_account_type )
        
        # _AddAccountType does not update the cache, so do it now
        self._RefreshAccountTypeCache()
        
        if service_type == HC.SERVER_ADMIN:
            
            # fixed registration key so first-time server setup can log in deterministically
            force_registration_key = b'init'
            
        else:
            
            force_registration_key = None
            
        
        [ registration_key ] = self._GenerateRegistrationKeys( service_id, 1, service_admin_account_type_id, None, force_registration_key )
        
        access_key = self._GetAccessKey( service_key, registration_key )
        
        if service_type in HC.REPOSITORIES:
            
            self._RepositoryCreate( service_id )
            
        
        return access_key
def _AddSession( self, session_key, service_key, account_key, expires ):
2013-06-12 22:53:31 +00:00
2015-03-04 22:44:32 +00:00
service_id = self._GetServiceId( service_key )
2013-06-12 22:53:31 +00:00
2015-03-04 22:44:32 +00:00
account_id = self._GetAccountId( account_key )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'INSERT INTO sessions ( session_key, service_id, account_id, expires ) VALUES ( ?, ?, ?, ? );', ( sqlite3.Binary( session_key ), service_id, account_id, expires ) )
2013-02-19 00:11:43 +00:00
2019-06-19 22:08:48 +00:00
    def _Analyze( self, maintenance_mode = HC.MAINTENANCE_FORCED, stop_time = None ):
        """Run ANALYZE on any table whose stats are missing or older than ~30 days.
        
        Best-effort: bails out immediately if the server-busy lock cannot be acquired,
        and stops early when the controller says the maintenance window is over.
        """
        
        stale_time_delta = 30 * 86400
        
        existing_names_to_timestamps = dict( self._c.execute( 'SELECT name, timestamp FROM analyze_timestamps;' ).fetchall() )
        
        db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp', 'durable_temp' ) ]
        
        all_names = set()
        
        for db_name in db_names:
            
            all_names.update( ( name for ( name, ) in self._c.execute( 'SELECT name FROM ' + db_name + '.sqlite_master WHERE type = ?;', ( 'table', ) ) ) )
            
        
        # sqlite's own stats table should not itself be analyzed
        all_names.discard( 'sqlite_stat1' )
        
        names_to_analyze = [ name for name in all_names if name not in existing_names_to_timestamps or HydrusData.TimeHasPassed( existing_names_to_timestamps[ name ] + stale_time_delta ) ]
        
        # shuffle so repeated interrupted runs spread work across all tables
        random.shuffle( names_to_analyze )
        
        if len( names_to_analyze ) > 0:
            
            # non-blocking acquire: skip this round entirely rather than stall the server
            locked = HG.server_busy.acquire( False ) # pylint: disable=E1111
            
            if not locked:
                
                return
                
            
            try:
                
                for name in names_to_analyze:
                    
                    started = HydrusData.GetNowPrecise()
                    
                    self._c.execute( 'ANALYZE ' + name + ';' )
                    
                    # replace, rather than update, the timestamp row for this table
                    self._c.execute( 'DELETE FROM analyze_timestamps WHERE name = ?;', ( name, ) )
                    
                    self._c.execute( 'INSERT OR IGNORE INTO analyze_timestamps ( name, timestamp ) VALUES ( ?, ? );', ( name, HydrusData.GetNow() ) )
                    
                    time_took = HydrusData.GetNowPrecise() - started
                    
                    if time_took > 1:
                        
                        HydrusData.Print( 'Analyzed ' + name + ' in ' + HydrusData.TimeDeltaToPrettyTimeDelta( time_took ) )
                        
                    
                    if HG.server_controller.ShouldStopThisWork( maintenance_mode, stop_time = stop_time ):
                        
                        break
                        
                    
                
                self._c.execute( 'ANALYZE sqlite_master;' ) # this reloads the current stats into the query planner
                
            finally:
                
                HG.server_busy.release()
                
            
        
def _Backup( self ):
2016-04-14 01:54:29 +00:00
2020-04-01 21:51:42 +00:00
locked = HG.server_busy.acquire( False ) # pylint: disable=E1111
2016-04-14 01:54:29 +00:00
2019-10-09 22:03:03 +00:00
if not locked:
HydrusData.Print( 'Could not backup because the server was locked.' )
return
2017-03-02 02:14:56 +00:00
2016-04-14 01:54:29 +00:00
try:
2019-10-09 22:03:03 +00:00
self._CloseDBCursor()
2016-04-14 01:54:29 +00:00
2016-10-12 21:52:50 +00:00
backup_path = os.path.join( self._db_dir, 'server_backup' )
2016-04-14 01:54:29 +00:00
2016-08-17 20:07:22 +00:00
HydrusPaths.MakeSureDirectoryExists( backup_path )
2016-04-14 01:54:29 +00:00
2019-09-25 21:34:18 +00:00
for filename in self._db_filenames.values():
2016-04-14 01:54:29 +00:00
HydrusData.Print( 'backing up: copying ' + filename )
source = os.path.join( self._db_dir, filename )
dest = os.path.join( backup_path, filename )
2016-06-01 20:04:15 +00:00
HydrusPaths.MirrorFile( source, dest )
2016-04-14 01:54:29 +00:00
2017-01-04 22:48:23 +00:00
for filename in [ self._ssl_cert_filename, self._ssl_key_filename ]:
HydrusData.Print( 'backing up: copying ' + filename )
source = os.path.join( self._db_dir, filename )
dest = os.path.join( backup_path, filename )
HydrusPaths.MirrorFile( source, dest )
2016-04-14 01:54:29 +00:00
HydrusData.Print( 'backing up: copying files' )
2016-10-12 21:52:50 +00:00
HydrusPaths.MirrorTree( self._files_dir, os.path.join( backup_path, 'server_files' ) )
2016-04-14 01:54:29 +00:00
2019-10-09 22:03:03 +00:00
self._InitDBCursor()
2016-04-14 01:54:29 +00:00
2019-10-09 22:03:03 +00:00
HydrusData.Print( 'backing up: done!' )
2017-03-02 02:14:56 +00:00
2019-10-09 22:03:03 +00:00
finally:
HG.server_busy.release()
2016-04-14 01:54:29 +00:00
2016-03-30 22:56:50 +00:00
2017-03-02 02:14:56 +00:00
    def _CreateDB( self ):
        """First-run initialisation: create the file storage tree, all core tables, the version row, and the server admin service/account."""
        
        HydrusPaths.MakeSureDirectoryExists( self._files_dir )
        
        # one subdirectory per two-hex-digit prefix for file storage
        for prefix in HydrusData.IterateHexPrefixes():
            
            new_dir = os.path.join( self._files_dir, prefix )
            
            HydrusPaths.MakeSureDirectoryExists( new_dir )
            
        
        self._c.execute( 'CREATE TABLE services ( service_id INTEGER PRIMARY KEY, service_key BLOB_BYTES, service_type INTEGER, name TEXT, port INTEGER, dictionary_string TEXT );' )
        
        self._c.execute( 'CREATE TABLE accounts ( account_id INTEGER PRIMARY KEY, service_id INTEGER, account_key BLOB_BYTES, hashed_access_key BLOB_BYTES, account_type_id INTEGER, created INTEGER, expires INTEGER, dictionary_string TEXT );' )
        self._c.execute( 'CREATE UNIQUE INDEX accounts_account_key_index ON accounts ( account_key );' )
        self._c.execute( 'CREATE UNIQUE INDEX accounts_hashed_access_key_index ON accounts ( hashed_access_key );' )
        
        self._c.execute( 'CREATE TABLE account_scores ( service_id INTEGER, account_id INTEGER, score_type INTEGER, score INTEGER, PRIMARY KEY ( service_id, account_id, score_type ) );' )
        
        self._c.execute( 'CREATE TABLE account_types ( account_type_id INTEGER PRIMARY KEY, service_id INTEGER, dump TEXT );' )
        
        self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, timestamp INTEGER );' )
        
        self._c.execute( 'CREATE TABLE files_info ( master_hash_id INTEGER PRIMARY KEY, size INTEGER, mime INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER );' )
        
        self._c.execute( 'CREATE TABLE reasons ( reason_id INTEGER PRIMARY KEY, reason TEXT );' )
        self._c.execute( 'CREATE UNIQUE INDEX reasons_reason_index ON reasons ( reason );' )
        
        self._c.execute( 'CREATE TABLE registration_keys ( registration_key BLOB_BYTES PRIMARY KEY, service_id INTEGER, account_type_id INTEGER, account_key BLOB_BYTES, access_key BLOB_BYTES UNIQUE, expires INTEGER );' )
        
        self._c.execute( 'CREATE TABLE sessions ( session_key BLOB_BYTES, service_id INTEGER, account_id INTEGER, expires INTEGER );' )
        
        self._c.execute( 'CREATE TABLE version ( version INTEGER, year INTEGER, month INTEGER );' )
        
        # master
        
        self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( master_hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
        
        self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.tags ( master_tag_id INTEGER PRIMARY KEY, tag TEXT UNIQUE );' )
        
        # inserts
        
        current_time_struct = time.localtime()
        
        ( current_year, current_month ) = ( current_time_struct.tm_year, current_time_struct.tm_mon )
        
        self._c.execute( 'INSERT INTO version ( version, year, month ) VALUES ( ?, ?, ? );', ( HC.SOFTWARE_VERSION, current_year, current_month ) )
        
        # set up server admin
        
        admin_service = HydrusNetwork.GenerateService( HC.SERVER_ADMIN_KEY, HC.SERVER_ADMIN, 'server admin', HC.DEFAULT_SERVER_ADMIN_PORT )
        
        self._AddService( admin_service ) # this sets up the admin account and a registration key by itself
    def _DeleteOrphans( self ):
        """Stub: planned clean-up of file/thumbnail storage that is no longer referenced by any repo or update table. Currently disabled pending a rewrite for the new system."""
        
        # make a table for files
        # make a table for thumbnails
        
        # populate both tables with what you have in your hdd
        # if the filename isn't even a hash, schedule it for immediate deletion instead
        
        # delete from the tables based on what is in current and pending repo file tables
        # delete from the file tables based on what is in update tables
        
        # delete whatever is left
        
        # might want to split this up into 256 jobs--depends on how fast its bits run
        # might also want to set server_busy, if it isn't already
        
        # also think about how often it runs--maybe only once a month is appropriate
        
        return # return to this to fix it for new system
def _DeleteRepositoryPetitions( self, service_id, subject_account_ids ):
( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
self._c.executemany( 'DELETE FROM ' + pending_files_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
self._c.executemany( 'DELETE FROM ' + petitioned_files_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
self._c.executemany( 'DELETE FROM ' + pending_mappings_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
self._c.executemany( 'DELETE FROM ' + petitioned_mappings_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
self._c.executemany( 'DELETE FROM ' + pending_tag_parents_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
self._c.executemany( 'DELETE FROM ' + petitioned_tag_parents_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
self._c.executemany( 'DELETE FROM ' + pending_tag_siblings_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
self._c.executemany( 'DELETE FROM ' + petitioned_tag_siblings_table_name + ' WHERE account_id = ?;', ( ( subject_account_id, ) for subject_account_id in subject_account_ids ) )
2017-03-02 02:14:56 +00:00
def _DeleteService( self, service_key ):
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
service_type = self._GetServiceType( service_id )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM services WHERE service_id = ?;', ( service_id, ) )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM accounts WHERE service_id = ?;', ( service_id, ) )
self._c.execute( 'DELETE FROM account_types WHERE service_id = ?;', ( service_id, ) )
self._c.execute( 'DELETE FROM account_scores WHERE service_id = ?;', ( service_id, ) )
self._c.execute( 'DELETE FROM registration_keys WHERE service_id = ?;', ( service_id, ) )
self._c.execute( 'DELETE FROM sessions WHERE service_id = ?;', ( service_id, ) )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
if service_type in HC.REPOSITORIES:
self._RepositoryDrop( service_id )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
def _GenerateRegistrationKeysFromAccount( self, service_key, account: HydrusNetwork.Account, num, account_type_key, expires ):
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
account_type_id = self._GetAccountTypeId( service_id, account_type_key )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
return self._GenerateRegistrationKeys( service_id, num, account_type_id, expires )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
def _GenerateRegistrationKeys( self, service_id, num, account_type_id, expires, force_registration_key = None ):
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
if force_registration_key is None:
keys = [ ( os.urandom( HC.HYDRUS_KEY_LENGTH ), os.urandom( HC.HYDRUS_KEY_LENGTH ), os.urandom( HC.HYDRUS_KEY_LENGTH ) ) for i in range( num ) ]
else:
keys = [ ( force_registration_key, os.urandom( HC.HYDRUS_KEY_LENGTH ), os.urandom( HC.HYDRUS_KEY_LENGTH ) ) for i in range( num ) ]
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
self._c.executemany( 'INSERT INTO registration_keys ( registration_key, service_id, account_type_id, account_key, access_key, expires ) VALUES ( ?, ?, ?, ?, ?, ? );', [ ( sqlite3.Binary( hashlib.sha256( registration_key ).digest() ), service_id, account_type_id, sqlite3.Binary( account_key ), sqlite3.Binary( access_key ), expires ) for ( registration_key, account_key, access_key ) in keys ] )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
return [ registration_key for ( registration_key, account_key, access_key ) in keys ]
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
2017-03-08 23:23:12 +00:00
    def _GetAccessKey( self, service_key, registration_key ):
        """Exchange a registration key for a freshly generated access key.
        
        Raises InsufficientCredentialsException if the registration key is unknown to this service.
        """
        
        service_id = self._GetServiceId( service_key )
        
        # we generate a new access_key every time this is requested so that no one with access to the registration key can peek at the access_key before the legit user fetches it for real
        # the reg_key is deleted when the last-requested access_key is used to create a session, which calls getaccountkeyfromaccesskey
        
        registration_key_sha256 = hashlib.sha256( registration_key ).digest()
        
        result = self._c.execute( 'SELECT 1 FROM registration_keys WHERE service_id = ? AND registration_key = ?;', ( service_id, sqlite3.Binary( registration_key_sha256 ) ) ).fetchone()
        
        if result is None:
            
            raise HydrusExceptions.InsufficientCredentialsException( 'The service could not find that registration key in its database.' )
            
        
        new_access_key = os.urandom( HC.HYDRUS_KEY_LENGTH )
        
        # overwrite any previously issued access key for this registration key
        self._c.execute( 'UPDATE registration_keys SET access_key = ? WHERE service_id = ? AND registration_key = ?;', ( sqlite3.Binary( new_access_key ), service_id, sqlite3.Binary( registration_key_sha256 ) ) )
        
        return new_access_key
def _GetAccount( self, service_id, account_id ) -> HydrusNetwork.Account:
2016-04-20 20:42:21 +00:00
2017-03-02 02:14:56 +00:00
( account_key, account_type_id, created, expires, dictionary_string ) = self._c.execute( 'SELECT account_key, account_type_id, created, expires, dictionary_string FROM accounts WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone()
2016-04-20 20:42:21 +00:00
2021-04-07 21:26:45 +00:00
account_type = self._GetAccountType( service_id, account_type_id )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
dictionary = HydrusSerialisable.CreateFromString( dictionary_string )
2015-04-22 22:57:25 +00:00
2017-03-08 23:23:12 +00:00
return HydrusNetwork.Account.GenerateAccountFromTuple( ( account_key, account_type, created, expires, dictionary ) )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
    def _GetAccountFromContent( self, service_key, content ):
        """Find the account that uploaded a given piece of content (a file on a file repo, or a mapping on a tag repo).
        
        Checks both current and deleted tables; raises NotFoundException if the content
        (or its hash/tag) is unknown or the content type is unsupported.
        """
        
        service_id = self._GetServiceId( service_key )
        service_type = self._GetServiceType( service_id )
        
        content_type = content.GetContentType()
        content_data = content.GetContentData()
        
        if content_type == HC.CONTENT_TYPE_FILES:
            
            if service_type != HC.FILE_REPOSITORY:
                
                raise HydrusExceptions.NotFoundException( 'Only File Repositories support file account lookups!')
                
            
            hash = content_data[0]
            
            if not self._MasterHashExists( hash ):
                
                raise HydrusExceptions.NotFoundException( 'The service could not find that hash in its database.' )
                
            
            master_hash_id = self._GetMasterHashId( hash )
            
            if not self._RepositoryServiceHashIdExists( service_id, master_hash_id ):
                
                raise HydrusExceptions.NotFoundException( 'The service could not find that service hash in its database.' )
                
            
            service_hash_id = self._RepositoryGetServiceHashId( service_id, master_hash_id, HydrusData.GetNow() )
            
            ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
            
            # current first, then deleted--the uploader may have had the file removed
            result = self._c.execute( 'SELECT account_id FROM ' + current_files_table_name + ' WHERE service_hash_id = ?;', ( service_hash_id, ) ).fetchone()
            
            if result is None:
                
                result = self._c.execute( 'SELECT account_id FROM ' + deleted_files_table_name + ' WHERE service_hash_id = ?;', ( service_hash_id, ) ).fetchone()
                
                if result is None:
                    
                    raise HydrusExceptions.NotFoundException( 'The service could not find that hash in its database.' )
                    
                
            
        elif content_type == HC.CONTENT_TYPE_MAPPING:
            
            if service_type != HC.TAG_REPOSITORY:
                
                raise HydrusExceptions.NotFoundException( 'Only Tag Repositories support mapping account lookups!')
                
            
            ( tag, hash ) = content_data
            
            if not self._MasterHashExists( hash ):
                
                raise HydrusExceptions.NotFoundException( 'The service could not find that hash in its database.' )
                
            
            master_hash_id = self._GetMasterHashId( hash )
            
            if not self._RepositoryServiceHashIdExists( service_id, master_hash_id ):
                
                raise HydrusExceptions.NotFoundException( 'The service could not find that service hash in its database.' )
                
            
            service_hash_id = self._RepositoryGetServiceHashId( service_id, master_hash_id, HydrusData.GetNow() )
            
            if not self._MasterTagExists( tag ):
                
                raise HydrusExceptions.NotFoundException( 'The service could not find that tag in its database.' )
                
            
            master_tag_id = self._GetMasterTagId( tag )
            
            if not self._RepositoryServiceTagIdExists( service_id, master_tag_id ):
                
                raise HydrusExceptions.NotFoundException( 'The service could not find that service tag in its database.' )
                
            
            service_tag_id = self._RepositoryGetServiceTagId( service_id, master_tag_id, HydrusData.GetNow() )
            
            ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
            
            # current first, then deleted
            result = self._c.execute( 'SELECT account_id FROM ' + current_mappings_table_name + ' WHERE service_tag_id = ? AND service_hash_id = ?;', ( service_tag_id, service_hash_id ) ).fetchone()
            
            if result is None:
                
                result = self._c.execute( 'SELECT account_id FROM ' + deleted_mappings_table_name + ' WHERE service_tag_id = ? AND service_hash_id = ?;', ( service_tag_id, service_hash_id ) ).fetchone()
                
                if result is None:
                    
                    raise HydrusExceptions.NotFoundException( 'The service could not find that mapping in its database.' )
                    
                
            
        else:
            
            raise HydrusExceptions.NotFoundException( 'The service could not understand the submitted content.' )
            
        
        ( account_id, ) = result
        
        account = self._GetAccount( service_id, account_id )
        
        return account
def _GetAccountFromAccountKey( self, service_key, account_key ):
service_id = self._GetServiceId( service_key )
account_id = self._GetAccountId( account_key )
return self._GetAccount( service_id, account_id )
def _GetAccountKeyFromAccessKey( self, service_key, access_key ):
service_id = self._GetServiceId( service_key )
result = self._c.execute( 'SELECT account_key FROM accounts WHERE service_id = ? AND hashed_access_key = ?;', ( service_id, sqlite3.Binary( hashlib.sha256( access_key ).digest() ), ) ).fetchone()
if result is None:
# we do not delete the registration_key (and hence the raw unhashed access_key)
# until the first attempt to create a session to make sure the user
# has the access_key saved
try:
( account_type_id, account_key, expires ) = self._c.execute( 'SELECT account_type_id, account_key, expires FROM registration_keys WHERE access_key = ?;', ( sqlite3.Binary( access_key ), ) ).fetchone()
except:
raise HydrusExceptions.InsufficientCredentialsException( 'The service could not find that account in its database.' )
self._c.execute( 'DELETE FROM registration_keys WHERE access_key = ?;', ( sqlite3.Binary( access_key ), ) )
#
hashed_access_key = hashlib.sha256( access_key ).digest()
account_type = self._GetAccountType( service_id, account_type_id )
created = HydrusData.GetNow()
account = HydrusNetwork.Account( account_key, account_type, created, expires )
( account_key, account_type, created, expires, dictionary ) = HydrusNetwork.Account.GenerateTupleFromAccount( account )
dictionary_string = dictionary.DumpToString()
self._c.execute( 'INSERT INTO accounts ( service_id, account_key, hashed_access_key, account_type_id, created, expires, dictionary_string ) VALUES ( ?, ?, ?, ?, ?, ?, ? );', ( service_id, sqlite3.Binary( account_key ), sqlite3.Binary( hashed_access_key ), account_type_id, created, expires, dictionary_string ) )
else:
( account_key, ) = result
return account_key
def _GetAccountKeyFromAccountId( self, account_id ):
try: ( account_key, ) = self._c.execute( 'SELECT account_key FROM accounts WHERE account_id = ?;', ( account_id, ) ).fetchone()
except: raise HydrusExceptions.InsufficientCredentialsException( 'The service could not find that account_id in its database.' )
return account_key
2017-03-02 02:14:56 +00:00
def _GetAccountId( self, account_key ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT account_id FROM accounts WHERE account_key = ?;', ( sqlite3.Binary( account_key ), ) ).fetchone()
2013-02-19 00:11:43 +00:00
2019-07-03 22:49:27 +00:00
if result is None:
raise HydrusExceptions.InsufficientCredentialsException( 'The service could not find that account key in its database.' )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
( account_id, ) = result
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return account_id
2013-02-19 00:11:43 +00:00
2017-03-29 19:39:34 +00:00
def _GetAccountInfo( self, service_key, account, subject_account ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2013-02-19 00:11:43 +00:00
2017-03-29 19:39:34 +00:00
subject_account_key = subject_account.GetAccountKey()
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
subject_account_id = self._GetAccountId( subject_account_key )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
service_type = self._GetServiceType( service_id )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
if service_type in HC.REPOSITORIES:
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
account_info = self._RepositoryGetAccountInfo( service_id, subject_account_id )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
else:
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
account_info = {}
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return account_info
2013-10-02 22:06:06 +00:00
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _GetAccountTypeId( self, service_id, account_type_key ):
2015-03-04 22:44:32 +00:00
2021-04-07 21:26:45 +00:00
if account_type_key not in self._service_ids_to_account_type_keys_to_account_type_ids[ service_id ]:
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
raise HydrusExceptions.DataMissing( 'Could not find the given account type key!' )
2017-03-02 02:14:56 +00:00
2013-11-27 18:27:11 +00:00
2021-04-07 21:26:45 +00:00
account_type_id = self._service_ids_to_account_type_keys_to_account_type_ids[ service_id ][ account_type_key ]
if account_type_id not in self._service_ids_to_account_type_ids[ service_id ]:
raise HydrusExceptions.DataMissing( 'Could not find the given account type for that service!' )
2013-11-27 18:27:11 +00:00
2017-03-02 02:14:56 +00:00
return account_type_id
2013-11-27 18:27:11 +00:00
2017-03-02 02:14:56 +00:00
def _GetAccountTypes( self, service_key, account ):
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2013-03-15 02:38:12 +00:00
2021-04-07 21:26:45 +00:00
account_types = [ self._account_type_ids_to_account_types[ account_type_id ] for account_type_id in self._service_ids_to_account_type_ids[ service_id ] ]
return account_types
2013-03-15 02:38:12 +00:00
2021-04-07 21:26:45 +00:00
def _GetAccountType( self, service_id, account_type_id ) -> HydrusNetwork.AccountType:
2015-03-04 22:44:32 +00:00
2021-04-07 21:26:45 +00:00
if account_type_id not in self._service_ids_to_account_type_ids[ service_id ]:
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
raise HydrusExceptions.DataMissing( 'Could not find the given account type for that service!' )
2017-03-02 02:14:56 +00:00
2015-03-04 22:44:32 +00:00
2021-04-07 21:26:45 +00:00
return self._account_type_ids_to_account_types[ account_type_id ]
def _GetAllAccounts( self, service_key, admin_account ):
service_id = self._GetServiceId( service_key )
account_ids = self._STL( self._c.execute( 'SELECT account_id FROM accounts WHERE service_id = ?;', ( service_id, ) ) )
accounts = [ self._GetAccount( service_id, account_id ) for account_id in account_ids ]
return accounts
2015-03-04 22:44:32 +00:00
2013-02-19 00:11:43 +00:00
2021-04-07 21:26:45 +00:00
def _GetAutoCreateAccountTypes( self, service_key ):
service_id = self._GetServiceId( service_key )
account_types = [ self._account_type_ids_to_account_types[ account_type_id ] for account_type_id in self._service_ids_to_account_type_ids[ service_id ] ]
auto_create_account_types = [ account_type for account_type in account_types if account_type.SupportsAutoCreateAccount() ]
return auto_create_account_types
def _GetAutoCreateRegistrationKey( self, service_key, account_type_key ):
service_id = self._GetServiceId( service_key )
account_type_id = self._GetAccountTypeId( service_id, account_type_key )
account_type = self._GetAccountType( service_id, account_type_id )
if not account_type.SupportsAutoCreateAccount():
raise HydrusExceptions.BadRequestException( '"{}" accounts do not support auto-creation!'.format( account_type.GetTitle() ) )
2013-02-19 00:11:43 +00:00
2021-04-07 21:26:45 +00:00
if not account_type.CanAutoCreateAccountNow():
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
raise HydrusExceptions.BadRequestException( 'Please wait a bit--there are no new "{}" accounts available for now!'.format( account_type.GetTitle() ) )
2017-03-02 02:14:56 +00:00
2015-03-04 22:44:32 +00:00
2021-04-07 21:26:45 +00:00
num = 1
expires = None
account_type.ReportAutoCreateAccount()
self._c.execute( 'UPDATE account_types SET dump = ? WHERE service_id = ? AND account_type_id = ?;', ( account_type.DumpToString(), service_id, account_type_id ) )
return list( self._GenerateRegistrationKeys( service_id, num, account_type_id, expires ) )[0]
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
def _GetHash( self, master_hash_id ):
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT hash FROM hashes WHERE master_hash_id = ?;', ( master_hash_id, ) ).fetchone()
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
raise Exception( 'File hash error in database' )
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
( hash, ) = result
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
return hash
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
def _GetHashes( self, master_hash_ids ):
2015-09-02 23:16:09 +00:00
2019-12-18 22:06:34 +00:00
select_statement = 'SELECT hash FROM hashes WHERE master_hash_id = ?;'
2015-09-02 23:16:09 +00:00
2019-12-18 22:06:34 +00:00
return [ hash for ( hash, ) in self._ExecuteManySelectSingleParam( select_statement, master_hash_ids ) ]
2015-09-02 23:16:09 +00:00
2017-03-02 02:14:56 +00:00
def _GetMasterHashId( self, hash ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT master_hash_id FROM hashes WHERE hash = ?;', ( sqlite3.Binary( hash ), ) ).fetchone()
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
self._c.execute( 'INSERT INTO hashes ( hash ) VALUES ( ? );', ( sqlite3.Binary( hash ), ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
master_hash_id = self._c.lastrowid
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
return master_hash_id
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
else:
( master_hash_id, ) = result
return master_hash_id
2015-06-03 21:05:13 +00:00
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
def _GetMasterHashIds( self, hashes ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
master_hash_ids = set()
hashes_not_in_db = set()
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
for hash in hashes:
if hash is None:
continue
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT master_hash_id FROM hashes WHERE hash = ?;', ( sqlite3.Binary( hash ), ) ).fetchone()
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
hashes_not_in_db.add( hash )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
else:
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( master_hash_id, ) = result
master_hash_ids.add( master_hash_id )
2015-06-03 21:05:13 +00:00
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
if len( hashes_not_in_db ) > 0:
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._c.executemany( 'INSERT INTO hashes ( hash ) VALUES ( ? );', ( ( sqlite3.Binary( hash ), ) for hash in hashes_not_in_db ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
for hash in hashes_not_in_db:
2015-11-18 22:44:07 +00:00
2017-03-02 02:14:56 +00:00
( master_hash_id, ) = self._c.execute( 'SELECT master_hash_id FROM hashes WHERE hash = ?;', ( sqlite3.Binary( hash ), ) ).fetchone()
master_hash_ids.add( master_hash_id )
2015-11-18 22:44:07 +00:00
2015-06-03 21:05:13 +00:00
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
return master_hash_ids
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
def _GetMasterTagId( self, tag ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
tag = HydrusTags.CleanTag( tag )
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
HydrusTags.CheckTagNotEmpty( tag )
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT master_tag_id FROM tags WHERE tag = ?;', ( tag, ) ).fetchone()
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
2015-11-18 22:44:07 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'INSERT INTO tags ( tag ) VALUES ( ? );', ( tag, ) )
master_tag_id = self._c.lastrowid
return master_tag_id
else:
( master_tag_id, ) = result
return master_tag_id
2015-11-18 22:44:07 +00:00
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
def _GetOptions( self, service_key ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
( options, ) = self._c.execute( 'SELECT options FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone()
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
return options
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
def _GetReason( self, reason_id ):
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT reason FROM reasons WHERE reason_id = ?;', ( reason_id, ) ).fetchone()
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
if result is None: raise Exception( 'Reason error in database' )
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
( reason, ) = result
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
return reason
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
def _GetReasonId( self, reason ):
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT reason_id FROM reasons WHERE reason = ?;', ( reason, ) ).fetchone()
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'INSERT INTO reasons ( reason ) VALUES ( ? );', ( reason, ) )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
reason_id = self._c.lastrowid
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
return reason_id
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
else:
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
( reason_id, ) = result
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
return reason_id
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
def _GetServiceId( self, service_key ):
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT service_id FROM services WHERE service_key = ?;', ( sqlite3.Binary( service_key ), ) ).fetchone()
2014-10-01 22:58:32 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
2014-01-01 20:01:00 +00:00
2017-03-02 02:14:56 +00:00
raise HydrusExceptions.DataMissing( 'Service id error in database' )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
( service_id, ) = result
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return service_id
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
def _GetServiceIds( self, limited_types = HC.ALL_SERVICES ):
2014-09-24 21:50:07 +00:00
2017-03-02 02:14:56 +00:00
return [ service_id for ( service_id, ) in self._c.execute( 'SELECT service_id FROM services WHERE service_type IN ' + HydrusData.SplayListForDB( limited_types ) + ';' ) ]
2014-09-24 21:50:07 +00:00
2017-03-02 02:14:56 +00:00
def _GetServiceKey( self, service_id ):
2014-09-24 21:50:07 +00:00
2017-03-02 02:14:56 +00:00
( service_key, ) = self._c.execute( 'SELECT service_key FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone()
2014-09-24 21:50:07 +00:00
2017-03-02 02:14:56 +00:00
return service_key
2014-09-24 21:50:07 +00:00
2017-03-02 02:14:56 +00:00
def _GetServiceKeys( self, limited_types = HC.ALL_SERVICES ):
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
return [ service_key for ( service_key, ) in self._c.execute( 'SELECT service_key FROM services WHERE service_type IN '+ HydrusData.SplayListForDB( limited_types ) + ';' ) ]
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
def _GetServiceType( self, service_id ):
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT service_type FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone()
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
if result is None: raise Exception( 'Service id error in database' )
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
( service_type, ) = result
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
return service_type
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
def _GetServices( self, limited_types = HC.ALL_SERVICES ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
services = []
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
service_info = self._c.execute( 'SELECT service_key, service_type, name, port, dictionary_string FROM services WHERE service_type IN ' + HydrusData.SplayListForDB( limited_types ) + ';' ).fetchall()
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
for ( service_key, service_type, name, port, dictionary_string ) in service_info:
dictionary = HydrusSerialisable.CreateFromString( dictionary_string )
service = HydrusNetwork.GenerateService( service_key, service_type, name, port, dictionary )
services.append( service )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return services
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
def _GetServicesFromAccount( self, account ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return self._GetServices()
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
def _GetSessions( self, service_key = None ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
now = HydrusData.GetNow()
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM sessions WHERE ? > expires;', ( now, ) )
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
sessions = []
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
if service_key is None:
results = self._c.execute( 'SELECT session_key, service_id, account_id, expires FROM sessions;' ).fetchall()
else:
service_id = self._GetServiceId( service_key)
results = self._c.execute( 'SELECT session_key, service_id, account_id, expires FROM sessions WHERE service_id = ?;', ( service_id, ) ).fetchall()
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
service_ids_to_service_keys = {}
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
account_ids_to_accounts = {}
2015-03-04 22:44:32 +00:00
account_ids_to_hashed_access_keys = {}
2017-03-02 02:14:56 +00:00
for ( session_key, service_id, account_id, expires ) in results:
if service_id not in service_ids_to_service_keys:
service_ids_to_service_keys[ service_id ] = self._GetServiceKey( service_id )
service_key = service_ids_to_service_keys[ service_id ]
if account_id not in account_ids_to_accounts:
account = self._GetAccount( service_id, account_id )
account_ids_to_accounts[ account_id ] = account
account = account_ids_to_accounts[ account_id ]
if account_id not in account_ids_to_hashed_access_keys:
( hashed_access_key, ) = self._c.execute( 'SELECT hashed_access_key FROM accounts WHERE account_id = ?;', ( account_id, ) ).fetchone()
account_ids_to_hashed_access_keys[ account_id ] = hashed_access_key
hashed_access_key = account_ids_to_hashed_access_keys[ account_id ]
sessions.append( ( session_key, service_key, account, hashed_access_key, expires ) )
2017-03-02 02:14:56 +00:00
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
return sessions
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _GetTag( self, master_tag_id ):
2015-10-14 21:02:25 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT tag FROM tags WHERE master_tag_id = ?;', ( master_tag_id, ) ).fetchone()
2015-03-04 22:44:32 +00:00
2016-02-17 22:06:47 +00:00
if result is None:
2017-03-02 02:14:56 +00:00
raise Exception( 'Tag error in database' )
2016-02-17 22:06:47 +00:00
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
( tag, ) = result
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
return tag
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _InitCaches( self ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
self._over_monthly_data = False
self._services_over_monthly_data = set()
2015-03-04 22:44:32 +00:00
2021-04-07 21:26:45 +00:00
self._RefreshAccountTypeCache()
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _InitExternalDatabases( self ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
self._db_filenames[ 'external_mappings' ] = 'server.mappings.db'
self._db_filenames[ 'external_master' ] = 'server.master.db'
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _ManageDBError( self, job, e ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
if isinstance( e, HydrusExceptions.NetworkException ):
job.PutResult( e )
else:
( exception_type, value, tb ) = sys.exc_info()
new_e = type( e )( os.linesep.join( traceback.format_exception( exception_type, value, tb ) ) )
job.PutResult( new_e )
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _MasterHashExists( self, hash ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT master_hash_id FROM hashes WHERE hash = ?;', ( sqlite3.Binary( hash ), ) ).fetchone()
2015-03-04 22:44:32 +00:00
if result is None:
2017-03-02 02:14:56 +00:00
return False
2015-03-04 22:44:32 +00:00
else:
2017-03-02 02:14:56 +00:00
return True
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def _MasterTagExists( self, tag ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT master_tag_id FROM tags WHERE tag = ?;', ( tag, ) ).fetchone()
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
if result is None:
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
return False
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
else:
return True
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
def _ModifyAccountAccountType( self, service_key, admin_account, subject_account_key, new_account_type_key ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2014-03-26 21:23:10 +00:00
2021-04-07 21:26:45 +00:00
subject_account_id = self._GetAccountId( subject_account_key )
subject_account = self._GetAccount( service_id, subject_account_id )
current_account_type_id = self._GetAccountTypeId( service_id, subject_account.GetAccountType().GetAccountTypeKey() )
new_account_type_id = self._GetAccountTypeId( service_id, new_account_type_key )
current_account_type = self._GetAccountType( service_id, current_account_type_id )
new_account_type = self._GetAccountType( service_id, new_account_type_id )
2014-03-26 21:23:10 +00:00
2021-04-07 21:26:45 +00:00
self._c.execute( 'UPDATE accounts SET account_type_id = ? WHERE account_id = ?;', ( new_account_type_id, subject_account_id ) )
2014-03-26 21:23:10 +00:00
2021-04-07 21:26:45 +00:00
HG.server_controller.pub( 'update_session_accounts', service_key, ( subject_account_key, ) )
HydrusData.Print(
'Account {} changed the account type of {} from "{}" to "{}".'.format(
admin_account.GetAccountKey().hex(),
subject_account_key.hex(),
current_account_type.GetTitle(),
new_account_type.GetTitle()
)
)
2014-03-26 21:23:10 +00:00
2013-10-02 22:06:06 +00:00
2021-04-07 21:26:45 +00:00
def _ModifyAccountBan( self, service_key, admin_account, subject_account_key, reason, expires ):
2013-11-27 18:27:11 +00:00
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
2013-11-27 18:27:11 +00:00
2021-04-07 21:26:45 +00:00
subject_account_id = self._GetAccountId( subject_account_key )
2013-11-27 18:27:11 +00:00
2021-04-07 21:26:45 +00:00
subject_account = self._GetAccount( service_id, subject_account_id )
2013-11-27 18:27:11 +00:00
2021-04-07 21:26:45 +00:00
now = HydrusData.GetNow()
subject_account.Ban( reason, now, expires )
self._SaveAccounts( service_id, ( subject_account, ) )
service_type = self._GetServiceType( service_id )
if service_type in HC.REPOSITORIES:
2013-11-27 18:27:11 +00:00
2021-04-07 21:26:45 +00:00
self._DeleteRepositoryPetitions( service_id, ( subject_account_id, ) )
2013-11-27 18:27:11 +00:00
2021-04-07 21:26:45 +00:00
HG.server_controller.pub( 'update_session_accounts', service_key, ( subject_account_key, ) )
HydrusData.Print(
'Account {} banned {} with reason "{}" until "{}".'.format(
admin_account.GetAccountKey().hex(),
subject_account_key.hex(),
reason,
HydrusData.ConvertTimestampToPrettyExpires( expires )
)
)
def _ModifyAccountExpires( self, service_key, admin_account, subject_account_key, new_expires ):
service_id = self._GetServiceId( service_key )
subject_account_id = self._GetAccountId( subject_account_key )
( current_expires, ) = self._c.execute( 'SELECT expires FROM accounts WHERE account_id = ?;', ( subject_account_id, ) ).fetchone()
self._c.execute( 'UPDATE accounts SET expires = ? WHERE account_id = ?;', ( new_expires, subject_account_id ) )
HG.server_controller.pub( 'update_session_accounts', service_key, ( subject_account_key, ) )
HydrusData.Print(
'Account {} changed the expiration of {} from "{}" to "{}".'.format(
admin_account.GetAccountKey().hex(),
subject_account_key.hex(),
HydrusData.ConvertTimestampToPrettyExpires( current_expires ),
HydrusData.ConvertTimestampToPrettyExpires( new_expires )
)
)
def _ModifyAccountSetMessage( self, service_key, admin_account, subject_account_key, message ):
service_id = self._GetServiceId( service_key )
account_id = self._GetAccountId( subject_account_key )
subject_account = self._GetAccount( service_id, account_id )
now = HydrusData.GetNow()
subject_account.SetMessage( message, now )
self._SaveAccounts( service_id, ( subject_account, ) )
HG.server_controller.pub( 'update_session_accounts', service_key, ( subject_account_key, ) )
if message == '':
m = 'Account {} cleared {} of any message.'
else:
m = 'Account {} set {} with a message.'
HydrusData.Print(
m.format(
admin_account.GetAccountKey().hex(),
subject_account_key.hex()
)
)
def _ModifyAccountUnban( self, service_key, admin_account, subject_account_key ):
service_id = self._GetServiceId( service_key )
account_id = self._GetAccountId( subject_account_key )
subject_account = self._GetAccount( service_id, account_id )
subject_account.Unban()
self._SaveAccounts( service_id, ( subject_account, ) )
HG.server_controller.pub( 'update_session_accounts', service_key, ( subject_account_key, ) )
HydrusData.Print(
'Account {} unbanned {}.'.format(
admin_account.GetAccountKey().hex(),
subject_account_key.hex()
)
)
    def _ModifyAccountTypes( self, service_key, admin_account, account_types, deletee_account_type_keys_to_replacement_account_type_keys ):
        """Replace the service's account types wholesale.
        
        account_types is the complete desired list. Any currently existing type
        not present in it must have a replacement mapped in
        deletee_account_type_keys_to_replacement_account_type_keys; its accounts
        and outstanding registration keys are migrated to that replacement
        before the old type row is deleted.
        
        Raises HydrusExceptions.DataMissing if a deletee has no replacement, or
        the replacement is not among the new types.
        """
        
        service_id = self._GetServiceId( service_key )
        
        current_account_types = self._GetAccountTypes( service_key, admin_account )
        
        current_account_type_keys_to_account_types = { account_type.GetAccountTypeKey() : account_type for account_type in current_account_types }
        current_account_type_keys = set( current_account_type_keys_to_account_types.keys() )
        
        future_account_type_keys_to_account_types = { account_type.GetAccountTypeKey() : account_type for account_type in account_types }
        future_account_type_keys = set( future_account_type_keys_to_account_types.keys() )
        
        deletee_account_type_keys = current_account_type_keys.difference( future_account_type_keys )
        
        # validate every deletion has a replacement that will still exist, before changing anything
        for deletee_account_type_key in deletee_account_type_keys:
            
            if deletee_account_type_key not in deletee_account_type_keys_to_replacement_account_type_keys:
                
                raise HydrusExceptions.DataMissing( 'Was missing a replacement account_type_key.' )
                
            
            if deletee_account_type_keys_to_replacement_account_type_keys[ deletee_account_type_key ] not in future_account_type_keys:
                
                raise HydrusExceptions.DataMissing( 'Was a replacement account_type_key was not in the future account types.' )
                
            
        
        # we have a temp lad here, don't want to alter the actual cache structure, just in case of rollback
        modification_account_type_keys_to_account_type_ids = dict( self._service_ids_to_account_type_keys_to_account_type_ids[ service_id ] )
        
        for account_type in account_types:
            
            account_type_key = account_type.GetAccountTypeKey()
            
            if account_type_key not in current_account_type_keys:
                
                # brand new type for this service
                account_type_id = self._AddAccountType( service_id, account_type )
                
                modification_account_type_keys_to_account_type_ids[ account_type_key ] = account_type_id
                
                HydrusData.Print(
                    'Account {} added a new account type, "{}".'.format(
                        admin_account.GetAccountKey().hex(),
                        account_type.GetTitle()
                    )
                )
                
            else:
                
                # existing type: persist its (possibly updated) serialised form
                dump = account_type.DumpToString()
                
                account_type_id = modification_account_type_keys_to_account_type_ids[ account_type_key ]
                
                self._c.execute( 'UPDATE account_types SET dump = ? WHERE service_id = ? AND account_type_id = ?;', ( dump, service_id, account_type_id ) )
                
                HydrusData.Print(
                    'Account {} confirmed/updated the account type, "{}".'.format(
                        admin_account.GetAccountKey().hex(),
                        account_type.GetTitle()
                    )
                )
                
            
        
        for deletee_account_type_key in deletee_account_type_keys:
            
            new_account_type_key = deletee_account_type_keys_to_replacement_account_type_keys[ deletee_account_type_key ]
            
            deletee_account_type_id = modification_account_type_keys_to_account_type_ids[ deletee_account_type_key ]
            new_account_type_id = modification_account_type_keys_to_account_type_ids[ new_account_type_key ]
            
            # migrate accounts and outstanding registration keys off the doomed type, then drop it
            self._c.execute( 'UPDATE accounts SET account_type_id = ? WHERE service_id = ? AND account_type_id = ?;', ( new_account_type_id, service_id, deletee_account_type_id ) )
            self._c.execute( 'UPDATE registration_keys SET account_type_id = ? WHERE service_id = ? AND account_type_id = ?;', ( new_account_type_id, service_id, deletee_account_type_id ) )
            
            self._c.execute( 'DELETE FROM account_types WHERE service_id = ? AND account_type_id = ?;', ( service_id, deletee_account_type_id ) )
            
            deletee_account_type = current_account_type_keys_to_account_types[ deletee_account_type_key ]
            new_account_type = future_account_type_keys_to_account_types[ new_account_type_key ]
            
            HydrusData.Print(
                'Account {} deleted the account type, "{}", replacing them with "{}".'.format(
                    admin_account.GetAccountKey().hex(),
                    deletee_account_type.GetTitle(),
                    new_account_type.GetTitle()
                )
            )
            
        
        # now we are done, no rollback, so let's update the cache
        self._RefreshAccountTypeCache()
        
        self.pub_after_job( 'update_all_session_accounts', service_key )
2013-11-27 18:27:11 +00:00
2017-03-02 02:14:56 +00:00
def _ModifyServices( self, account, services ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
current_service_keys = { service_key for ( service_key, ) in self._c.execute( 'SELECT service_key FROM services;' ) }
2013-05-01 17:21:53 +00:00
2017-03-02 02:14:56 +00:00
future_service_keys = { service.GetServiceKey() for service in services }
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
for service_key in current_service_keys:
if service_key not in future_service_keys:
self._DeleteService( service_key )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
service_keys_to_access_keys = {}
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
for service in services:
service_key = service.GetServiceKey()
if service_key in current_service_keys:
( service_key, service_type, name, port, dictionary ) = service.ToTuple()
service_id = self._GetServiceId( service_key )
dictionary_string = dictionary.DumpToString()
self._c.execute( 'UPDATE services SET name = ?, port = ?, dictionary_string = ? WHERE service_id = ?;', ( name, port, dictionary_string, service_id ) )
else:
access_key = self._AddService( service )
service_keys_to_access_keys[ service_key ] = access_key
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return service_keys_to_access_keys
2013-10-09 18:13:42 +00:00
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
def _Read( self, action, *args, **kwargs ):
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
if action == 'access_key': result = self._GetAccessKey( *args, **kwargs )
elif action == 'account': result = self._GetAccountFromAccountKey( *args, **kwargs )
2021-04-07 21:26:45 +00:00
elif action == 'account_from_content': result = self._GetAccountFromContent( *args, **kwargs )
2017-03-02 02:14:56 +00:00
elif action == 'account_info': result = self._GetAccountInfo( *args, **kwargs )
elif action == 'account_key_from_access_key': result = self._GetAccountKeyFromAccessKey( *args, **kwargs )
elif action == 'account_types': result = self._GetAccountTypes( *args, **kwargs )
2021-04-07 21:26:45 +00:00
elif action == 'auto_create_account_types': result = self._GetAutoCreateAccountTypes( *args, **kwargs )
elif action == 'auto_create_registration_key': result = self._GetAutoCreateRegistrationKey( *args, **kwargs )
elif action == 'all_accounts': result = self._GetAllAccounts( *args, **kwargs )
2017-03-02 02:14:56 +00:00
elif action == 'immediate_update': result = self._RepositoryGenerateImmediateUpdate( *args, **kwargs )
elif action == 'ip': result = self._RepositoryGetIPTimestamp( *args, **kwargs )
elif action == 'num_petitions': result = self._RepositoryGetNumPetitions( *args, **kwargs )
elif action == 'petition': result = self._RepositoryGetPetition( *args, **kwargs )
elif action == 'registration_keys': result = self._GenerateRegistrationKeysFromAccount( *args, **kwargs )
elif action == 'service_has_file': result = self._RepositoryHasFile( *args, **kwargs )
elif action == 'service_keys': result = self._GetServiceKeys( *args, **kwargs )
elif action == 'services': result = self._GetServices( *args, **kwargs )
elif action == 'services_from_account': result = self._GetServicesFromAccount( *args, **kwargs )
elif action == 'sessions': result = self._GetSessions( *args, **kwargs )
elif action == 'verify_access_key': result = self._VerifyAccessKey( *args, **kwargs )
else: raise Exception( 'db received an unknown read command: ' + action )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
return result
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
def _RefreshAccountTypeCache( self ):
2013-02-19 00:11:43 +00:00
2021-04-07 21:26:45 +00:00
self._service_ids_to_account_type_ids = collections.defaultdict( set )
self._account_type_ids_to_account_types = {}
self._service_ids_to_account_type_keys_to_account_type_ids = collections.defaultdict( dict )
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
data = self._c.execute( 'SELECT account_type_id, service_id, dump FROM account_types;' ).fetchall()
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
for ( account_type_id, service_id, dump ) in data:
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
account_type = HydrusSerialisable.CreateFromString( dump )
2017-03-02 02:14:56 +00:00
2021-04-07 21:26:45 +00:00
self._service_ids_to_account_type_ids[ service_id ].add( account_type_id )
self._account_type_ids_to_account_types[ account_type_id ] = account_type
self._service_ids_to_account_type_keys_to_account_type_ids[ service_id ][ account_type.GetAccountTypeKey() ] = account_type_id
2017-03-02 02:14:56 +00:00
2013-02-19 00:11:43 +00:00
2019-09-18 22:40:39 +00:00
    def _RepositoryAddFile( self, service_id, account_id, file_dict, overwrite_deleted, timestamp ):
        """Add an uploaded file to a file repository.
        
        Registers the file in the master tables, records the uploader's ip if
        supplied, and inserts a current-files row unless the file is already
        current or (when overwrite_deleted is False) was previously deleted.
        """
        
        master_hash_id = self._AddFile( file_dict )
        
        service_hash_id = self._RepositoryGetServiceHashId( service_id, master_hash_id, timestamp )
        
        ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
        ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
        
        if 'ip' in file_dict:
            
            ip = file_dict[ 'ip' ]
            
            # NOTE(review): this runs before the already-current check below, so a
            # re-upload of a current file still appends another ip row -- presumably
            # intended to log every upload source; confirm
            self._c.execute( 'INSERT INTO ' + ip_addresses_table_name + ' ( master_hash_id, ip, ip_timestamp ) VALUES ( ?, ?, ? );', ( master_hash_id, ip, timestamp ) )
            
        
        result = self._c.execute( 'SELECT 1 FROM ' + current_files_table_name + ' WHERE service_hash_id = ?;', ( service_hash_id, ) ).fetchone()
        
        if result is not None:
            
            # already current--nothing more to do
            return
            
        
        if overwrite_deleted:
            
            # future work: reward penders and clear pending rows here too
            #self._RepositoryRewardFilePenders( service_id, service_hash_id, 1 )
            #self._c.execute( 'DELETE FROM ' + pending_files_table_name + ' WHERE service_hash_id = ?;', ( service_hash_id, ) )
            self._c.execute( 'DELETE FROM ' + deleted_files_table_name + ' WHERE service_hash_id = ?;', ( service_hash_id, ) )
            
        else:
            
            # previously deleted files are not silently re-added
            result = self._c.execute( 'SELECT 1 FROM ' + deleted_files_table_name + ' WHERE service_hash_id = ?;', ( service_hash_id, ) ).fetchone()
            
            if result is not None:
                
                return
                
            
        
        self._c.execute( 'INSERT INTO ' + current_files_table_name + ' ( service_hash_id, account_id, file_timestamp ) VALUES ( ?, ?, ? );', ( service_hash_id, account_id, timestamp ) )
        
    
    def _RepositoryAddMappings( self, service_id, account_id, master_tag_id, master_hash_ids, overwrite_deleted, timestamp ):
        """Add tag->files mappings to a tag repository.
        
        When overwrite_deleted is True, previously deleted rows are purged and
        re-added; otherwise hashes with a deleted row are filtered out first.
        """
        
        service_tag_id = self._RepositoryGetServiceTagId( service_id, master_tag_id, timestamp )
        service_hash_ids = self._RepositoryGetServiceHashIds( service_id, master_hash_ids, timestamp )
        
        ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
        
        if overwrite_deleted:
            
            # future work: reward penders and clear pending rows here too
            #self._RepositoryRewardMappingPenders( service_id, service_tag_id, service_hash_ids, 1 )
            #self._c.executemany( 'DELETE FROM ' + pending_mappings_table_name + ' WHERE master_tag_id = ? AND master_hash_id = ?;', ( ( master_tag_id, master_hash_id ) for master_hash_id in master_hash_ids ) )
            
            self._c.executemany( 'DELETE FROM ' + deleted_mappings_table_name + ' WHERE service_tag_id = ? AND service_hash_id = ?;', ( ( service_tag_id, service_hash_id ) for service_hash_id in service_hash_ids ) )
            
        else:
            
            # service_tag_id is an internal integer id, so inlining it into the
            # SQL text here avoids a two-param executemany
            select_statement = 'SELECT service_hash_id FROM ' + deleted_mappings_table_name + ' WHERE service_tag_id = ' + str( service_tag_id ) + ' AND service_hash_id = ?;'
            
            deleted_service_hash_ids = self._STI( self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ) )
            
            # do not resurrect deleted mappings
            service_hash_ids = set( service_hash_ids ).difference( deleted_service_hash_ids )
            
        
        # in future, delete from pending with the master ids here
        
        self._c.executemany( 'INSERT OR IGNORE INTO ' + current_mappings_table_name + ' ( service_tag_id, service_hash_id, account_id, mapping_timestamp ) VALUES ( ?, ?, ?, ? );', [ ( service_tag_id, service_hash_id, account_id, timestamp ) for service_hash_id in service_hash_ids ] )
        
    
    def _RepositoryAddTagParent( self, service_id, account_id, child_master_tag_id, parent_master_tag_id, overwrite_deleted, timestamp ):
        """Add a tag parent relationship to a tag repository.
        
        When overwrite_deleted is True, penders are rewarded and the pending and
        deleted rows are cleared before insertion; otherwise a previously
        deleted pair is left deleted and nothing is inserted.
        """
        
        child_service_tag_id = self._RepositoryGetServiceTagId( service_id, child_master_tag_id, timestamp )
        parent_service_tag_id = self._RepositoryGetServiceTagId( service_id, parent_master_tag_id, timestamp )
        
        ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
        
        if overwrite_deleted:
            
            # reward while the pending rows (which the reward lookup presumably
            # reads -- confirm) still exist, then clear them
            self._RepositoryRewardTagParentPenders( service_id, child_master_tag_id, parent_master_tag_id, 1 )
            
            self._c.execute( 'DELETE FROM ' + pending_tag_parents_table_name + ' WHERE child_master_tag_id = ? AND parent_master_tag_id = ?;', ( child_master_tag_id, parent_master_tag_id ) )
            self._c.execute( 'DELETE FROM ' + deleted_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) )
            
        else:
            
            result = self._c.execute( 'SELECT 1 FROM ' + deleted_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) ).fetchone()
            
            if result is not None:
                
                # previously deleted--do not resurrect
                return
                
            
        
        self._c.execute( 'INSERT OR IGNORE INTO ' + current_tag_parents_table_name + ' ( child_service_tag_id, parent_service_tag_id, account_id, parent_timestamp ) VALUES ( ?, ?, ?, ? );', ( child_service_tag_id, parent_service_tag_id, account_id, timestamp ) )
        
    
def _RepositoryAddTagSibling( self, service_id, account_id, bad_master_tag_id, good_master_tag_id, overwrite_deleted, timestamp ):
2013-03-15 02:38:12 +00:00
2019-09-18 22:40:39 +00:00
bad_service_tag_id = self._RepositoryGetServiceTagId( service_id, bad_master_tag_id, timestamp )
good_service_tag_id = self._RepositoryGetServiceTagId( service_id, good_master_tag_id, timestamp )
2014-09-24 21:50:07 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
if overwrite_deleted:
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagSiblingPenders( service_id, bad_master_tag_id, good_master_tag_id, 1 )
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM ' + pending_tag_siblings_table_name + ' WHERE bad_master_tag_id = ? AND good_master_tag_id = ?;', ( bad_master_tag_id, good_master_tag_id ) )
self._c.execute( 'DELETE FROM ' + deleted_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) )
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
else:
2014-09-10 22:37:38 +00:00
2017-03-02 02:14:56 +00:00
result = self._c.execute( 'SELECT 1 FROM ' + deleted_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) ).fetchone()
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
if result is not None:
return
2013-03-15 02:38:12 +00:00
2019-09-18 22:40:39 +00:00
self._c.execute( 'INSERT OR IGNORE INTO ' + current_tag_siblings_table_name + ' ( bad_service_tag_id, good_service_tag_id, account_id, sibling_timestamp ) VALUES ( ?, ?, ?, ? );', ( bad_service_tag_id, good_service_tag_id, account_id, timestamp ) )
2013-03-15 02:38:12 +00:00
2017-03-02 02:14:56 +00:00
    def _RepositoryCreate( self, service_id ):
        """Create the full per-service table set for a new repository.
        
        Covers master id maps, files, mappings, tag parents, tag siblings and
        the update-file registry. Note: pending tables key on master ids while
        current/deleted/petitioned tables key on service ids.
        """
        
        # master id maps: service-local ids <-> master ids, with first-seen timestamps
        
        ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
        
        self._c.execute( 'CREATE TABLE ' + hash_id_map_table_name + ' ( service_hash_id INTEGER PRIMARY KEY, master_hash_id INTEGER UNIQUE, hash_id_timestamp INTEGER );' )
        self._CreateIndex( hash_id_map_table_name, [ 'hash_id_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + tag_id_map_table_name + ' ( service_tag_id INTEGER PRIMARY KEY, master_tag_id INTEGER UNIQUE, tag_id_timestamp INTEGER );' )
        self._CreateIndex( tag_id_map_table_name, [ 'tag_id_timestamp' ] )
        
        # files
        
        ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
        
        self._c.execute( 'CREATE TABLE ' + current_files_table_name + ' ( service_hash_id INTEGER PRIMARY KEY, account_id INTEGER, file_timestamp INTEGER );' )
        self._CreateIndex( current_files_table_name, [ 'account_id' ] )
        self._CreateIndex( current_files_table_name, [ 'file_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + deleted_files_table_name + ' ( service_hash_id INTEGER PRIMARY KEY, account_id INTEGER, file_timestamp INTEGER );' )
        self._CreateIndex( deleted_files_table_name, [ 'account_id' ] )
        self._CreateIndex( deleted_files_table_name, [ 'file_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + pending_files_table_name + ' ( master_hash_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( master_hash_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( pending_files_table_name, [ 'account_id', 'reason_id' ] )
        
        self._c.execute( 'CREATE TABLE ' + petitioned_files_table_name + ' ( service_hash_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( service_hash_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( petitioned_files_table_name, [ 'account_id', 'reason_id' ] )
        
        self._c.execute( 'CREATE TABLE ' + ip_addresses_table_name + ' ( master_hash_id INTEGER, ip TEXT, ip_timestamp INTEGER );' )
        
        # mappings
        
        ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
        
        self._c.execute( 'CREATE TABLE ' + current_mappings_table_name + ' ( service_tag_id INTEGER, service_hash_id INTEGER, account_id INTEGER, mapping_timestamp INTEGER, PRIMARY KEY ( service_tag_id, service_hash_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( current_mappings_table_name, [ 'account_id' ] )
        self._CreateIndex( current_mappings_table_name, [ 'mapping_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + deleted_mappings_table_name + ' ( service_tag_id INTEGER, service_hash_id INTEGER, account_id INTEGER, mapping_timestamp INTEGER, PRIMARY KEY ( service_tag_id, service_hash_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( deleted_mappings_table_name, [ 'account_id' ] )
        self._CreateIndex( deleted_mappings_table_name, [ 'mapping_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + pending_mappings_table_name + ' ( master_tag_id INTEGER, master_hash_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( master_tag_id, master_hash_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( pending_mappings_table_name, [ 'account_id', 'reason_id' ] )
        
        self._c.execute( 'CREATE TABLE ' + petitioned_mappings_table_name + ' ( service_tag_id INTEGER, service_hash_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( service_tag_id, service_hash_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( petitioned_mappings_table_name, [ 'account_id', 'reason_id' ] )
        
        # tag parents
        
        ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
        
        self._c.execute( 'CREATE TABLE ' + current_tag_parents_table_name + ' ( child_service_tag_id INTEGER, parent_service_tag_id INTEGER, account_id INTEGER, parent_timestamp INTEGER, PRIMARY KEY ( child_service_tag_id, parent_service_tag_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( current_tag_parents_table_name, [ 'account_id' ] )
        self._CreateIndex( current_tag_parents_table_name, [ 'parent_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + deleted_tag_parents_table_name + ' ( child_service_tag_id INTEGER, parent_service_tag_id INTEGER, account_id INTEGER, parent_timestamp INTEGER, PRIMARY KEY ( child_service_tag_id, parent_service_tag_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( deleted_tag_parents_table_name, [ 'account_id' ] )
        self._CreateIndex( deleted_tag_parents_table_name, [ 'parent_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + pending_tag_parents_table_name + ' ( child_master_tag_id INTEGER, parent_master_tag_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( child_master_tag_id, parent_master_tag_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( pending_tag_parents_table_name, [ 'account_id', 'reason_id' ] )
        
        self._c.execute( 'CREATE TABLE ' + petitioned_tag_parents_table_name + ' ( child_service_tag_id INTEGER, parent_service_tag_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( child_service_tag_id, parent_service_tag_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( petitioned_tag_parents_table_name, [ 'account_id', 'reason_id' ] )
        
        # tag siblings: a bad tag maps to at most one good tag, hence the single-column primary keys
        
        ( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
        
        self._c.execute( 'CREATE TABLE ' + current_tag_siblings_table_name + ' ( bad_service_tag_id INTEGER PRIMARY KEY, good_service_tag_id INTEGER, account_id INTEGER, sibling_timestamp INTEGER );' )
        self._CreateIndex( current_tag_siblings_table_name, [ 'account_id' ] )
        self._CreateIndex( current_tag_siblings_table_name, [ 'sibling_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + deleted_tag_siblings_table_name + ' ( bad_service_tag_id INTEGER PRIMARY KEY, good_service_tag_id INTEGER, account_id INTEGER, sibling_timestamp INTEGER );' )
        self._CreateIndex( deleted_tag_siblings_table_name, [ 'account_id' ] )
        self._CreateIndex( deleted_tag_siblings_table_name, [ 'sibling_timestamp' ] )
        
        self._c.execute( 'CREATE TABLE ' + pending_tag_siblings_table_name + ' ( bad_master_tag_id INTEGER, good_master_tag_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( bad_master_tag_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( pending_tag_siblings_table_name, [ 'account_id', 'reason_id' ] )
        
        self._c.execute( 'CREATE TABLE ' + petitioned_tag_siblings_table_name + ' ( bad_service_tag_id INTEGER, good_service_tag_id INTEGER, account_id INTEGER, reason_id INTEGER, PRIMARY KEY ( bad_service_tag_id, account_id ) ) WITHOUT ROWID;' )
        self._CreateIndex( petitioned_tag_siblings_table_name, [ 'account_id', 'reason_id' ] )
        
        # update storage: master hash ids of this service's serialised update files
        
        # the parens here are redundant--this is a plain string, not a tuple
        ( update_table_name ) = GenerateRepositoryUpdateTableName( service_id )
        
        self._c.execute( 'CREATE TABLE ' + update_table_name + ' ( master_hash_id INTEGER PRIMARY KEY );' )
        
    
    def _RepositoryCreateUpdate( self, service_key, begin, end ):
        """Build the repository update files for the period [begin, end].
        
        Each generated update is serialised, written to server file storage
        under its sha256 hash, and its hash registered in the service's update
        table. Returns the list of update hashes (empty if there was nothing
        to package).
        """
        
        service_id = self._GetServiceId( service_key )
        
        ( name, ) = self._c.execute( 'SELECT name FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone()
        
        HydrusData.Print( 'Creating update for ' + repr( name ) + ' from ' + HydrusData.ConvertTimestampToPrettyTime( begin, in_utc = True ) + ' to ' + HydrusData.ConvertTimestampToPrettyTime( end, in_utc = True ) )
        
        updates = self._RepositoryGenerateUpdates( service_id, begin, end )
        
        update_hashes = []
        
        # row counts are tallied purely for the log line at the end
        total_definition_rows = 0
        total_content_rows = 0
        
        if len( updates ) > 0:
            
            for update in updates:
                
                num_rows = update.GetNumRows()
                
                if isinstance( update, HydrusNetwork.DefinitionsUpdate ):
                    
                    total_definition_rows += num_rows
                    
                elif isinstance( update, HydrusNetwork.ContentUpdate ):
                    
                    total_content_rows += num_rows
                    
                
                update_bytes = update.DumpToNetworkBytes()
                
                # updates are content-addressed by the hash of their serialised bytes
                update_hash = hashlib.sha256( update_bytes ).digest()
                
                dest_path = ServerFiles.GetExpectedFilePath( update_hash )
                
                with open( dest_path, 'wb' ) as f:
                    
                    f.write( update_bytes )
                    
                
                update_hashes.append( update_hash )
                
            
            # the parens here are redundant--this is a plain string, not a tuple
            ( update_table_name ) = GenerateRepositoryUpdateTableName( service_id )
            
            master_hash_ids = self._GetMasterHashIds( update_hashes )
            
            self._c.executemany( 'INSERT OR IGNORE INTO ' + update_table_name + ' ( master_hash_id ) VALUES ( ? );', ( ( master_hash_id, ) for master_hash_id in master_hash_ids ) )
            
        
        HydrusData.Print( 'Update OK. ' + HydrusData.ToHumanInt( total_definition_rows ) + ' definition rows and ' + HydrusData.ToHumanInt( total_content_rows ) + ' content rows in ' + HydrusData.ToHumanInt( len( updates ) ) + ' update files.' )
        
        return update_hashes
        
    
def _RepositoryDeleteFiles( self, service_id, account_id, service_hash_ids, timestamp ):
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
2015-03-04 22:44:32 +00:00
2019-12-18 22:06:34 +00:00
select_statement = 'SELECT service_hash_id FROM ' + current_files_table_name + ' WHERE service_hash_id = ?;'
2015-03-04 22:44:32 +00:00
2019-12-18 22:06:34 +00:00
valid_service_hash_ids = self._STL( self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ) )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardFilePetitioners( service_id, valid_service_hash_ids, 1 )
2013-02-19 00:11:43 +00:00
2017-03-02 02:14:56 +00:00
self._c.executemany( 'DELETE FROM ' + current_files_table_name + ' WHERE service_hash_id = ?', ( ( service_hash_id, ) for service_hash_id in valid_service_hash_ids ) )
self._c.executemany( 'DELETE FROM ' + petitioned_files_table_name + ' WHERE service_hash_id = ?', ( ( service_hash_id, ) for service_hash_id in valid_service_hash_ids ) )
2013-10-02 22:06:06 +00:00
2019-09-18 22:40:39 +00:00
self._c.executemany( 'INSERT OR IGNORE INTO ' + deleted_files_table_name + ' ( service_hash_id, account_id, file_timestamp ) VALUES ( ?, ?, ? );', ( ( service_hash_id, account_id, timestamp ) for service_hash_id in valid_service_hash_ids ) )
2013-10-02 22:06:06 +00:00
2019-09-18 22:40:39 +00:00
def _RepositoryDeleteMappings( self, service_id, account_id, service_tag_id, service_hash_ids, timestamp ):
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
2013-10-02 22:06:06 +00:00
2019-12-18 22:06:34 +00:00
select_statement = 'SELECT service_hash_id FROM ' + current_mappings_table_name + ' WHERE service_tag_id = ' + str( service_tag_id ) + ' AND service_hash_id = ?;'
2013-10-02 22:06:06 +00:00
2019-12-18 22:06:34 +00:00
valid_service_hash_ids = self._STL( self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ) )
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardMappingPetitioners( service_id, service_tag_id, valid_service_hash_ids, 1 )
2013-10-02 22:06:06 +00:00
2017-03-02 02:14:56 +00:00
self._c.executemany( 'DELETE FROM ' + current_mappings_table_name + ' WHERE service_tag_id = ? AND service_hash_id = ?;', ( ( service_tag_id, service_hash_id ) for service_hash_id in valid_service_hash_ids ) )
self._c.executemany( 'DELETE FROM ' + petitioned_mappings_table_name + ' WHERE service_tag_id = ? AND service_hash_id = ?;', ( ( service_tag_id, service_hash_id ) for service_hash_id in valid_service_hash_ids ) )
2013-10-02 22:06:06 +00:00
2019-09-18 22:40:39 +00:00
self._c.executemany( 'INSERT OR IGNORE INTO ' + deleted_mappings_table_name + ' ( service_tag_id, service_hash_id, account_id, mapping_timestamp ) VALUES ( ?, ?, ?, ? );', ( ( service_tag_id, service_hash_id, account_id, timestamp ) for service_hash_id in valid_service_hash_ids ) )
2013-10-02 22:06:06 +00:00
2019-09-18 22:40:39 +00:00
def _RepositoryDeleteTagParent( self, service_id, account_id, child_service_tag_id, parent_service_tag_id, timestamp ):
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
2015-04-22 22:57:25 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagParentPetitioners( service_id, child_service_tag_id, parent_service_tag_id, 1 )
2016-04-20 20:42:21 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM ' + current_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) )
self._c.execute( 'DELETE FROM ' + petitioned_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) )
2019-09-18 22:40:39 +00:00
self._c.execute( 'INSERT OR IGNORE INTO ' + deleted_tag_parents_table_name + ' ( child_service_tag_id, parent_service_tag_id, account_id, parent_timestamp ) VALUES ( ?, ?, ?, ? );', ( child_service_tag_id, parent_service_tag_id, account_id, timestamp ) )
2016-04-20 20:42:21 +00:00
2019-09-18 22:40:39 +00:00
def _RepositoryDeleteTagSibling( self, service_id, account_id, bad_service_tag_id, good_service_tag_id, timestamp ):
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagSiblingPetitioners( service_id, bad_service_tag_id, good_service_tag_id, 1 )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM ' + current_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) )
self._c.execute( 'DELETE FROM ' + petitioned_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) )
2015-06-03 21:05:13 +00:00
2019-09-18 22:40:39 +00:00
self._c.execute( 'INSERT OR IGNORE INTO ' + deleted_tag_siblings_table_name + ' ( bad_service_tag_id, good_service_tag_id, account_id, sibling_timestamp ) VALUES ( ?, ?, ?, ? );', ( bad_service_tag_id, good_service_tag_id, account_id, timestamp ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
def _RepositoryDenyFilePetition( self, service_id, service_hash_ids ):
self._RepositoryRewardFilePetitioners( service_id, service_hash_ids, -1 )
2017-03-15 20:13:04 +00:00
( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
2017-03-02 02:14:56 +00:00
2017-03-15 20:13:04 +00:00
self._c.executemany( 'DELETE FROM ' + petitioned_files_table_name + ' WHERE service_hash_id = ?;', ( ( service_hash_id, ) for service_hash_id in service_hash_ids ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
def _RepositoryDenyMappingPetition( self, service_id, service_tag_id, service_hash_ids ):
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardMappingPetitioners( service_id, service_tag_id, service_hash_ids, -1 )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._c.executemany( 'DELETE FROM ' + petitioned_mappings_table_name + ' WHERE service_tag_id = ? AND service_hash_id = ?;', ( ( service_tag_id, service_hash_id ) for service_hash_id in service_hash_ids ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
def _RepositoryDenyTagParentPend( self, service_id, child_master_tag_id, parent_master_tag_id ):
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagParentPenders( service_id, child_master_tag_id, parent_master_tag_id, -1 )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM ' + pending_tag_parents_table_name + ' WHERE child_master_tag_id = ? AND parent_master_tag_id = ?;', ( child_master_tag_id, parent_master_tag_id ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
def _RepositoryDenyTagParentPetition( self, service_id, child_service_tag_id, parent_service_tag_id ):
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagParentPetitioners( service_id, child_service_tag_id, parent_service_tag_id, -1 )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM ' + petitioned_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
def _RepositoryDenyTagSiblingPend( self, service_id, bad_master_tag_id, good_master_tag_id ):
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagSiblingPenders( service_id, bad_master_tag_id, good_master_tag_id, -1 )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._c.execute( 'DELETE FROM ' + pending_tag_siblings_table_name + ' WHERE bad_master_tag_id = ? AND good_master_tag_id = ?;', ( bad_master_tag_id, good_master_tag_id ) )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
def _RepositoryDenyTagSiblingPetition( self, service_id, bad_service_tag_id, good_service_tag_id ):
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
self._RepositoryRewardTagSiblingPetitioners( service_id, bad_service_tag_id, good_service_tag_id, -1 )
2015-06-03 21:05:13 +00:00
2017-03-02 02:14:56 +00:00
( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
self._c.execute( 'DELETE FROM ' + petitioned_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) )
def _RepositoryDrop( self, service_id ):
    """Drop every per-service table belonging to the given repository service."""
    
    table_names = [
        *GenerateRepositoryMasterMapTableNames( service_id ),
        *GenerateRepositoryFilesTableNames( service_id ),
        *GenerateRepositoryMappingsTableNames( service_id ),
        *GenerateRepositoryTagParentsTableNames( service_id ),
        *GenerateRepositoryTagSiblingsTableNames( service_id ),
        GenerateRepositoryUpdateTableName( service_id )
    ]
    
    for table_name in table_names:
        
        self._c.execute( 'DROP TABLE ' + table_name + ';' )
        
    
def _RepositoryGenerateImmediateUpdate( self, service_key, account, begin, end ):
    """Resolve the service key and build update objects for [begin, end].
    
    'account' is accepted but not read here — presumably kept for interface parity with other service-level calls; confirm against callers before removing.
    """
    
    service_id = self._GetServiceId( service_key )
    
    return self._RepositoryGenerateUpdates( service_id, begin, end )
    
def _RepositoryGenerateUpdates( self, service_id, begin, end ):
    """Build the serialisable update objects for this service covering the period [begin, end].
    
    Produces definition updates first (hash and tag id<->value definitions created in the
    period), then content updates (file adds/deletes, mapping adds/deletes, tag parent and
    sibling adds/deletes with timestamps in the period). Rows are fed through UpdateBuilders,
    which split them into updates of bounded size.
    
    Returns a list of update objects (definitions updates followed by content updates).
    """
    
    # caps on rows per generated update object
    MAX_DEFINITIONS_ROWS = 50000
    MAX_CONTENT_ROWS = 250000
    
    # mapping rows are batched per-tag in chunks of this many hash ids
    MAX_CONTENT_CHUNK = 25000
    
    updates = []
    
    definitions_update_builder = HydrusNetwork.UpdateBuilder( HydrusNetwork.DefinitionsUpdate, MAX_DEFINITIONS_ROWS )
    content_update_builder = HydrusNetwork.UpdateBuilder( HydrusNetwork.ContentUpdate, MAX_CONTENT_ROWS )
    
    ( service_hash_ids_table_name, service_tag_ids_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    # hash definitions minted in the period
    for ( service_hash_id, hash ) in self._c.execute( 'SELECT service_hash_id, hash FROM ' + service_hash_ids_table_name + ' NATURAL JOIN hashes WHERE hash_id_timestamp BETWEEN ? AND ?;', ( begin, end ) ):
        
        row = ( HC.DEFINITIONS_TYPE_HASHES, service_hash_id, hash )
        
        definitions_update_builder.AddRow( row )
        
    
    # tag definitions minted in the period
    for ( service_tag_id, tag ) in self._c.execute( 'SELECT service_tag_id, tag FROM ' + service_tag_ids_table_name + ' NATURAL JOIN tags WHERE tag_id_timestamp BETWEEN ? AND ?;', ( begin, end ) ):
        
        row = ( HC.DEFINITIONS_TYPE_TAGS, service_tag_id, tag )
        
        definitions_update_builder.AddRow( row )
        
    
    definitions_update_builder.Finish()
    
    updates.extend( definitions_update_builder.GetUpdates() )
    
    #
    
    ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
    
    # files added to the service in the period, with their metadata rows
    table_join = self._RepositoryGetFilesInfoFilesTableJoin( service_id, HC.CONTENT_STATUS_CURRENT )
    
    for ( service_hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in self._c.execute( 'SELECT service_hash_id, size, mime, file_timestamp, width, height, duration, num_frames, num_words FROM ' + table_join + ' WHERE file_timestamp BETWEEN ? AND ?;', ( begin, end ) ):
        
        file_row = ( service_hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words )
        
        content_update_builder.AddRow( ( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, file_row ) )
        
    
    # files deleted from the service in the period
    service_hash_ids = [ service_hash_id for ( service_hash_id, ) in self._c.execute( 'SELECT service_hash_id FROM ' + deleted_files_table_name + ' WHERE file_timestamp BETWEEN ? AND ?;', ( begin, end ) ) ]
    
    for service_hash_id in service_hash_ids:
        
        content_update_builder.AddRow( ( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, service_hash_id ) )
        
    
    #
    
    ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
    
    # mappings added in the period, grouped tag -> [ hash ids ] and chunked so one
    # enormous tag cannot produce an oversized row
    service_tag_ids_to_service_hash_ids = HydrusData.BuildKeyToListDict( self._c.execute( 'SELECT service_tag_id, service_hash_id FROM ' + current_mappings_table_name + ' WHERE mapping_timestamp BETWEEN ? AND ?;', ( begin, end ) ) )
    
    for ( service_tag_id, service_hash_ids ) in list(service_tag_ids_to_service_hash_ids.items()):
        
        for block_of_service_hash_ids in HydrusData.SplitListIntoChunks( service_hash_ids, MAX_CONTENT_CHUNK ):
            
            row_weight = len( block_of_service_hash_ids )
            
            content_update_builder.AddRow( ( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( service_tag_id, block_of_service_hash_ids ) ), row_weight )
            
        
    
    # mappings deleted in the period, same grouping/chunking
    service_tag_ids_to_service_hash_ids = HydrusData.BuildKeyToListDict( self._c.execute( 'SELECT service_tag_id, service_hash_id FROM ' + deleted_mappings_table_name + ' WHERE mapping_timestamp BETWEEN ? AND ?;', ( begin, end ) ) )
    
    for ( service_tag_id, service_hash_ids ) in list(service_tag_ids_to_service_hash_ids.items()):
        
        for block_of_service_hash_ids in HydrusData.SplitListIntoChunks( service_hash_ids, MAX_CONTENT_CHUNK ):
            
            row_weight = len( block_of_service_hash_ids )
            
            content_update_builder.AddRow( ( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( service_tag_id, block_of_service_hash_ids ) ), row_weight )
            
        
    
    #
    
    ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
    
    # tag parent pairs added in the period
    pairs = self._c.execute( 'SELECT child_service_tag_id, parent_service_tag_id FROM ' + current_tag_parents_table_name + ' WHERE parent_timestamp BETWEEN ? AND ?;', ( begin, end ) ).fetchall()
    
    for pair in pairs:
        
        content_update_builder.AddRow( ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_ADD, pair ) )
        
    
    # tag parent pairs deleted in the period
    pairs = self._c.execute( 'SELECT child_service_tag_id, parent_service_tag_id FROM ' + deleted_tag_parents_table_name + ' WHERE parent_timestamp BETWEEN ? AND ?;', ( begin, end ) ).fetchall()
    
    for pair in pairs:
        
        content_update_builder.AddRow( ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_DELETE, pair ) )
        
    
    #
    
    ( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
    
    # tag sibling pairs added in the period
    pairs = self._c.execute( 'SELECT bad_service_tag_id, good_service_tag_id FROM ' + current_tag_siblings_table_name + ' WHERE sibling_timestamp BETWEEN ? AND ?;', ( begin, end ) ).fetchall()
    
    for pair in pairs:
        
        content_update_builder.AddRow( ( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_ADD, pair ) )
        
    
    # tag sibling pairs deleted in the period
    pairs = self._c.execute( 'SELECT bad_service_tag_id, good_service_tag_id FROM ' + deleted_tag_siblings_table_name + ' WHERE sibling_timestamp BETWEEN ? AND ?;', ( begin, end ) ).fetchall()
    
    for pair in pairs:
        
        content_update_builder.AddRow( ( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_DELETE, pair ) )
        
    
    #
    
    content_update_builder.Finish()
    
    updates.extend( content_update_builder.GetUpdates() )
    
    return updates
    
def _RepositoryGetAccountInfo( self, service_id, account_id ):
    """Summarise an account's contributions to this service.
    
    Returns a dict with num_files, num_files_bytes, num_mappings (sampled, capped at
    5000) and petition_score.
    """
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
    
    table_join = f'files_info NATURAL JOIN {hash_id_map_table_name} NATURAL JOIN {current_files_table_name}'
    
    ( num_files, num_files_bytes ) = self._c.execute( f'SELECT COUNT( * ), SUM( size ) FROM {table_join} WHERE account_id = ?;', ( account_id, ) ).fetchone()
    
    # SUM( size ) is NULL when the account has no current files
    if num_files_bytes is None:
        
        num_files_bytes = 0
        
    
    #
    
    ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
    
    # fetch at most 5000 rows so a huge account does not stall this query; the count is capped accordingly
    num_mappings = len( self._c.execute( f'SELECT 1 FROM {current_mappings_table_name} WHERE account_id = ? LIMIT 5000;', ( account_id, ) ).fetchall() )
    
    #
    
    result = self._c.execute( 'SELECT score FROM account_scores WHERE service_id = ? AND account_id = ? AND score_type = ?;', ( service_id, account_id, HC.SCORE_PETITION ) ).fetchone()
    
    if result is None:
        
        petition_score = 0
        
    else:
        
        ( petition_score, ) = result
        
    
    return {
        'num_files' : num_files,
        'num_files_bytes' : num_files_bytes,
        'num_mappings' : num_mappings,
        'petition_score' : petition_score
    }
    
def _RepositoryGetCurrentMappingsCount( self, service_id, service_tag_id ):
    """Return the number of current mappings this service holds for the given service tag id."""
    
    # only the current mappings table is needed here
    current_mappings_table_name = GenerateRepositoryMappingsTableNames( service_id )[0]
    
    result = self._c.execute( f'SELECT COUNT( * ) FROM {current_mappings_table_name} WHERE service_tag_id = ?;', ( service_tag_id, ) ).fetchone()
    
    ( count, ) = result
    
    return count
    
def _RepositoryGetFilesInfoFilesTableJoin( self, service_id, content_status ):
    """Return the SQL join clause linking files_info to this service's file table for the given status.
    
    content_status is one of HC.CONTENT_STATUS_CURRENT / DELETED / PENDING / PETITIONED,
    selecting which per-service file table participates in the join.
    
    Raises NotImplementedError for an unrecognised content_status. (Previously an unknown
    status silently fell off the if/elif chain and returned None, which surfaced later as a
    confusing TypeError when the caller concatenated the result into SQL.)
    """
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
    
    statuses_to_table_names = {
        HC.CONTENT_STATUS_CURRENT : current_files_table_name,
        HC.CONTENT_STATUS_DELETED : deleted_files_table_name,
        HC.CONTENT_STATUS_PENDING : pending_files_table_name,
        HC.CONTENT_STATUS_PETITIONED : petitioned_files_table_name
    }
    
    if content_status not in statuses_to_table_names:
        
        raise NotImplementedError( 'Unknown content status: {}'.format( content_status ) )
        
    
    return 'files_info NATURAL JOIN ' + hash_id_map_table_name + ' NATURAL JOIN ' + statuses_to_table_names[ content_status ]
    
def _RepositoryGetFilePetition( self, service_id ):
    """Pick a random outstanding (account, reason) file-petition group and package it as a Petition.
    
    Raises NotFoundException when there are no petitioned files.
    """
    
    ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
    
    # sample up to 100 distinct (account, reason) groups and choose one at random
    petition_groups = self._c.execute( 'SELECT DISTINCT account_id, reason_id FROM ' + petitioned_files_table_name + ' LIMIT 100;' ).fetchall()
    
    if len( petition_groups ) == 0:
        
        raise HydrusExceptions.NotFoundException( 'No petitions!' )
        
    
    ( petitioner_account_id, reason_id ) = random.choice( petition_groups )
    
    petitioner_account = self._GetAccount( service_id, petitioner_account_id )
    reason = self._GetReason( reason_id )
    
    service_hash_ids = [ service_hash_id for ( service_hash_id, ) in self._c.execute( 'SELECT service_hash_id FROM ' + petitioned_files_table_name + ' WHERE account_id = ? AND reason_id = ?;', ( petitioner_account_id, reason_id ) ) ]
    
    master_hash_ids = self._RepositoryGetMasterHashIds( service_id, service_hash_ids )
    
    hashes = self._GetHashes( master_hash_ids )
    
    contents = [ HydrusNetwork.Content( HC.CONTENT_TYPE_FILES, hashes ) ]
    
    return HydrusNetwork.Petition( HC.CONTENT_UPDATE_PETITION, petitioner_account, reason, contents )
    
def _RepositoryGetIPTimestamp( self, service_key, account, hash ):
    """Return the ( ip, ip_timestamp ) row recorded for the given file hash on this service.
    
    'account' is accepted but not read here. Raises NotFoundException when no ip row exists.
    """
    
    service_id = self._GetServiceId( service_key )
    
    # only the ip addresses table is needed here
    ip_addresses_table_name = GenerateRepositoryFilesTableNames( service_id )[4]
    
    master_hash_id = self._GetMasterHashId( hash )
    
    result = self._c.execute( f'SELECT ip, ip_timestamp FROM {ip_addresses_table_name} WHERE master_hash_id = ?;', ( master_hash_id, ) ).fetchone()
    
    if result is None:
        
        raise HydrusExceptions.NotFoundException( 'Did not find ip information for that hash.' )
        
    
    return result
    
def _RepositoryGetMappingPetition( self, service_id ):
    """Pick a random outstanding (account, reason) mapping-petition group and package it as a Petition.
    
    The group's (tag -> hash ids) rows are shuffled, then filtered so the returned petition
    only mixes rows of similar weight (number of hashes) and a single tag namespace, and is
    capped in total size. Raises NotFoundException when there are no petitioned mappings.
    """
    
    ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
    
    # sample up to 100 distinct (account, reason) groups and choose one at random
    result = self._c.execute( 'SELECT DISTINCT account_id, reason_id FROM ' + petitioned_mappings_table_name + ' LIMIT 100;' ).fetchall()
    
    if len( result ) == 0:
        
        raise HydrusExceptions.NotFoundException( 'No petitions!' )
        
    
    result = random.choice( result )
    
    ( petitioner_account_id, reason_id ) = result
    
    action = HC.CONTENT_UPDATE_PETITION
    
    petitioner_account = self._GetAccount( service_id, petitioner_account_id )
    
    reason = self._GetReason( reason_id )
    
    tag_ids_to_hash_ids = HydrusData.BuildKeyToListDict( self._c.execute( 'SELECT service_tag_id, service_hash_id FROM ' + petitioned_mappings_table_name + ' WHERE account_id = ? AND reason_id = ?;', ( petitioner_account_id, reason_id ) ) )
    
    contents = []
    
    total_num_petitions = 0
    total_weight = 0
    
    # weight band, fixed by the first row encountered
    min_weight_permitted = None
    max_weight_permitted = None
    
    # namespace lock, fixed by the first accepted tag
    petition_namespace = None
    
    # NOTE(review): max_total_weight is assigned per weight band below but never read --
    # the break condition at the bottom uses hardcoded 500/50000 instead. Looks like dead
    # code from an older cap scheme; confirm before removing or re-wiring.
    max_total_weight = None
    
    petition_pairs = list( tag_ids_to_hash_ids.items() )
    
    random.shuffle( petition_pairs )
    
    for ( service_tag_id, service_hash_ids ) in petition_pairs:
        
        content_weight = len( service_hash_ids )
        
        if min_weight_permitted is None:
            
            # group petitions of similar weight together rather than mixing weight 5000 in with a hundred weight 1s
            
            if content_weight == 1:
                
                min_weight_permitted = 1
                max_weight_permitted = 1
                
                max_total_weight = 20000
                
            elif content_weight < 10:
                
                min_weight_permitted = 2
                max_weight_permitted = 9
                
                max_total_weight = 5000
                
            elif content_weight < 50:
                
                min_weight_permitted = 10
                max_weight_permitted = 49
                
                max_total_weight = 2000
                
            else:
                
                min_weight_permitted = 50
                max_weight_permitted = None
                
                max_total_weight = 500
                
            
        else:
            
            # outside the chosen weight band: skip this row
            if content_weight < min_weight_permitted:
                
                continue
                
            
            if max_weight_permitted is not None and content_weight > max_weight_permitted:
                
                continue
                
            
        
        master_tag_id = self._RepositoryGetMasterTagId( service_id, service_tag_id )
        
        tag = self._GetTag( master_tag_id )
        
        ( namespace, subtag ) = HydrusTags.SplitTag( tag )
        
        if petition_namespace is None:
            
            petition_namespace = namespace
            
        
        # keep the petition to a single namespace
        if namespace != petition_namespace:
            
            continue
            
        
        master_hash_ids = self._RepositoryGetMasterHashIds( service_id, service_hash_ids )
        
        hashes = self._GetHashes( master_hash_ids )
        
        content = HydrusNetwork.Content( HC.CONTENT_TYPE_MAPPINGS, ( tag, hashes ) )
        
        contents.append( content )
        
        total_num_petitions += 1
        
        total_weight += content_weight
        
        # overall size cap for the returned petition
        if total_num_petitions > 500 or total_weight > 50000:
            
            break
            
        
    
    return HydrusNetwork.Petition( action, petitioner_account, reason, contents )
    
def _RepositoryGetMasterHashIds( self, service_id, service_hash_ids ):
    """Translate service hash ids to master hash ids, raising DataMissing if any are unmapped."""
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    select_statement = f'SELECT master_hash_id FROM {hash_id_map_table_name} WHERE service_hash_id = ?;'
    
    rows = self._ExecuteManySelectSingleParam( select_statement, service_hash_ids )
    
    master_hash_ids = [ master_hash_id for ( master_hash_id, ) in rows ]
    
    # every service id must resolve to exactly one master id; a shortfall means a broken map
    if len( master_hash_ids ) != len( service_hash_ids ):
        
        raise HydrusExceptions.DataMissing( 'Missing master_hash_id map error!' )
        
    
    return master_hash_ids
    
def _RepositoryGetMasterTagId( self, service_id, service_tag_id ):
    """Translate one service tag id to its master tag id, raising DataMissing if unmapped."""
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    row = self._c.execute( f'SELECT master_tag_id FROM {tag_id_map_table_name} WHERE service_tag_id = ?;', ( service_tag_id, ) ).fetchone()
    
    if row is None:
        
        raise HydrusExceptions.DataMissing( 'Missing master_tag_id map error!' )
        
    
    ( master_tag_id, ) = row
    
    return master_tag_id
    
def _RepositoryGetNumPetitions( self, service_key, account ):
    """Count outstanding petition groups per content type for the moderation UI.
    
    For each content type the calling account may moderate, counts DISTINCT
    (account, reason) groups (capped at 1000) in the relevant pending/petitioned tables.
    
    Returns a list of ( content_type, content_status, num_petitions ) tuples.
    
    BUG FIX: the tag parent counts were previously gated on the TAG_SIBLINGS moderation
    permission and the tag sibling counts on the TAG_PARENTS permission — the two checks
    were swapped. Each section now checks the permission for the content type it counts.
    """
    
    service_id = self._GetServiceId( service_key )
    
    petition_count_info = []
    
    ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
    
    if account.HasPermission( HC.CONTENT_TYPE_FILES, HC.PERMISSION_ACTION_MODERATE ):
        
        ( num_petitions, ) = self._c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id FROM ' + petitioned_files_table_name + ' LIMIT 1000 );' ).fetchone()
        
        petition_count_info.append( ( HC.CONTENT_TYPE_FILES, HC.CONTENT_STATUS_PETITIONED, num_petitions ) )
        
    
    ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
    
    if account.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_MODERATE ):
        
        # mappings group per-tag as well, so the DISTINCT includes service_tag_id
        ( num_petitions, ) = self._c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT service_tag_id, account_id, reason_id FROM ' + petitioned_mappings_table_name + ' LIMIT 1000 );' ).fetchone()
        
        petition_count_info.append( ( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_STATUS_PETITIONED, num_petitions ) )
        
    
    ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
    
    if account.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_MODERATE ):
        
        ( num_petitions, ) = self._c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id FROM ' + pending_tag_parents_table_name + ' LIMIT 1000 );' ).fetchone()
        
        petition_count_info.append( ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_STATUS_PENDING, num_petitions ) )
        
        ( num_petitions, ) = self._c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id FROM ' + petitioned_tag_parents_table_name + ' LIMIT 1000 );' ).fetchone()
        
        petition_count_info.append( ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_STATUS_PETITIONED, num_petitions ) )
        
    
    ( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
    
    if account.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_MODERATE ):
        
        ( num_petitions, ) = self._c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id FROM ' + pending_tag_siblings_table_name + ' LIMIT 1000 );' ).fetchone()
        
        petition_count_info.append( ( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_STATUS_PENDING, num_petitions ) )
        
        ( num_petitions, ) = self._c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id FROM ' + petitioned_tag_siblings_table_name + ' LIMIT 1000 );' ).fetchone()
        
        petition_count_info.append( ( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_STATUS_PETITIONED, num_petitions ) )
        
    
    return petition_count_info
    
def _RepositoryGetPetition( self, service_key, account, content_type, status ):
    """Fetch one outstanding petition of the given content type; for parents/siblings, 'status' picks pending vs petitioned."""
    
    service_id = self._GetServiceId( service_key )
    
    wants_pending = status == HC.CONTENT_STATUS_PENDING
    
    if content_type == HC.CONTENT_TYPE_FILES:
        
        petition = self._RepositoryGetFilePetition( service_id )
        
    elif content_type == HC.CONTENT_TYPE_MAPPINGS:
        
        petition = self._RepositoryGetMappingPetition( service_id )
        
    elif content_type == HC.CONTENT_TYPE_TAG_PARENTS:
        
        petition = self._RepositoryGetTagParentPend( service_id ) if wants_pending else self._RepositoryGetTagParentPetition( service_id )
        
    elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
        
        petition = self._RepositoryGetTagSiblingPend( service_id ) if wants_pending else self._RepositoryGetTagSiblingPetition( service_id )
        
    
    return petition
    
def _RepositoryGetServiceHashId( self, service_id, master_hash_id, timestamp ):
    """Return the service hash id for a master hash id, creating the map row with 'timestamp' if it is new to this service."""
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    row = self._c.execute( f'SELECT service_hash_id FROM {hash_id_map_table_name} WHERE master_hash_id = ?;', ( master_hash_id, ) ).fetchone()
    
    if row is not None:
        
        ( service_hash_id, ) = row
        
        return service_hash_id
        
    
    # unknown to this service--mint a new service id now
    self._c.execute( f'INSERT INTO {hash_id_map_table_name} ( master_hash_id, hash_id_timestamp ) VALUES ( ?, ? );', ( master_hash_id, timestamp ) )
    
    return self._c.lastrowid
    
def _RepositoryGetServiceHashIds( self, service_id, master_hash_ids, timestamp ):
    """Map master hash ids to this service's hash ids, creating map rows with 'timestamp' for any that are new.
    
    Returns the set of service hash ids. Previously this ran one SELECT per master hash id;
    both lookups now go through self._ExecuteManySelectSingleParam (as
    _RepositoryGetMasterHashIds already does), saving a round trip per id.
    """
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    master_hash_ids = set( master_hash_ids )
    
    select_statement = 'SELECT master_hash_id, service_hash_id FROM ' + hash_id_map_table_name + ' WHERE master_hash_id = ?;'
    
    # batch lookup of the ids this service already knows
    master_hash_ids_to_service_hash_ids = dict( self._ExecuteManySelectSingleParam( select_statement, master_hash_ids ) )
    
    service_hash_ids = set( master_hash_ids_to_service_hash_ids.values() )
    
    master_hash_ids_not_in_table = master_hash_ids.difference( master_hash_ids_to_service_hash_ids.keys() )
    
    if len( master_hash_ids_not_in_table ) > 0:
        
        # mint service ids for the unknowns, then fetch the autoincremented values back
        self._c.executemany( 'INSERT INTO ' + hash_id_map_table_name + ' ( master_hash_id, hash_id_timestamp ) VALUES ( ?, ? );', ( ( master_hash_id, timestamp ) for master_hash_id in master_hash_ids_not_in_table ) )
        
        select_statement = 'SELECT service_hash_id FROM ' + hash_id_map_table_name + ' WHERE master_hash_id = ?;'
        
        service_hash_ids.update( service_hash_id for ( service_hash_id, ) in self._ExecuteManySelectSingleParam( select_statement, master_hash_ids_not_in_table ) )
        
    
    return service_hash_ids
    
def _RepositoryGetServiceTagId( self, service_id, master_tag_id, timestamp ):
    """Return the service tag id for a master tag id, creating the map row with 'timestamp' if it is new to this service."""
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    row = self._c.execute( f'SELECT service_tag_id FROM {tag_id_map_table_name} WHERE master_tag_id = ?;', ( master_tag_id, ) ).fetchone()
    
    if row is not None:
        
        ( service_tag_id, ) = row
        
        return service_tag_id
        
    
    # unknown to this service--mint a new service id now
    self._c.execute( f'INSERT INTO {tag_id_map_table_name} ( master_tag_id, tag_id_timestamp ) VALUES ( ?, ? );', ( master_tag_id, timestamp ) )
    
    return self._c.lastrowid
    
def _RepositoryGetTagParentPend( self, service_id ):
    """Pick a random pending tag-parent (account, reason) group and return it as a PEND Petition.
    
    Pairs are filtered to the namespace of the first parent tag seen, keeping each petition
    to one namespace. Raises NotFoundException when nothing is pending.
    """
    
    ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
    
    # sample up to 100 distinct (account, reason) groups and choose one at random
    petition_groups = self._c.execute( 'SELECT DISTINCT account_id, reason_id FROM ' + pending_tag_parents_table_name + ' LIMIT 100;' ).fetchall()
    
    if len( petition_groups ) == 0:
        
        raise HydrusExceptions.NotFoundException( 'No petitions!' )
        
    
    ( petitioner_account_id, reason_id ) = random.choice( petition_groups )
    
    petitioner_account = self._GetAccount( service_id, petitioner_account_id )
    reason = self._GetReason( reason_id )
    
    pairs = self._c.execute( 'SELECT child_master_tag_id, parent_master_tag_id FROM ' + pending_tag_parents_table_name + ' WHERE account_id = ? AND reason_id = ?;', ( petitioner_account_id, reason_id ) ).fetchall()
    
    contents = []
    
    chosen_parent_namespace = None
    
    for ( child_master_tag_id, parent_master_tag_id ) in pairs:
        
        parent_tag = self._GetTag( parent_master_tag_id )
        
        ( parent_namespace, parent_subtag ) = HydrusTags.SplitTag( parent_tag )
        
        if chosen_parent_namespace is None:
            
            # lock the petition to the first parent namespace we see
            chosen_parent_namespace = parent_namespace
            
        
        if parent_namespace != chosen_parent_namespace:
            
            continue
            
        
        child_tag = self._GetTag( child_master_tag_id )
        
        contents.append( HydrusNetwork.Content( HC.CONTENT_TYPE_TAG_PARENTS, ( child_tag, parent_tag ) ) )
        
    
    return HydrusNetwork.Petition( HC.CONTENT_UPDATE_PEND, petitioner_account, reason, contents )
    
def _RepositoryGetTagParentPetition( self, service_id ):
    """Pick a random petitioned tag-parent (account, reason) group and return it as a PETITION Petition.
    
    Pairs are filtered to the namespace of the first parent tag seen, keeping each petition
    to one namespace. Raises NotFoundException when nothing is petitioned.
    """
    
    ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
    
    # sample up to 100 distinct (account, reason) groups and choose one at random
    petition_groups = self._c.execute( 'SELECT DISTINCT account_id, reason_id FROM ' + petitioned_tag_parents_table_name + ' LIMIT 100;' ).fetchall()
    
    if len( petition_groups ) == 0:
        
        raise HydrusExceptions.NotFoundException( 'No petitions!' )
        
    
    ( petitioner_account_id, reason_id ) = random.choice( petition_groups )
    
    petitioner_account = self._GetAccount( service_id, petitioner_account_id )
    reason = self._GetReason( reason_id )
    
    pairs = self._c.execute( 'SELECT child_service_tag_id, parent_service_tag_id FROM ' + petitioned_tag_parents_table_name + ' WHERE account_id = ? AND reason_id = ?;', ( petitioner_account_id, reason_id ) ).fetchall()
    
    contents = []
    
    chosen_parent_namespace = None
    
    for ( child_service_tag_id, parent_service_tag_id ) in pairs:
        
        # the petitioned table stores service ids, so translate both up to master first
        child_master_tag_id = self._RepositoryGetMasterTagId( service_id, child_service_tag_id )
        parent_master_tag_id = self._RepositoryGetMasterTagId( service_id, parent_service_tag_id )
        
        parent_tag = self._GetTag( parent_master_tag_id )
        
        ( parent_namespace, parent_subtag ) = HydrusTags.SplitTag( parent_tag )
        
        if chosen_parent_namespace is None:
            
            # lock the petition to the first parent namespace we see
            chosen_parent_namespace = parent_namespace
            
        
        if parent_namespace != chosen_parent_namespace:
            
            continue
            
        
        child_tag = self._GetTag( child_master_tag_id )
        
        contents.append( HydrusNetwork.Content( HC.CONTENT_TYPE_TAG_PARENTS, ( child_tag, parent_tag ) ) )
        
    
    return HydrusNetwork.Petition( HC.CONTENT_UPDATE_PETITION, petitioner_account, reason, contents )
    
def _RepositoryGetTagSiblingPend( self, service_id ):
    """Pick a random pending tag-sibling (account, reason) group and return it as a PEND Petition.
    
    Pairs are filtered to the namespace of the first 'good' tag seen, keeping each petition
    to one namespace. Raises NotFoundException when nothing is pending.
    """
    
    ( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
    
    # sample up to 100 distinct (account, reason) groups and choose one at random
    petition_groups = self._c.execute( 'SELECT DISTINCT account_id, reason_id FROM ' + pending_tag_siblings_table_name + ' LIMIT 100;' ).fetchall()
    
    if len( petition_groups ) == 0:
        
        raise HydrusExceptions.NotFoundException( 'No petitions!' )
        
    
    ( petitioner_account_id, reason_id ) = random.choice( petition_groups )
    
    petitioner_account = self._GetAccount( service_id, petitioner_account_id )
    reason = self._GetReason( reason_id )
    
    pairs = self._c.execute( 'SELECT bad_master_tag_id, good_master_tag_id FROM ' + pending_tag_siblings_table_name + ' WHERE account_id = ? AND reason_id = ?;', ( petitioner_account_id, reason_id ) ).fetchall()
    
    contents = []
    
    chosen_good_namespace = None
    
    for ( bad_master_tag_id, good_master_tag_id ) in pairs:
        
        good_tag = self._GetTag( good_master_tag_id )
        
        ( good_namespace, good_subtag ) = HydrusTags.SplitTag( good_tag )
        
        if chosen_good_namespace is None:
            
            # lock the petition to the first good-tag namespace we see
            chosen_good_namespace = good_namespace
            
        
        if good_namespace != chosen_good_namespace:
            
            continue
            
        
        bad_tag = self._GetTag( bad_master_tag_id )
        
        contents.append( HydrusNetwork.Content( HC.CONTENT_TYPE_TAG_SIBLINGS, ( bad_tag, good_tag ) ) )
        
    
    return HydrusNetwork.Petition( HC.CONTENT_UPDATE_PEND, petitioner_account, reason, contents )
    
def _RepositoryGetTagSiblingPetition( self, service_id ):
    """Build a Petition for a random batch of petitioned (delete-requested) tag siblings.
    
    Mirrors _RepositoryGetTagSiblingPend but reads the petitioned table, whose rows
    hold service-level tag ids that must be mapped back to master ids. Raises
    NotFoundException when there is nothing petitioned.
    """
    
    table_names = GenerateRepositoryTagSiblingsTableNames( service_id )
    petitioned_tag_siblings_table_name = table_names[3]
    
    candidates = self._c.execute( 'SELECT DISTINCT account_id, reason_id FROM ' + petitioned_tag_siblings_table_name + ' LIMIT 100;' ).fetchall()
    
    if not candidates:
        
        raise HydrusExceptions.NotFoundException( 'No petitions!' )
        
    
    ( petitioner_account_id, reason_id ) = random.choice( candidates )
    
    petitioner_account = self._GetAccount( service_id, petitioner_account_id )
    reason = self._GetReason( reason_id )
    
    service_pairs = self._c.execute( 'SELECT bad_service_tag_id, good_service_tag_id FROM ' + petitioned_tag_siblings_table_name + ' WHERE account_id = ? AND reason_id = ?;', ( petitioner_account_id, reason_id ) ).fetchall()
    
    contents = []
    
    # only ship one namespace's worth of siblings per petition, keyed off the first good tag seen
    chosen_namespace = None
    
    for ( bad_service_tag_id, good_service_tag_id ) in service_pairs:
        
        bad_master_tag_id = self._RepositoryGetMasterTagId( service_id, bad_service_tag_id )
        good_master_tag_id = self._RepositoryGetMasterTagId( service_id, good_service_tag_id )
        
        good_tag = self._GetTag( good_master_tag_id )
        
        ( good_namespace, good_subtag ) = HydrusTags.SplitTag( good_tag )
        
        if chosen_namespace is None:
            
            chosen_namespace = good_namespace
            
        
        if good_namespace != chosen_namespace:
            
            continue
            
        
        bad_tag = self._GetTag( bad_master_tag_id )
        
        contents.append( HydrusNetwork.Content( HC.CONTENT_TYPE_TAG_SIBLINGS, ( bad_tag, good_tag ) ) )
        
    
    return HydrusNetwork.Petition( HC.CONTENT_UPDATE_PETITION, petitioner_account, reason, contents )
def _RepositoryHasFile( self, service_key, hash ):
if not self._MasterHashExists( hash ):
return ( False, None )
service_id = self._GetServiceId( service_key )
master_hash_id = self._GetMasterHashId( hash )
table_join = self._RepositoryGetFilesInfoFilesTableJoin( service_id, HC.CONTENT_STATUS_CURRENT )
result = self._c.execute( 'SELECT mime FROM ' + table_join + ' WHERE master_hash_id = ?;', ( master_hash_id, ) ).fetchone()
if result is None:
return ( False, None )
( mime, ) = result
return ( True, mime )
def _RepositoryPendTagParent( self, service_id, account_id, child_master_tag_id, parent_master_tag_id, reason_id ):
    """Record a client's pend to add a child->parent tag relationship.
    
    Does nothing unless both tags already exist on the service, and skips the
    write when the pair is already current.
    """
    
    child_exists = self._RepositoryServiceTagIdExists( service_id, child_master_tag_id )
    parent_exists = self._RepositoryServiceTagIdExists( service_id, parent_master_tag_id )
    
    if not ( child_exists and parent_exists ):
        
        return
        
    
    table_names = GenerateRepositoryTagParentsTableNames( service_id )
    current_tag_parents_table_name = table_names[0]
    pending_tag_parents_table_name = table_names[2]
    
    child_service_tag_id = self._RepositoryGetServiceTagId( service_id, child_master_tag_id, HydrusData.GetNow() )
    parent_service_tag_id = self._RepositoryGetServiceTagId( service_id, parent_master_tag_id, HydrusData.GetNow() )
    
    already_current = self._c.execute( 'SELECT 1 FROM ' + current_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) ).fetchone()
    
    if already_current is not None:
        
        # nothing to pend--the relationship is already live
        return
        
    
    self._c.execute( 'REPLACE INTO ' + pending_tag_parents_table_name + ' ( child_master_tag_id, parent_master_tag_id, account_id, reason_id ) VALUES ( ?, ?, ?, ? );', ( child_master_tag_id, parent_master_tag_id, account_id, reason_id ) )
def _RepositoryPendTagSibling( self, service_id, account_id, bad_master_tag_id, good_master_tag_id, reason_id ):
    """Record a client's pend to add a bad->good tag sibling.
    
    Does nothing unless both tags already exist on the service, and skips the
    write when the pair is already current.
    """
    
    bad_exists = self._RepositoryServiceTagIdExists( service_id, bad_master_tag_id )
    good_exists = self._RepositoryServiceTagIdExists( service_id, good_master_tag_id )
    
    if not ( bad_exists and good_exists ):
        
        return
        
    
    table_names = GenerateRepositoryTagSiblingsTableNames( service_id )
    current_tag_siblings_table_name = table_names[0]
    pending_tag_siblings_table_name = table_names[2]
    
    bad_service_tag_id = self._RepositoryGetServiceTagId( service_id, bad_master_tag_id, HydrusData.GetNow() )
    good_service_tag_id = self._RepositoryGetServiceTagId( service_id, good_master_tag_id, HydrusData.GetNow() )
    
    already_current = self._c.execute( 'SELECT 1 FROM ' + current_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) ).fetchone()
    
    if already_current is not None:
        
        # nothing to pend--the relationship is already live
        return
        
    
    self._c.execute( 'REPLACE INTO ' + pending_tag_siblings_table_name + ' ( bad_master_tag_id, good_master_tag_id, account_id, reason_id ) VALUES ( ?, ?, ?, ? );', ( bad_master_tag_id, good_master_tag_id, account_id, reason_id ) )
def _RepositoryPetitionFiles( self, service_id, account_id, service_hash_ids, reason_id ):
    """Record petitions to delete the given files.
    
    Only ids that are actually current on the service are petitioned; the rest
    are silently dropped.
    """
    
    table_names = GenerateRepositoryFilesTableNames( service_id )
    current_files_table_name = table_names[0]
    petitioned_files_table_name = table_names[3]
    
    select_statement = 'SELECT service_hash_id FROM ' + current_files_table_name + ' WHERE service_hash_id = ?;'
    
    valid_service_hash_ids = [ service_hash_id for ( service_hash_id, ) in self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ) ]
    
    insert_rows = ( ( service_hash_id, account_id, reason_id ) for service_hash_id in valid_service_hash_ids )
    
    self._c.executemany( 'REPLACE INTO ' + petitioned_files_table_name + ' ( service_hash_id, account_id, reason_id ) VALUES ( ?, ?, ? );', insert_rows )
def _RepositoryPetitionMappings( self, service_id, account_id, service_tag_id, service_hash_ids, reason_id ):
    """Record petitions to delete the given tag->file mappings.
    
    Only hash ids currently mapped to the tag are petitioned; the rest are
    silently dropped.
    """
    
    table_names = GenerateRepositoryMappingsTableNames( service_id )
    current_mappings_table_name = table_names[0]
    petitioned_mappings_table_name = table_names[3]
    
    # service_tag_id is an integer from our own id space, so inlining it here is safe
    select_statement = 'SELECT service_hash_id FROM ' + current_mappings_table_name + ' WHERE service_tag_id = ' + str( service_tag_id ) + ' AND service_hash_id = ?;'
    
    valid_service_hash_ids = [ service_hash_id for ( service_hash_id, ) in self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ) ]
    
    insert_rows = ( ( service_tag_id, service_hash_id, account_id, reason_id ) for service_hash_id in valid_service_hash_ids )
    
    self._c.executemany( 'REPLACE INTO ' + petitioned_mappings_table_name + ' ( service_tag_id, service_hash_id, account_id, reason_id ) VALUES ( ?, ?, ?, ? );', insert_rows )
def _RepositoryPetitionTagParent( self, service_id, account_id, child_service_tag_id, parent_service_tag_id, reason_id ):
    """Record a petition to delete a current child->parent tag relationship.
    
    A pair that is not currently live cannot be petitioned, so those requests
    are ignored.
    """
    
    table_names = GenerateRepositoryTagParentsTableNames( service_id )
    current_tag_parents_table_name = table_names[0]
    petitioned_tag_parents_table_name = table_names[3]
    
    pair_is_current = self._c.execute( 'SELECT 1 FROM ' + current_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) ).fetchone()
    
    if pair_is_current is None:
        
        return
        
    
    self._c.execute( 'REPLACE INTO ' + petitioned_tag_parents_table_name + ' ( child_service_tag_id, parent_service_tag_id, account_id, reason_id ) VALUES ( ?, ?, ?, ? );', ( child_service_tag_id, parent_service_tag_id, account_id, reason_id ) )
def _RepositoryPetitionTagSibling( self, service_id, account_id, bad_service_tag_id, good_service_tag_id, reason_id ):
    """Record a petition to delete a current bad->good tag sibling.
    
    A pair that is not currently live cannot be petitioned, so those requests
    are ignored.
    """
    
    table_names = GenerateRepositoryTagSiblingsTableNames( service_id )
    current_tag_siblings_table_name = table_names[0]
    petitioned_tag_siblings_table_name = table_names[3]
    
    pair_is_current = self._c.execute( 'SELECT 1 FROM ' + current_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) ).fetchone()
    
    if pair_is_current is None:
        
        return
        
    
    self._c.execute( 'REPLACE INTO ' + petitioned_tag_siblings_table_name + ' ( bad_service_tag_id, good_service_tag_id, account_id, reason_id ) VALUES ( ?, ?, ?, ? );', ( bad_service_tag_id, good_service_tag_id, account_id, reason_id ) )
def _RepositoryProcessAddFile( self, service, account, file_dict, timestamp ):
    """Handle an uploaded file from a client, subject to the account's permissions.
    
    Accounts with CREATE permission are subject to the service's storage quota;
    accounts with MODERATE permission bypass the quota and may also resurrect
    previously deleted files (overwrite_deleted). Accounts with neither
    permission are silently ignored.
    Raises ConflictException when accepting the file would exceed max storage.
    """
    
    service_key = service.GetServiceKey()
    service_id = self._GetServiceId( service_key )
    account_key = account.GetAccountKey()
    account_id = self._GetAccountId( account_key )
    can_create_files = account.HasPermission( HC.CONTENT_TYPE_FILES, HC.PERMISSION_ACTION_CREATE )
    can_moderate_files = account.HasPermission( HC.CONTENT_TYPE_FILES, HC.PERMISSION_ACTION_MODERATE )
    
    # later add pend file here however that is neat
    
    if can_create_files or can_moderate_files:
        
        if not can_moderate_files:
            
            # quota check: creators must fit within max storage, counting both
            # current and pending bytes plus this new file
            max_storage = service.GetMaxStorage()
            
            if max_storage is not None:
                
                ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
                
                table_join = self._RepositoryGetFilesInfoFilesTableJoin( service_id, HC.CONTENT_STATUS_CURRENT )
                
                # SUM() is NULL when the table is empty, hence the None guards below
                ( total_current_storage, ) = self._c.execute( 'SELECT SUM( size ) FROM ' + table_join + ';' ).fetchone()
                
                if total_current_storage is None:
                    
                    total_current_storage = 0
                    
                
                table_join = self._RepositoryGetFilesInfoFilesTableJoin( service_id, HC.CONTENT_STATUS_PENDING )
                
                ( total_pending_storage, ) = self._c.execute( 'SELECT SUM( size ) FROM ' + table_join + ';' ).fetchone()
                
                if total_pending_storage is None:
                    
                    total_pending_storage = 0
                    
                
                if total_current_storage + total_pending_storage + file_dict[ 'size' ] > max_storage:
                    
                    raise HydrusExceptions.ConflictException( 'This repository is full up and cannot take any more files!' )
                    
                
            
        
        # moderators may re-add files that were previously deleted
        overwrite_deleted = can_moderate_files
        
        self._RepositoryAddFile( service_id, account_id, file_dict, overwrite_deleted, timestamp )
def _RepositoryProcessClientToServerUpdate( self, service_key, account, client_to_server_update, timestamp ):
    """Apply a client's batched content update to the repository.
    
    For each content type (files, mappings, tag parents, tag siblings) the
    account's permissions decide how each action in the update is handled:
    
    - MODERATE: petitions are applied immediately as deletes, pends are applied
      immediately (and may overwrite previously deleted rows), and deny actions
      clear other users' pends/petitions.
    - CREATE: pends are applied immediately (without overwriting deleted rows).
    - PETITION: pends/petitions are merely recorded for later moderation.
    
    Actions the account has no permission for are silently skipped.
    """
    
    service_id = self._GetServiceId( service_key )
    account_key = account.GetAccountKey()
    account_id = self._GetAccountId( account_key )
    
    # gather all permissions up front
    can_petition_files = account.HasPermission( HC.CONTENT_TYPE_FILES, HC.PERMISSION_ACTION_PETITION )
    can_moderate_files = account.HasPermission( HC.CONTENT_TYPE_FILES, HC.PERMISSION_ACTION_MODERATE )
    can_petition_mappings = account.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_PETITION )
    can_create_mappings = account.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_CREATE )
    can_moderate_mappings = account.HasPermission( HC.CONTENT_TYPE_MAPPINGS, HC.PERMISSION_ACTION_MODERATE )
    can_petition_tag_parents = account.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_PETITION )
    can_create_tag_parents = account.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_CREATE )
    can_moderate_tag_parents = account.HasPermission( HC.CONTENT_TYPE_TAG_PARENTS, HC.PERMISSION_ACTION_MODERATE )
    can_petition_tag_siblings = account.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_PETITION )
    can_create_tag_siblings = account.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_CREATE )
    can_moderate_tag_siblings = account.HasPermission( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.PERMISSION_ACTION_MODERATE )
    
    # files
    
    if can_moderate_files or can_petition_files:
        
        for ( hashes, reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_PETITION ):
            
            master_hash_ids = self._GetMasterHashIds( hashes )
            
            service_hash_ids = self._RepositoryGetServiceHashIds( service_id, master_hash_ids, timestamp )
            
            if can_moderate_files:
                
                # moderators delete immediately
                self._RepositoryDeleteFiles( service_id, account_id, service_hash_ids, timestamp )
                
            elif can_petition_files:
                
                reason_id = self._GetReasonId( reason )
                
                self._RepositoryPetitionFiles( service_id, account_id, service_hash_ids, reason_id )
                
            
        
    
    if can_moderate_files:
        
        for ( hashes, reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DENY_PETITION ):
            
            master_hash_ids = self._GetMasterHashIds( hashes )
            
            service_hash_ids = self._RepositoryGetServiceHashIds( service_id, master_hash_ids, timestamp )
            
            self._RepositoryDenyFilePetition( service_id, service_hash_ids )
            
        
    
    #
    
    # later add pend mappings here however that is neat
    
    if can_create_mappings or can_moderate_mappings:
        
        for ( ( tag, hashes ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND ):
            
            master_tag_id = self._GetMasterTagId( tag )
            
            master_hash_ids = self._GetMasterHashIds( hashes )
            
            # only moderators may resurrect previously deleted mappings
            overwrite_deleted = can_moderate_mappings
            
            self._RepositoryAddMappings( service_id, account_id, master_tag_id, master_hash_ids, overwrite_deleted, timestamp )
            
        
    
    if can_moderate_mappings or can_petition_mappings:
        
        for ( ( tag, hashes ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PETITION ):
            
            master_tag_id = self._GetMasterTagId( tag )
            
            service_tag_id = self._RepositoryGetServiceTagId( service_id, master_tag_id, timestamp )
            
            master_hash_ids = self._GetMasterHashIds( hashes )
            
            service_hash_ids = self._RepositoryGetServiceHashIds( service_id, master_hash_ids, timestamp )
            
            if can_moderate_mappings:
                
                self._RepositoryDeleteMappings( service_id, account_id, service_tag_id, service_hash_ids, timestamp )
                
            elif can_petition_mappings:
                
                reason_id = self._GetReasonId( reason )
                
                self._RepositoryPetitionMappings( service_id, account_id, service_tag_id, service_hash_ids, reason_id )
                
            
        
    
    if can_moderate_mappings:
        
        for ( ( tag, hashes ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DENY_PETITION ):
            
            master_tag_id = self._GetMasterTagId( tag )
            
            service_tag_id = self._RepositoryGetServiceTagId( service_id, master_tag_id, timestamp )
            
            master_hash_ids = self._GetMasterHashIds( hashes )
            
            service_hash_ids = self._RepositoryGetServiceHashIds( service_id, master_hash_ids, timestamp )
            
            self._RepositoryDenyMappingPetition( service_id, service_tag_id, service_hash_ids )
            
        
    
    #
    
    # tag parents
    
    if can_create_tag_parents or can_moderate_tag_parents or can_petition_tag_parents:
        
        for ( ( child_tag, parent_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_PEND ):
            
            child_master_tag_id = self._GetMasterTagId( child_tag )
            parent_master_tag_id = self._GetMasterTagId( parent_tag )
            
            if can_create_tag_parents or can_moderate_tag_parents:
                
                overwrite_deleted = can_moderate_tag_parents
                
                self._RepositoryAddTagParent( service_id, account_id, child_master_tag_id, parent_master_tag_id, overwrite_deleted, timestamp )
                
            elif can_petition_tag_parents:
                
                reason_id = self._GetReasonId( reason )
                
                self._RepositoryPendTagParent( service_id, account_id, child_master_tag_id, parent_master_tag_id, reason_id )
                
            
        
        for ( ( child_tag, parent_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_PETITION ):
            
            child_master_tag_id = self._GetMasterTagId( child_tag )
            parent_master_tag_id = self._GetMasterTagId( parent_tag )
            
            child_service_tag_id = self._RepositoryGetServiceTagId( service_id, child_master_tag_id, timestamp )
            parent_service_tag_id = self._RepositoryGetServiceTagId( service_id, parent_master_tag_id, timestamp )
            
            if can_moderate_tag_parents:
                
                self._RepositoryDeleteTagParent( service_id, account_id, child_service_tag_id, parent_service_tag_id, timestamp )
                
            elif can_petition_tag_parents:
                
                reason_id = self._GetReasonId( reason )
                
                self._RepositoryPetitionTagParent( service_id, account_id, child_service_tag_id, parent_service_tag_id, reason_id )
                
            
        
    
    if can_moderate_tag_parents:
        
        for ( ( child_tag, parent_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_DENY_PEND ):
            
            child_master_tag_id = self._GetMasterTagId( child_tag )
            parent_master_tag_id = self._GetMasterTagId( parent_tag )
            
            self._RepositoryDenyTagParentPend( service_id, child_master_tag_id, parent_master_tag_id )
            
        
        for ( ( child_tag, parent_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_UPDATE_DENY_PETITION ):
            
            child_master_tag_id = self._GetMasterTagId( child_tag )
            parent_master_tag_id = self._GetMasterTagId( parent_tag )
            
            child_service_tag_id = self._RepositoryGetServiceTagId( service_id, child_master_tag_id, timestamp )
            parent_service_tag_id = self._RepositoryGetServiceTagId( service_id, parent_master_tag_id, timestamp )
            
            self._RepositoryDenyTagParentPetition( service_id, child_service_tag_id, parent_service_tag_id )
            
        
    
    #
    
    # tag siblings
    
    if can_create_tag_siblings or can_moderate_tag_siblings or can_petition_tag_siblings:
        
        for ( ( bad_tag, good_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_PEND ):
            
            bad_master_tag_id = self._GetMasterTagId( bad_tag )
            good_master_tag_id = self._GetMasterTagId( good_tag )
            
            if can_create_tag_siblings or can_moderate_tag_siblings:
                
                overwrite_deleted = can_moderate_tag_siblings
                
                self._RepositoryAddTagSibling( service_id, account_id, bad_master_tag_id, good_master_tag_id, overwrite_deleted, timestamp )
                
            elif can_petition_tag_siblings:
                
                reason_id = self._GetReasonId( reason )
                
                self._RepositoryPendTagSibling( service_id, account_id, bad_master_tag_id, good_master_tag_id, reason_id )
                
            
        
        for ( ( bad_tag, good_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_PETITION ):
            
            bad_master_tag_id = self._GetMasterTagId( bad_tag )
            good_master_tag_id = self._GetMasterTagId( good_tag )
            
            bad_service_tag_id = self._RepositoryGetServiceTagId( service_id, bad_master_tag_id, timestamp )
            good_service_tag_id = self._RepositoryGetServiceTagId( service_id, good_master_tag_id, timestamp )
            
            if can_moderate_tag_siblings:
                
                self._RepositoryDeleteTagSibling( service_id, account_id, bad_service_tag_id, good_service_tag_id, timestamp )
                
            elif can_petition_tag_siblings:
                
                reason_id = self._GetReasonId( reason )
                
                self._RepositoryPetitionTagSibling( service_id, account_id, bad_service_tag_id, good_service_tag_id, reason_id )
                
            
        
    
    if can_moderate_tag_siblings:
        
        for ( ( bad_tag, good_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_DENY_PEND ):
            
            bad_master_tag_id = self._GetMasterTagId( bad_tag )
            good_master_tag_id = self._GetMasterTagId( good_tag )
            
            self._RepositoryDenyTagSiblingPend( service_id, bad_master_tag_id, good_master_tag_id )
            
        
        for ( ( bad_tag, good_tag ), reason ) in client_to_server_update.GetContentDataIterator( HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_UPDATE_DENY_PETITION ):
            
            bad_master_tag_id = self._GetMasterTagId( bad_tag )
            good_master_tag_id = self._GetMasterTagId( good_tag )
            
            bad_service_tag_id = self._RepositoryGetServiceTagId( service_id, bad_master_tag_id, timestamp )
            good_service_tag_id = self._RepositoryGetServiceTagId( service_id, good_master_tag_id, timestamp )
            
            self._RepositoryDenyTagSiblingPetition( service_id, bad_service_tag_id, good_service_tag_id )
def _RepositoryRewardFilePetitioners( self, service_id, service_hash_ids, multiplier ):
    """Credit petition score to every account with an outstanding petition on these files.
    
    Each account scores its petition count over the given hashes, times the
    multiplier (negative multipliers penalise).
    """
    
    table_names = GenerateRepositoryFilesTableNames( service_id )
    petitioned_files_table_name = table_names[3]
    
    select_statement = 'SELECT account_id, COUNT( * ) FROM ' + petitioned_files_table_name + ' WHERE service_hash_id = ? GROUP BY account_id;'
    
    counts_by_account = collections.Counter()
    
    for ( account_id, num_petitions ) in self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ):
        
        counts_by_account[ account_id ] += num_petitions
        
    
    scores = [ ( account_id, num_petitions * multiplier ) for ( account_id, num_petitions ) in counts_by_account.items() ]
    
    self._RewardAccounts( service_id, HC.SCORE_PETITION, scores )
def _RepositoryRewardMappingPetitioners( self, service_id, service_tag_id, service_hash_ids, reason_id_or_multiplier = None, multiplier = None ):
    """Credit petition score to accounts petitioning these mappings."""
    
    # keep the original positional signature: ( service_id, service_tag_id, service_hash_ids, multiplier )
    multiplier = reason_id_or_multiplier if multiplier is None else multiplier
    
    table_names = GenerateRepositoryMappingsTableNames( service_id )
    petitioned_mappings_table_name = table_names[3]
    
    # service_tag_id is an integer from our own id space, so inlining it here is safe
    select_statement = 'SELECT account_id, COUNT( * ) FROM ' + petitioned_mappings_table_name + ' WHERE service_tag_id = ' + str( service_tag_id ) + ' AND service_hash_id = ? GROUP BY account_id;'
    
    counts_by_account = collections.Counter()
    
    for ( account_id, num_petitions ) in self._ExecuteManySelectSingleParam( select_statement, service_hash_ids ):
        
        counts_by_account[ account_id ] += num_petitions
        
    
    scores = [ ( account_id, num_petitions * multiplier ) for ( account_id, num_petitions ) in counts_by_account.items() ]
    
    self._RewardAccounts( service_id, HC.SCORE_PETITION, scores )
def _RepositoryRewardTagParentPenders( self, service_id, child_master_tag_id, parent_master_tag_id, multiplier ):
    """Credit score to accounts that pended this child->parent pair.
    
    The score is weighted by the child tag's current mappings count (minimum 1).
    """
    
    child_service_tag_id = self._RepositoryGetServiceTagId( service_id, child_master_tag_id, HydrusData.GetNow() )
    
    mappings_count = self._RepositoryGetCurrentMappingsCount( service_id, child_service_tag_id )
    
    weighted_score = max( mappings_count, 1 ) * multiplier
    
    pending_tag_parents_table_name = GenerateRepositoryTagParentsTableNames( service_id )[2]
    
    rows = self._c.execute( 'SELECT account_id FROM ' + pending_tag_parents_table_name + ' WHERE child_master_tag_id = ? AND parent_master_tag_id = ?;', ( child_master_tag_id, parent_master_tag_id ) )
    
    scores = [ ( account_id, weighted_score ) for ( account_id, ) in rows ]
    
    self._RewardAccounts( service_id, HC.SCORE_PETITION, scores )
def _RepositoryRewardTagParentPetitioners( self, service_id, child_service_tag_id, parent_service_tag_id, multiplier ):
    """Credit score to accounts that petitioned this child->parent pair.
    
    The score is weighted by the child tag's current mappings count (minimum 1).
    """
    
    mappings_count = self._RepositoryGetCurrentMappingsCount( service_id, child_service_tag_id )
    
    weighted_score = max( mappings_count, 1 ) * multiplier
    
    petitioned_tag_parents_table_name = GenerateRepositoryTagParentsTableNames( service_id )[3]
    
    rows = self._c.execute( 'SELECT account_id FROM ' + petitioned_tag_parents_table_name + ' WHERE child_service_tag_id = ? AND parent_service_tag_id = ?;', ( child_service_tag_id, parent_service_tag_id ) )
    
    scores = [ ( account_id, weighted_score ) for ( account_id, ) in rows ]
    
    self._RewardAccounts( service_id, HC.SCORE_PETITION, scores )
def _RepositoryRewardTagSiblingPenders( self, service_id, bad_master_tag_id, good_master_tag_id, multiplier ):
    """Credit score to accounts that pended this bad->good sibling pair.
    
    The score is weighted by the bad tag's current mappings count (minimum 1).
    """
    
    bad_service_tag_id = self._RepositoryGetServiceTagId( service_id, bad_master_tag_id, HydrusData.GetNow() )
    
    mappings_count = self._RepositoryGetCurrentMappingsCount( service_id, bad_service_tag_id )
    
    weighted_score = max( mappings_count, 1 ) * multiplier
    
    pending_tag_siblings_table_name = GenerateRepositoryTagSiblingsTableNames( service_id )[2]
    
    rows = self._c.execute( 'SELECT account_id FROM ' + pending_tag_siblings_table_name + ' WHERE bad_master_tag_id = ? AND good_master_tag_id = ?;', ( bad_master_tag_id, good_master_tag_id ) )
    
    scores = [ ( account_id, weighted_score ) for ( account_id, ) in rows ]
    
    self._RewardAccounts( service_id, HC.SCORE_PETITION, scores )
def _RepositoryRewardTagSiblingPetitioners( self, service_id, bad_service_tag_id, good_service_tag_id, multiplier ):
    """Credit score to accounts that petitioned this bad->good sibling pair.
    
    The score is weighted by the bad tag's current mappings count (minimum 1).
    """
    
    mappings_count = self._RepositoryGetCurrentMappingsCount( service_id, bad_service_tag_id )
    
    weighted_score = max( mappings_count, 1 ) * multiplier
    
    petitioned_tag_siblings_table_name = GenerateRepositoryTagSiblingsTableNames( service_id )[3]
    
    rows = self._c.execute( 'SELECT account_id FROM ' + petitioned_tag_siblings_table_name + ' WHERE bad_service_tag_id = ? AND good_service_tag_id = ?;', ( bad_service_tag_id, good_service_tag_id ) )
    
    scores = [ ( account_id, weighted_score ) for ( account_id, ) in rows ]
    
    self._RewardAccounts( service_id, HC.SCORE_PETITION, scores )
def _RepositoryServiceHashIdExists( self, service_id, master_hash_id ):
    """Return True if the given master hash id is mapped into this service's id space."""
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    result = self._c.execute( 'SELECT 1 FROM ' + hash_id_map_table_name + ' WHERE master_hash_id = ?;', ( master_hash_id, ) ).fetchone()
    
    # fetchone() is None when no row matched
    return result is not None
def _RepositoryServiceTagIdExists( self, service_id, master_tag_id ):
    """Return True if the given master tag id is mapped into this service's id space."""
    
    ( hash_id_map_table_name, tag_id_map_table_name ) = GenerateRepositoryMasterMapTableNames( service_id )
    
    result = self._c.execute( 'SELECT 1 FROM ' + tag_id_map_table_name + ' WHERE master_tag_id = ?;', ( master_tag_id, ) ).fetchone()
    
    # fetchone() is None when no row matched
    return result is not None
def _RepositorySuperBan( self, service_id, admin_account_id, subject_account_ids, timestamp ):
    """Delete all current content the subject accounts ever uploaded to this service.
    
    Walks files, mappings, tag parents and tag siblings in turn, collecting
    every current row owned by the subject accounts and deleting it under the
    admin account's id. Pends/petitions are not touched here.
    """
    
    # files
    
    ( current_files_table_name, deleted_files_table_name, pending_files_table_name, petitioned_files_table_name, ip_addresses_table_name ) = GenerateRepositoryFilesTableNames( service_id )
    
    select_statement = 'SELECT service_hash_id FROM ' + current_files_table_name + ' WHERE account_id = ?;'
    
    service_hash_ids = self._STL( self._ExecuteManySelectSingleParam( select_statement, subject_account_ids ) )
    
    if len( service_hash_ids ) > 0:
        
        self._RepositoryDeleteFiles( service_id, admin_account_id, service_hash_ids, timestamp )
        
    
    # mappings
    
    ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateRepositoryMappingsTableNames( service_id )
    
    select_statement = 'SELECT service_tag_id, service_hash_id FROM ' + current_mappings_table_name + ' WHERE account_id = ?;'
    
    # group hash ids by tag id so we can delete tag by tag
    mappings_dict = HydrusData.BuildKeyToListDict( self._ExecuteManySelectSingleParam( select_statement, subject_account_ids ) )
    
    if len( mappings_dict ) > 0:
        
        for ( service_tag_id, service_hash_ids ) in mappings_dict.items():
            
            self._RepositoryDeleteMappings( service_id, admin_account_id, service_tag_id, service_hash_ids, timestamp )
            
        
    
    # tag parents
    
    ( current_tag_parents_table_name, deleted_tag_parents_table_name, pending_tag_parents_table_name, petitioned_tag_parents_table_name ) = GenerateRepositoryTagParentsTableNames( service_id )
    
    select_statement = 'SELECT child_service_tag_id, parent_service_tag_id FROM ' + current_tag_parents_table_name + ' WHERE account_id = ?;'
    
    pairs = list( self._ExecuteManySelectSingleParam( select_statement, subject_account_ids ) )
    
    if len( pairs ) > 0:
        
        for ( child_service_tag_id, parent_service_tag_id ) in pairs:
            
            self._RepositoryDeleteTagParent( service_id, admin_account_id, child_service_tag_id, parent_service_tag_id, timestamp )
            
        
    
    # tag siblings
    
    ( current_tag_siblings_table_name, deleted_tag_siblings_table_name, pending_tag_siblings_table_name, petitioned_tag_siblings_table_name ) = GenerateRepositoryTagSiblingsTableNames( service_id )
    
    select_statement = 'SELECT bad_service_tag_id, good_service_tag_id FROM ' + current_tag_siblings_table_name + ' WHERE account_id = ?;'
    
    pairs = list( self._ExecuteManySelectSingleParam( select_statement, subject_account_ids ) )
    
    if len( pairs ) > 0:
        
        for ( bad_service_tag_id, good_service_tag_id ) in pairs:
            
            self._RepositoryDeleteTagSibling( service_id, admin_account_id, bad_service_tag_id, good_service_tag_id, timestamp )
def _RewardAccounts( self, service_id, score_type, scores ):
self._c.executemany( 'INSERT OR IGNORE INTO account_scores ( service_id, account_id, score_type, score ) VALUES ( ?, ?, ?, ? );', [ ( service_id, account_id, score_type, 0 ) for ( account_id, score ) in scores ] )
self._c.executemany( 'UPDATE account_scores SET score = score + ? WHERE service_id = ? AND account_id = ? and score_type = ?;', [ ( score, service_id, account_id, score_type ) for ( account_id, score ) in scores ] )
def _SaveAccounts( self, service_id, accounts ):
    """Persist each account's serialisable dictionary state and mark it clean."""
    
    update_statement = 'UPDATE accounts SET dictionary_string = ? WHERE account_key = ?;'
    
    for account in accounts:
        
        ( account_key, account_type, created, expires, dictionary ) = HydrusNetwork.Account.GenerateTupleFromAccount( account )
        
        self._c.execute( update_statement, ( dictionary.DumpToString(), sqlite3.Binary( account_key ) ) )
        
        account.SetClean()
def _SaveDirtyAccounts( self, service_keys_to_dirty_accounts ):
2021-04-07 21:26:45 +00:00
for ( service_key, dirty_accounts ) in service_keys_to_dirty_accounts.items():
2017-03-02 02:14:56 +00:00
service_id = self._GetServiceId( service_key )
self._SaveAccounts( service_id, dirty_accounts )
def _SaveDirtyServices( self, dirty_services ):
self._SaveServices( dirty_services )
def _SaveServices( self, services ):
for service in services:
( service_key, service_type, name, port, dictionary ) = service.ToTuple()
dictionary_string = dictionary.DumpToString()
self._c.execute( 'UPDATE services SET dictionary_string = ? WHERE service_key = ?;', ( dictionary_string, sqlite3.Binary( service_key ) ) )
service.SetClean()
def _UpdateDB( self, version ):
    
    # Apply the migration that takes the database from `version` to `version + 1`,
    # then bump the stored version number.
    
    HydrusData.Print( 'The server is updating to version ' + str( version + 1 ) )
    
    if version == 433:
        
        # account types moved to a new serialisable dump format--convert the old rows in place
        
        from hydrus.core.networking import HydrusNetworkLegacy
        
        old_rows = self._c.execute( 'SELECT account_type_id, service_id, account_type_key, title, dictionary_string FROM account_types;' ).fetchall()
        
        self._c.execute( 'DROP TABLE account_types;' )
        
        self._c.execute( 'CREATE TABLE account_types ( account_type_id INTEGER PRIMARY KEY, service_id INTEGER, dump TEXT );' )
        
        for ( account_type_id, service_id, account_type_key, title, dictionary_string ) in old_rows:
            
            converted_account_type = HydrusNetworkLegacy.ConvertToNewAccountType( account_type_key, title, dictionary_string )
            
            self._c.execute( 'INSERT INTO account_types ( account_type_id, service_id, dump ) VALUES ( ?, ?, ? );', ( account_type_id, service_id, converted_account_type.DumpToString() ) )
            
        
    
    HydrusData.Print( 'The server has updated to version ' + str( version + 1 ) )
    
    self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
    
2013-02-19 00:11:43 +00:00
2015-11-11 21:20:41 +00:00
2019-10-09 22:03:03 +00:00
def _Vacuum( self ):
    
    # Vacuum every database file we manage to reclaim free pages.
    # This requires exclusive use of the db, so it only proceeds if the
    # server-busy lock can be grabbed without waiting.
    # Fix: removed the `names_done` accumulator, which was appended to but
    # never read.
    
    locked = HG.server_busy.acquire( False ) # pylint: disable=E1111
    
    if not locked:
        
        HydrusData.Print( 'Could not vacuum because the server was locked!' )
        
        return
        
    
    try:
        
        # all attached databases except sqlite's own temp/mem attachments
        db_names = [ name for ( index, name, path ) in self._c.execute( 'PRAGMA database_list;' ) if name not in ( 'mem', 'temp', 'durable_temp' ) ]
        
        # only files this object manages itself
        db_names = [ name for name in db_names if name in self._db_filenames ]
        
        ok_db_names = []
        
        for name in db_names:
            
            db_path = os.path.join( self._db_dir, self._db_filenames[ name ] )
            
            try:
                
                # raises if preconditions (e.g. disk space) are not met
                HydrusDB.CheckCanVacuumCursor( db_path, self._c )
                
            except Exception as e:
                
                HydrusData.Print( 'Cannot vacuum "{}": {}'.format( db_path, e ) )
                
                continue
                
            
            ok_db_names.append( name )
            
        
        db_names = ok_db_names
        
        if len( db_names ) > 0:
            
            # the vacuum happens outside our normal cursor, so close it first
            self._CloseDBCursor()
            
            try:
                
                for name in db_names:
                    
                    try:
                        
                        db_path = os.path.join( self._db_dir, self._db_filenames[ name ] )
                        
                        started = HydrusData.GetNowPrecise()
                        
                        HydrusDB.VacuumDB( db_path )
                        
                        time_took = HydrusData.GetNowPrecise() - started
                        
                        HydrusData.Print( 'Vacuumed ' + db_path + ' in ' + HydrusData.TimeDeltaToPrettyTimeDelta( time_took ) )
                        
                    except Exception as e:
                        
                        HydrusData.Print( 'vacuum failed:' )
                        
                        HydrusData.ShowException( e )
                        
                        # bail on the whole job; the finally below restores the cursor
                        return
                        
                    
                
            finally:
                
                self._InitDBCursor()
                
            
        
    finally:
        
        HG.server_busy.release()
        
    
2015-04-22 22:57:25 +00:00
def _VerifyAccessKey( self, service_key, access_key ):
2013-02-19 00:11:43 +00:00
2015-04-22 22:57:25 +00:00
service_id = self._GetServiceId( service_key )
2013-10-02 22:06:06 +00:00
2015-04-22 22:57:25 +00:00
result = self._c.execute( 'SELECT 1 FROM accounts WHERE service_id = ? AND hashed_access_key = ?;', ( service_id, sqlite3.Binary( hashlib.sha256( access_key ).digest() ) ) ).fetchone()
2013-02-19 00:11:43 +00:00
2015-04-22 22:57:25 +00:00
if result is None:
result = self._c.execute( 'SELECT 1 FROM registration_keys WHERE service_id = ? AND access_key = ?;', ( service_id, sqlite3.Binary( access_key ) ) ).fetchone()
2017-03-15 20:13:04 +00:00
if result is None:
return False
2015-04-22 22:57:25 +00:00
2013-02-19 00:11:43 +00:00
2015-04-22 22:57:25 +00:00
return True
2013-02-19 00:11:43 +00:00
2015-04-22 22:57:25 +00:00
def _Write( self, action, *args, **kwargs ):
2021-04-07 21:26:45 +00:00
if action not in self._write_commands_to_methods:
raise Exception( 'db received an unknown write command: ' + action )
2013-02-19 00:11:43 +00:00
2021-04-07 21:26:45 +00:00
return self._write_commands_to_methods[ action ]( *args, **kwargs )
2013-10-02 22:06:06 +00:00
2016-10-12 21:52:50 +00:00
def GetFilesDir( self ):
    
    # Public accessor for the directory this server stores its files in.
    
    files_dir = self._files_dir
    
    return files_dir
    