Version 65

This commit is contained in:
Hydrus 2013-04-10 13:10:37 -05:00
parent 3216613c96
commit 81de5a75dd
16 changed files with 2941 additions and 1616 deletions

View File

@ -8,6 +8,28 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 65</h3></li>
<ul>
<li>added subscriptions dialog</li>
<li>added prototype subscription daemon, but not yet activated it</li>
<li>added downloader classes</li>
<li>began reorganisation of nearly all download code</li>
<li>moved parsing around</li>
<li>moved advancedhttpconnection around</li>
<li>moved serviceupdate around</li>
<li>moved some content_update stuff around</li>
<li>moved some constants around</li>
<li>files in client_files now have extensions</li>
<li>files in client_files are now read only</li>
<li>pdf launch is simplified as a result of this new ext stuff</li>
<li>fixed find similar images; it was just a typo</li>
<li>also fixed initial predicates string display</li>
<li>rejiggered the thumbnail resizer again; due to my stupidity, it was causing lag</li>
<li>thumbnail resizer burns a little more cpu when there are >10,000 or >100,000 thumbnails to render</li>
<li>it also now does thumbs in random order, for a couple of good reasons</li>
<li>upgraded sqlite, hopefully some queries will run faster?</li>
<li>upgraded to python 2.7.4. please report any weird errors</li>
</ul>
<li><h3>version 64</h3></li>
<ul>
<li>got rid of system:not_uploaded_to - now system:file service, which is a lot more powerful</li>

View File

@ -3,7 +3,6 @@ import dircache
import gc
import hashlib
import httplib
import ClientParsers
import HydrusConstants as HC
import HydrusImageHandling
import HydrusMessageHandling
@ -31,10 +30,6 @@ COLOUR_UNSELECTED = wx.Colour( 223, 227, 230 )
COLOUR_MESSAGE = wx.Colour( 230, 246, 255 )
LOCAL_FILE_SERVICE_IDENTIFIER = HC.ClientServiceIdentifier( 'local files', HC.LOCAL_FILE, 'local files' )
LOCAL_TAG_SERVICE_IDENTIFIER = HC.ClientServiceIdentifier( 'local tags', HC.LOCAL_TAG, 'local tags' )
NULL_SERVICE_IDENTIFIER = HC.ClientServiceIdentifier( '', HC.NULL_SERVICE, 'no service' )
SHORTCUT_HELP = '''You can set up many custom shortcuts in file->options->shortcuts. Please check that to see your current mapping.
Some shortcuts remain hardcoded, however:
@ -72,18 +67,6 @@ collection_string_lookup[ COLLECT_BY_SV ] = 'collect by series-volume'
collection_string_lookup[ COLLECT_BY_SVC ] = 'collect by series-volume-chapter'
collection_string_lookup[ NO_COLLECTIONS ] = 'no collections'
CONTENT_UPDATE_ADD = 0
CONTENT_UPDATE_DELETE = 1
CONTENT_UPDATE_PENDING = 2
CONTENT_UPDATE_RESCIND_PENDING = 3
CONTENT_UPDATE_PETITION = 4
CONTENT_UPDATE_RESCIND_PETITION = 5
CONTENT_UPDATE_EDIT_LOG = 6
CONTENT_UPDATE_ARCHIVE = 7
CONTENT_UPDATE_INBOX = 8
CONTENT_UPDATE_RATING = 9
CONTENT_UPDATE_RATINGS_FILTER = 10
DISCRIMINANT_INBOX = 0
DISCRIMINANT_LOCAL = 1
DISCRIMINANT_NOT_LOCAL = 2
@ -137,14 +120,6 @@ RESTRICTION_MAX_RESOLUTION = 1
RESTRICTION_MAX_FILE_SIZE = 2
RESTRICTION_ALLOWED_MIMES = 3
SERVICE_UPDATE_ACCOUNT = 0
SERVICE_UPDATE_DELETE_PENDING = 1
SERVICE_UPDATE_ERROR = 2
SERVICE_UPDATE_NEXT_BEGIN = 3
SERVICE_UPDATE_RESET = 4
SERVICE_UPDATE_REQUEST_MADE = 5
SERVICE_UPDATE_LAST_CHECK = 6
SHUTDOWN_TIMESTAMP_VACUUM = 0
SHUTDOWN_TIMESTAMP_FATTEN_AC_CACHE = 1
SHUTDOWN_TIMESTAMP_DELETE_ORPHANS = 2
@ -295,7 +270,7 @@ def GenerateMultipartFormDataCTAndBodyFromDict( fields ):
return m.get()
def GetMediasTagCount( pool, tag_service_identifier = NULL_SERVICE_IDENTIFIER ):
def GetMediasTagCount( pool, tag_service_identifier = HC.NULL_SERVICE_IDENTIFIER ):
all_tags = []
@ -312,7 +287,7 @@ def GetMediasTagCount( pool, tag_service_identifier = NULL_SERVICE_IDENTIFIER ):
for tags in all_tags:
if tag_service_identifier == NULL_SERVICE_IDENTIFIER: ( current, deleted, pending, petitioned ) = tags.GetUnionCDPP()
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER: ( current, deleted, pending, petitioned ) = tags.GetUnionCDPP()
else: ( current, deleted, pending, petitioned ) = tags.GetCDPP( tag_service_identifier )
current_tags_to_count.update( current )
@ -434,250 +409,6 @@ def ParseImportablePaths( raw_paths, include_subdirs = True ):
return good_paths
class AdvancedHTTPConnection():
    """Persistent HTTP/HTTPS connection wrapper around Python 2's httplib.

    Adds the conveniences the client needs on top of a raw httplib connection:
    a primitive cookie jar (when accept_cookies is True), redirect following,
    charset-aware response decoding, YAML response parsing, and — when bound to
    a hydrus service_identifier — service-health pubsub notifications and
    bandwidth accounting.
    """

    def __init__( self, url = '', scheme = 'http', host = '', port = None, service_identifier = None, accept_cookies = False ):
        # A non-empty url overrides the separate scheme/host/port arguments.
        if len( url ) > 0:
            parse_result = urlparse.urlparse( url )
            ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )

        self._scheme = scheme
        self._host = host
        self._port = port

        self._service_identifier = service_identifier

        self._accept_cookies = accept_cookies
        # cookie name -> value; only populated when accept_cookies is True
        self._cookies = {}

        # hydrus services can serve large updates, so they get a much longer
        # socket timeout than ordinary web requests
        if service_identifier is None: timeout = 30
        else: timeout = 300

        if self._scheme == 'http': self._connection = httplib.HTTPConnection( self._host, self._port, timeout = timeout )
        else: self._connection = httplib.HTTPSConnection( self._host, self._port, timeout = timeout )

    def close( self ): self._connection.close()

    def connect( self ): self._connection.connect()

    def GetCookies( self ): return self._cookies

    def geturl( self, url, headers = {}, is_redirect = False, follow_redirects = True ):
        """Convenience GET: split a full url into path?query and delegate to request()."""
        parse_result = urlparse.urlparse( url )

        request = parse_result.path

        query = parse_result.query

        if query != '': request += '?' + query

        return self.request( 'GET', request, headers = headers, is_redirect = is_redirect, follow_redirects = follow_redirects )

    def request( self, request_type, request, headers = {}, body = None, is_redirect = False, follow_redirects = True ):
        """Perform one HTTP request and return the (possibly decoded/parsed) body.

        request_type is 'GET' or 'POST'; request is the path (plus optional
        query string). Raises various HC exceptions mapped from the response
        status. NOTE(review): headers is a mutable default argument and is
        modified below — shared state across calls; also passed on through
        redirects.
        """
        if 'User-Agent' not in headers: headers[ 'User-Agent' ] = 'hydrus/' + str( HC.NETWORK_VERSION )

        if len( self._cookies ) > 0: headers[ 'Cookie' ] = '; '.join( [ k + '=' + v for ( k, v ) in self._cookies.items() ] )

        try:
            self._connection.request( request_type, request, headers = headers, body = body )

            response = self._connection.getresponse()

            raw_response = response.read()

        except ( httplib.CannotSendRequest, httplib.BadStatusLine ):
            # for some reason, we can't send a request on the current connection, so let's make a new one!
            # NOTE(review): the rebuilt connection here does not pass the timeout
            # the constructor used — presumably an oversight; confirm.
            try:
                if self._scheme == 'http': self._connection = httplib.HTTPConnection( self._host, self._port )
                else: self._connection = httplib.HTTPSConnection( self._host, self._port )

                self._connection.request( request_type, request, headers = headers, body = body )

                response = self._connection.getresponse()

                raw_response = response.read()

            except:
                print( traceback.format_exc() )

                raise

        except:
            print( traceback.format_exc() )

            raise Exception( 'Could not connect to server' )

        if self._accept_cookies:
            # harvest Set-Cookie headers into the primitive cookie jar,
            # ignoring attributes (path, expiry, etc.) after the first ';'
            for cookie in response.msg.getallmatchingheaders( 'Set-Cookie' ): # msg is a mimetools.Message
                try:
                    cookie = cookie.replace( 'Set-Cookie: ', '' )

                    if ';' in cookie: ( cookie, expiry_gumpf ) = cookie.split( ';', 1 )

                    ( k, v ) = cookie.split( '=' )

                    self._cookies[ k ] = v

                except: pass

        if len( raw_response ) > 0:
            content_type = response.getheader( 'Content-Type' )

            if content_type is not None:
                # additional info can be a filename or charset=utf-8 or whatever
                if content_type == 'text/html':
                    mime_string = content_type

                    # bare text/html carries no charset; assume utf-8, best effort
                    try: raw_response = raw_response.decode( 'utf-8' )
                    except: pass

                elif '; ' in content_type:
                    ( mime_string, additional_info ) = content_type.split( '; ' )

                    if 'charset=' in additional_info:
                        # this does utf-8, ISO-8859-4, whatever
                        ( gumpf, charset ) = additional_info.split( '=' )

                        try: raw_response = raw_response.decode( charset )
                        except: pass

                else: mime_string = content_type

                # hydrus services speak YAML; everything else is returned raw
                if mime_string in HC.mime_enum_lookup and HC.mime_enum_lookup[ mime_string ] == HC.APPLICATION_YAML:
                    try: parsed_response = yaml.safe_load( raw_response )
                    except Exception as e: raise HC.NetworkVersionException( 'Failed to parse a response object!' + os.linesep + unicode( e ) )

                else: parsed_response = raw_response

            else: parsed_response = raw_response

        else: parsed_response = raw_response

        if self._service_identifier is not None:
            service_type = self._service_identifier.GetType()

            server_header = response.getheader( 'Server' )

            service_string = HC.service_string_lookup[ service_type ]

            # sanity-check the Server header so we notice when the address
            # points at the wrong kind of hydrus service
            if server_header is None or service_string not in server_header:
                HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, GetUnknownAccount() ) )

                raise HC.WrongServiceTypeException( 'Target was not a ' + service_string + '!' )

            # reduce '/command/sub?query' to the bare command for bandwidth accounting
            if '?' in request: request_command = request.split( '?' )[0]
            else: request_command = request

            if '/' in request_command: request_command = request_command.split( '/' )[1]

            if request_type == 'GET':
                if ( service_type, HC.GET, request_command ) in HC.BANDWIDTH_CONSUMING_REQUESTS: HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_REQUEST_MADE, self._service_identifier, len( raw_response ) ) )

            elif ( service_type, HC.POST, request_command ) in HC.BANDWIDTH_CONSUMING_REQUESTS: HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_REQUEST_MADE, self._service_identifier, len( body ) ) )

        if response.status == 200: return parsed_response
        elif response.status == 205: return
        elif response.status in ( 301, 302, 303, 307 ):
            location = response.getheader( 'Location' )

            # NOTE(review): 'data' is not defined in this scope — this branch
            # would raise NameError rather than the intended Exception; confirm.
            if location is None: raise Exception( data )
            else:
                if not follow_redirects: return ''

                # only one level of redirect is followed
                if is_redirect: raise Exception( 'Too many redirects!' )

                url = location

                parse_result = urlparse.urlparse( url )

                redirected_request = parse_result.path

                redirected_query = parse_result.query

                if redirected_query != '': redirected_request += '?' + redirected_query

                ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )

                # refuse same-scheme redirects whose target overlaps the original
                # request path — crude loop protection
                if ( scheme is None or scheme == self._scheme ) and ( request == redirected_request or request in redirected_request or redirected_request in request ): raise Exception( 'Redirection problem' )
                else:
                    # reuse this connection when the redirect stays on the same
                    # host:port, otherwise open a fresh one to the new location
                    if host is None or ( host == self._host and port == self._port ): connection = self
                    else: connection = AdvancedHTTPConnection( url )

                    if response.status in ( 301, 307 ):
                        # 301: moved permanently, repeat request
                        # 307: moved temporarily, repeat request
                        return connection.request( request_type, redirected_request, headers = headers, body = body, is_redirect = True )

                    elif response.status in ( 302, 303 ):
                        # 302: moved temporarily, repeat request (except everyone treats it like 303 for no good fucking reason)
                        # 303: thanks, now go here with GET
                        return connection.request( 'GET', redirected_request, is_redirect = True )

        elif response.status == 304: raise HC.NotModifiedException()
        else:
            # error statuses: notify the service machinery, then raise a typed exception
            if self._service_identifier is not None:
                HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ERROR, self._service_identifier, parsed_response ) )

                # 401/426 suggest our account info is stale; reset it
                if response.status in ( 401, 426 ): HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, GetUnknownAccount() ) )

            if response.status == 401: raise HC.PermissionsException( parsed_response )
            elif response.status == 403: raise HC.ForbiddenException( parsed_response )
            elif response.status == 404: raise HC.NotFoundException( parsed_response )
            elif response.status == 426: raise HC.NetworkVersionException( parsed_response )
            elif response.status in ( 500, 501, 502, 503 ):
                # server-side errors: best-effort log of the body, then raise
                try: print( parsed_response )
                except: pass

                raise Exception( parsed_response )

            else: raise Exception( parsed_response )

    def SetCookie( self, key, value ): self._cookies[ key ] = value
class AutocompleteMatches():
def __init__( self, matches ):
@ -884,20 +615,20 @@ class CDPPFileServiceIdentifiers():
def GetCDPP( self ): return ( self._current, self._deleted, self._pending, self._petitioned )
def GetCurrent( self ): return self._current
def GetCurrentRemote( self ): return self._current - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetCurrentRemote( self ): return self._current - set( ( HC.LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetDeleted( self ): return self._deleted
def GetDeletedRemote( self ): return self._deleted - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetDeletedRemote( self ): return self._deleted - set( ( HC.LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetPending( self ): return self._pending
def GetPendingRemote( self ): return self._pending - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetPendingRemote( self ): return self._pending - set( ( HC.LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetPetitioned( self ): return self._petitioned
def GetPetitionedRemote( self ): return self._petitioned - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def GetPetitionedRemote( self ): return self._petitioned - set( ( HC.LOCAL_FILE_SERVICE_IDENTIFIER, ) )
def HasDownloading( self ): return LOCAL_FILE_SERVICE_IDENTIFIER in self._pending
def HasDownloading( self ): return HC.LOCAL_FILE_SERVICE_IDENTIFIER in self._pending
def HasLocal( self ): return LOCAL_FILE_SERVICE_IDENTIFIER in self._current
def HasLocal( self ): return HC.LOCAL_FILE_SERVICE_IDENTIFIER in self._current
def ProcessContentUpdate( self, content_update ):
@ -905,30 +636,30 @@ class CDPPFileServiceIdentifiers():
service_identifier = content_update.GetServiceIdentifier()
if action == CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ADD:
self._current.add( service_identifier )
self._deleted.discard( service_identifier )
self._pending.discard( service_identifier )
elif action == CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
self._deleted.add( service_identifier )
self._current.discard( service_identifier )
self._petitioned.discard( service_identifier )
elif action == CONTENT_UPDATE_PENDING:
elif action == HC.CONTENT_UPDATE_PENDING:
if service_identifier not in self._current: self._pending.add( service_identifier )
elif action == CONTENT_UPDATE_PETITION:
elif action == HC.CONTENT_UPDATE_PETITION:
if service_identifier not in self._deleted: self._petitioned.add( service_identifier )
elif action == CONTENT_UPDATE_RESCIND_PENDING: self._pending.discard( service_identifier )
elif action == CONTENT_UPDATE_RESCIND_PETITION: self._petitioned.discard( service_identifier )
elif action == HC.CONTENT_UPDATE_RESCIND_PENDING: self._pending.discard( service_identifier )
elif action == HC.CONTENT_UPDATE_RESCIND_PETITION: self._petitioned.discard( service_identifier )
def ResetService( self, service_identifier ):
@ -1029,7 +760,7 @@ class CDPPTagServiceIdentifiers():
num_tags = 0
if tag_service_identifier == NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
if include_current_tags: num_tags += len( self._current )
if include_pending_tags: num_tags += len( self._pending )
@ -1065,7 +796,7 @@ class CDPPTagServiceIdentifiers():
action = content_update.GetAction()
if action == CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ADD:
tag = content_update.GetInfo()
@ -1074,7 +805,7 @@ class CDPPTagServiceIdentifiers():
deleted.discard( tag )
pending.discard( tag )
elif action == CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
tag = content_update.GetInfo()
@ -1083,13 +814,13 @@ class CDPPTagServiceIdentifiers():
current.discard( tag )
petitioned.discard( tag )
elif action == CONTENT_UPDATE_EDIT_LOG:
elif action == HC.CONTENT_UPDATE_EDIT_LOG:
edit_log = content_update.GetInfo()
for ( action, info ) in edit_log:
if action == CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ADD:
tag = info
@ -1098,7 +829,7 @@ class CDPPTagServiceIdentifiers():
deleted.discard( tag )
pending.discard( tag )
elif action == CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
tag = info
@ -1107,25 +838,25 @@ class CDPPTagServiceIdentifiers():
current.discard( tag )
petitioned.discard( tag )
elif action == CONTENT_UPDATE_PENDING:
elif action == HC.CONTENT_UPDATE_PENDING:
tag = info
if tag not in current: pending.add( tag )
elif action == CONTENT_UPDATE_RESCIND_PENDING:
elif action == HC.CONTENT_UPDATE_RESCIND_PENDING:
tag = info
pending.discard( tag )
elif action == CONTENT_UPDATE_PETITION:
elif action == HC.CONTENT_UPDATE_PETITION:
( tag, reason ) = info
if tag not in deleted: petitioned.add( tag )
elif action == CONTENT_UPDATE_RESCIND_PETITION:
elif action == HC.CONTENT_UPDATE_RESCIND_PETITION:
tag = info
@ -1174,7 +905,7 @@ class LocalRatings():
action = content_update.GetAction()
if action == CONTENT_UPDATE_RATING:
if action == HC.CONTENT_UPDATE_RATING:
rating = content_update.GetInfo()
@ -1199,7 +930,7 @@ class ConnectionToService():
( host, port ) = self._credentials.GetAddress()
self._connection = AdvancedHTTPConnection( host = host, port = port, service_identifier = self._service_identifier, accept_cookies = True )
self._connection = HC.AdvancedHTTPConnection( host = host, port = port, service_identifier = self._service_identifier, accept_cookies = True )
self._connection.connect()
@ -1207,7 +938,7 @@ class ConnectionToService():
error_message = 'Could not connect.'
if self._service_identifier is not None: HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ERROR, self._service_identifier, error_message ) )
if self._service_identifier is not None: HC.pubsub.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_ERROR, self._service_identifier, error_message ) )
raise Exception( error_message )
@ -1368,13 +1099,13 @@ class ConnectionToService():
account.MakeFresh()
HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, account ) )
HC.pubsub.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_ACCOUNT, self._service_identifier, account ) )
elif request == 'update':
update = response
HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_NEXT_BEGIN, self._service_identifier, update.GetNextBegin() ) )
HC.pubsub.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_NEXT_BEGIN, self._service_identifier, update.GetNextBegin() ) )
return response
@ -1422,21 +1153,21 @@ class CPRemoteRatingsServiceIdentifiers():
# this may well need work; need to figure out how to set the pending back to None after an upload. rescind seems ugly
if action == CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ADD:
rating = content_update.GetInfo()
current = rating
elif action == CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
current = None
elif action == CONTENT_UPDATE_RESCIND_PENDING:
elif action == HC.CONTENT_UPDATE_RESCIND_PENDING:
pending = None
elif action == CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
rating = content_update.GetInfo()
@ -1608,15 +1339,15 @@ class FileQueryResult():
hashes = content_update.GetHashes()
if action == CONTENT_UPDATE_ARCHIVE:
if action == HC.CONTENT_UPDATE_ARCHIVE:
if 'system:inbox' in self._predicates: self._Remove( hashes )
elif action == CONTENT_UPDATE_INBOX:
elif action == HC.CONTENT_UPDATE_INBOX:
if 'system:archive' in self._predicates: self._Remove( hashes )
elif action == CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier: self._Remove( hashes )
elif action == HC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier: self._Remove( hashes )
for hash in self._hashes.intersection( hashes ):
@ -1633,11 +1364,11 @@ class FileQueryResult():
service_identifier = update.GetServiceIdentifier()
if action == SERVICE_UPDATE_DELETE_PENDING:
if action == HC.SERVICE_UPDATE_DELETE_PENDING:
for media_result in self._hashes_to_media_results.values(): media_result.DeletePending( service_identifier )
elif action == SERVICE_UPDATE_RESET:
elif action == HC.SERVICE_UPDATE_RESET:
for media_result in self._hashes_to_media_results.values(): media_result.ResetService( service_identifier )
@ -1645,7 +1376,7 @@ class FileQueryResult():
class FileSearchContext():
def __init__( self, file_service_identifier = LOCAL_FILE_SERVICE_IDENTIFIER, tag_service_identifier = NULL_SERVICE_IDENTIFIER, include_current_tags = True, include_pending_tags = True, predicates = [] ):
def __init__( self, file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER, tag_service_identifier = HC.NULL_SERVICE_IDENTIFIER, include_current_tags = True, include_pending_tags = True, predicates = [] ):
self._file_service_identifier = file_service_identifier
self._tag_service_identifier = tag_service_identifier
@ -2272,10 +2003,10 @@ class MediaResult():
action = content_update.GetAction()
if action == CONTENT_UPDATE_ADD and not file_service_identifiers_cdpp.HasLocal(): inbox = True
elif action == CONTENT_UPDATE_ARCHIVE: inbox = False
elif action == CONTENT_UPDATE_INBOX: inbox = True
elif action == CONTENT_UPDATE_DELETE: inbox = False
if action == HC.CONTENT_UPDATE_ADD and not file_service_identifiers_cdpp.HasLocal(): inbox = True
elif action == HC.CONTENT_UPDATE_ARCHIVE: inbox = False
elif action == HC.CONTENT_UPDATE_INBOX: inbox = True
elif action == HC.CONTENT_UPDATE_DELETE: inbox = False
self._tuple = ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tag_service_identifiers_cdpp, file_service_identifiers_cdpp, local_ratings, remote_ratings )
@ -2468,8 +2199,8 @@ class ServiceRemote( Service ):
action = update.GetAction()
if action == SERVICE_UPDATE_ERROR: self._last_error = int( time.time() )
elif action == SERVICE_UPDATE_RESET:
if action == HC.SERVICE_UPDATE_ERROR: self._last_error = int( time.time() )
elif action == HC.SERVICE_UPDATE_RESET:
self._service_identifier = update.GetInfo()
@ -2523,14 +2254,14 @@ class ServiceRemoteRestricted( ServiceRemote ):
action = update.GetAction()
if action == SERVICE_UPDATE_ACCOUNT:
if action == HC.SERVICE_UPDATE_ACCOUNT:
account = update.GetInfo()
self._account = account
self._last_error = 0
elif action == SERVICE_UPDATE_REQUEST_MADE:
elif action == HC.SERVICE_UPDATE_REQUEST_MADE:
num_bytes = update.GetInfo()
@ -2591,13 +2322,13 @@ class ServiceRemoteRestrictedRepository( ServiceRemoteRestricted ):
action = update.GetAction()
if action == SERVICE_UPDATE_NEXT_BEGIN:
if action == HC.SERVICE_UPDATE_NEXT_BEGIN:
next_begin = update.GetInfo()
self.SetNextBegin( next_begin )
elif action == SERVICE_UPDATE_RESET:
elif action == HC.SERVICE_UPDATE_RESET:
self._service_identifier = update.GetInfo()
@ -2677,13 +2408,13 @@ class ServiceRemoteRestrictedDepot( ServiceRemoteRestricted ):
action = update.GetAction()
if action == SERVICE_UPDATE_LAST_CHECK:
if action == HC.SERVICE_UPDATE_LAST_CHECK:
last_check = update.GetInfo()
self._last_check = last_check
elif action == SERVICE_UPDATE_RESET:
elif action == HC.SERVICE_UPDATE_RESET:
self._service_identifier = update.GetInfo()
@ -2715,21 +2446,6 @@ class ServiceRemoteRestrictedDepotMessage( ServiceRemoteRestrictedDepot ):
def Decrypt( self, encrypted_message ): return HydrusMessageHandling.UnpackageDeliveredMessage( encrypted_message, self._private_key )
class ServiceUpdate():
    """A small record describing one service-level change.

    Bundles an action code with the service it applies to and an optional
    action-specific payload; consumers read it back through the accessors.
    """

    def __init__( self, action, service_identifier, info = None ):
        # make this an enumerated thing, yo
        self._action = action
        self._service_identifier = service_identifier
        self._info = info

    def GetAction( self ):
        return self._action

    def GetInfo( self ):
        return self._info

    def GetServiceIdentifier( self ):
        return self._service_identifier
class ThumbnailCache():
def __init__( self, db, options ):
@ -2905,7 +2621,7 @@ class WebSessionManagerClient():
if name == 'hentai foundry':
connection = AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True )
connection = HC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True )
# this establishes the php session cookie, the csrf cookie, and tells hf that we are 18 years of age
connection.request( 'GET', '/?enterAgree=1' )
@ -2923,7 +2639,7 @@ class WebSessionManagerClient():
raise Exception( 'You need to set up your pixiv credentials in services->manage pixiv account.' )
connection = AdvancedHTTPConnection( url = 'http://www.pixiv.net', accept_cookies = True )
connection = HC.AdvancedHTTPConnection( url = 'http://www.pixiv.net', accept_cookies = True )
form_fields = {}

View File

@ -88,8 +88,6 @@ class Controller( wx.App ):
self.MaintainDB()
threading.Thread( target = self._db.THREADResizeThumbnails, name = 'Resize Thumbnails' ).start()
def EventPubSub( self, event ):

View File

@ -5,6 +5,7 @@ import httplib
import itertools
import HydrusConstants as HC
import HydrusDocumentHandling
import HydrusDownloading
import HydrusFlashHandling
import HydrusImageHandling
import HydrusMessageHandling
@ -17,6 +18,7 @@ import Queue
import random
import shutil
import sqlite3
import stat
import sys
import threading
import time
@ -61,9 +63,11 @@ class FileDB():
try:
path_from = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' )
hash_id = self._GetHashId( c, hash )
mime = HC.GetMimeFromPath( path_from )
mime = self._GetMime( c, self._local_file_service_id, hash_id )
path_from = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ]
path_to = export_path + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ]
@ -107,11 +111,11 @@ class FileDB():
HC.pubsub.pub( 'progress_update', job_key, index, num_hashes, 'The client is now exporting the files to ' + export_path + ' (' + HC.ConvertIntToPrettyString( index + 1 ) + '/' + HC.ConvertIntToPrettyString( num_hashes ) + ')' )
path_from = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' )
hash_id = self._GetHashId( c, hash )
mime = HC.GetMimeFromPath( path_from )
mime = self._GetMime( c, self._local_file_service_id, hash_id )
# could search for some appropriate tags here, convert them to ascii or whatever, and make sure they are unique given the whole list
path_from = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ]
path_to = export_path + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ]
@ -152,7 +156,9 @@ class FileDB():
try:
with open( HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ), 'rb' ) as f: file = f.read()
with self._hashes_to_mimes_lock: mime = self._hashes_to_mimes[ hash ]
with open( HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ], 'rb' ) as f: file = f.read()
except MemoryError: print( 'Memory error!' )
except: raise Exception( 'Could not find that file!' )
@ -1192,8 +1198,8 @@ class RatingDB():
c.executemany( 'INSERT INTO ratings ( service_id, hash_id, count, rating, score ) VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, self._GetHashId( c, hash ), count, rating, HC.CalculateScoreFromRating( count, rating ) ) for ( hash, count, rating ) in ratings if count > 0 ] )
# these need count and score in
#self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_RATING_REMOTE, service_identifier, ( hash, ), rating ) for ( hash, rating ) in ratings ] )
#self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_RATING_REMOTE, service_identifier, ( hash, ), rating ) for ( hash, rating ) in ratings ] )
#self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_RATING_REMOTE, service_identifier, ( hash, ), rating ) for ( hash, rating ) in ratings ] )
#self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_RATING_REMOTE, service_identifier, ( hash, ), rating ) for ( hash, rating ) in ratings ] )
class TagDB():
@ -1298,14 +1304,21 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.executemany( 'INSERT OR IGNORE INTO file_transfers ( service_id_from, service_id_to, hash_id ) VALUES ( ?, ?, ? );', [ ( service_id, self._local_file_service_id, hash_id ) for hash_id in hash_ids ] )
self.pub( 'notify_new_downloads' )
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_PENDING, HC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_PENDING, HC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
def _AddFiles( self, c, files_info_rows ):
# service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words
for ( hash_id, mime ) in [ ( row[ 1 ], row[ 3 ] ) for row in files_info_rows ]:
hash = self._GetHash( c, hash_id )
with self._hashes_to_mimes_lock: self._hashes_to_mimes[ hash ] = mime
c.executemany( 'INSERT OR IGNORE INTO files_info VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );', files_info_rows )
service_ids_to_rows = HC.BuildKeyToListDict( [ ( row[ 0 ], row[ 1: ] ) for row in files_info_rows ] )
@ -1379,14 +1392,14 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if len( new_hashes ) > 0:
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, new_hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, new_hashes ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, new_hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, new_hashes ) ] )
if len( deleted_hashes ) > 0:
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, deleted_hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, deleted_hashes ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, deleted_hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, deleted_hashes ) ] )
if len( new_hashes ) > 0 or len( deleted_hashes ) > 0: self.pub( 'notify_new_thumbnails' )
@ -1501,7 +1514,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
service_id = self._GetServiceId( c, service_identifier )
if action == CC.SERVICE_UPDATE_ACCOUNT:
if action == HC.SERVICE_UPDATE_ACCOUNT:
account = service_update.GetInfo()
@ -1510,8 +1523,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
do_new_permissions = True
elif action == CC.SERVICE_UPDATE_ERROR: c.execute( 'UPDATE addresses SET last_error = ? WHERE service_id = ?;', ( int( time.time() ), service_id ) )
elif action == CC.SERVICE_UPDATE_REQUEST_MADE: requests_made.append( ( service_id, service_update.GetInfo() ) )
elif action == HC.SERVICE_UPDATE_ERROR: c.execute( 'UPDATE addresses SET last_error = ? WHERE service_id = ?;', ( int( time.time() ), service_id ) )
elif action == HC.SERVICE_UPDATE_REQUEST_MADE: requests_made.append( ( service_id, service_update.GetInfo() ) )
except: pass
@ -1585,11 +1598,11 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
deleted_mappings = [ deleted_mapping for deleted_mapping in update.GetDeletedMappings() ] # to clear generator
if len( mappings ) > 0:
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings ] )
if len( deleted_mappings ) > 0:
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( tag, hashes ) in deleted_mappings ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( tag, hashes ) in deleted_mappings ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( tag, hashes ) in deleted_mappings ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( tag, hashes ) in deleted_mappings ] )
def _AddUpdate( self, c, service_identifier, update ):
@ -1624,8 +1637,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.executemany( 'INSERT OR IGNORE INTO file_transfers ( service_id_from, service_id_to, hash_id ) VALUES ( ?, ?, ? );', [ ( self._local_file_service_id, service_id, hash_id ) for hash_id in hash_ids ] )
self.pub( 'notify_new_pending' )
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_PENDING, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_PENDING, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
def _AddWebSession( self, c, name, cookies, expiry ):
@ -1715,13 +1728,31 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
local_files_hashes = set()
hashes_to_filenames = {}
for filename in cached_filenames:
try: local_files_hashes.add( filename.decode( 'hex' ) ) # this try ... except is for weird files that might have got into the directory by accident
try:
( hash, ext ) = filename.split( '.' )
hash = hash.decode( 'hex' )
local_files_hashes.add( hash ) # this try ... except is for weird files that might have got into the directory by accident
hashes_to_filenames[ hash ] = filename
except: pass
for hash in local_files_hashes & deletee_hashes: os.remove( HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) )
for hash in local_files_hashes & deletee_hashes:
path = HC.CLIENT_FILES_DIR + os.path.sep + hashes_to_filenames[ hash ]
os.chmod( path, stat.S_IWRITE )
os.remove( path )
# perceptual_hashes and thumbs
@ -1785,7 +1816,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self.pub( 'notify_new_pending' )
self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_DELETE_PENDING, service_identifier ) )
self.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_DELETE_PENDING, service_identifier ) )
def _DoFileQuery( self, c, query_key, search_context ):
@ -1872,8 +1903,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
else:
if file_service_identifier != CC.NULL_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) }
elif tag_service_identifier != CC.NULL_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) }
if file_service_identifier != HC.NULL_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) }
elif tag_service_identifier != HC.NULL_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) }
else: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings UNION SELECT hash_id FROM files_info;' ) }
@ -1885,7 +1916,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if include_current_tags:
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: current_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings;' ) }
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER: current_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings;' ) }
else: current_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) }
zero_tag_hash_ids = current_zero_tag_hash_ids
@ -1893,7 +1924,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if include_pending_tags:
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: pending_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings;' ) }
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER: pending_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings;' ) }
else: pending_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id = ?;', ( tag_service_id, ) ) }
zero_tag_hash_ids.update( pending_zero_tag_hash_ids )
@ -1908,7 +1939,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if include_current_tags:
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: current_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings;' ) }
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER: current_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings;' ) }
else: current_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) }
nonzero_tag_hash_ids = current_nonzero_tag_hash_ids
@ -1916,7 +1947,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if include_pending_tags:
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: pending_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings;' ) }
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER: pending_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings;' ) }
else: pending_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id = ?;', ( tag_service_id, ) ) }
nonzero_tag_hash_ids.update( pending_nonzero_tag_hash_ids )
@ -1996,8 +2027,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
tag_service_identifiers = self._GetServiceIdentifiers( c, ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ) )
file_service_identifiers = self._GetServiceIdentifiers( c, ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ) )
tag_service_identifiers.add( CC.NULL_SERVICE_IDENTIFIER )
file_service_identifiers.add( CC.NULL_SERVICE_IDENTIFIER )
tag_service_identifiers.add( HC.NULL_SERVICE_IDENTIFIER )
file_service_identifiers.add( HC.NULL_SERVICE_IDENTIFIER )
for ( tag_service_identifier, file_service_identifier ) in itertools.product( tag_service_identifiers, file_service_identifiers ): self._GetAutocompleteTags( c, tag_service_identifier = tag_service_identifier, file_service_identifier = file_service_identifier )
@ -2021,13 +2052,13 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
return all_downloads
def _GetAutocompleteTags( self, c, tag_service_identifier = CC.NULL_SERVICE_IDENTIFIER, file_service_identifier = CC.NULL_SERVICE_IDENTIFIER, half_complete_tag = '', include_current = True, include_pending = True ):
def _GetAutocompleteTags( self, c, tag_service_identifier = HC.NULL_SERVICE_IDENTIFIER, file_service_identifier = HC.NULL_SERVICE_IDENTIFIER, half_complete_tag = '', include_current = True, include_pending = True ):
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
tag_service_id = None
if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if file_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
file_service_id = None
@ -2046,7 +2077,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
tag_service_id = self._GetServiceId( c, tag_service_identifier )
if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if file_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
file_service_id = None
@ -2169,9 +2200,9 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
hash_ids = set()
if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if file_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
current_tables_phrase = 'active_mappings'
pending_tables_phrase = 'active_pending_mappings'
@ -2194,7 +2225,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
file_service_id = self._GetServiceId( c, file_service_identifier )
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
current_tables_phrase = '( active_mappings, files_info USING ( hash_id ) )'
pending_tables_phrase = '( active_pending_mappings, files_info USING ( hash_id ) )'
@ -2227,9 +2258,9 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
hash_ids = set()
if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if file_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
current_tables_phrase = 'active_mappings'
pending_tables_phrase = 'active_pending_mappings'
@ -2252,7 +2283,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
file_service_id = self._GetServiceId( c, file_service_identifier )
if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
current_tables_phrase = '( active_mappings, files_info USING ( hash_id ) )'
pending_tables_phrase = '( active_pending_mappings, files_info USING ( hash_id ) )'
@ -3108,6 +3139,19 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
return shutdown_timestamps
def _GetSubscriptions( self, c ):
result = c.execute( 'SELECT subscriptions FROM subscriptions;', ).fetchone()
if result is None: return []
else:
( subscriptions, ) = result
return subscriptions
def _GetTagServicePrecedence( self, c ):
service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence ASC;' ) ]
@ -3208,8 +3252,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.execute( 'DELETE FROM file_inbox WHERE hash_id = ?;', ( hash_id, ) )
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, set( ( hash, ) ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, set( ( hash, ) ) ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ARCHIVE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, set( ( hash, ) ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ARCHIVE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, set( ( hash, ) ) ) ] )
can_add = False
@ -3278,12 +3322,14 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
timestamp = int( time.time() )
dest_path = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' )
dest_path = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ]
if not os.path.exists( dest_path ):
with open( dest_path, 'wb' ) as f: f.write( file )
os.chmod( dest_path, stat.S_IREAD )
if mime in HC.MIMES_WITH_THUMBNAILS:
@ -3322,10 +3368,10 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
for ( service_identifier, tags ) in service_identifiers_to_tags.items():
if service_identifier == CC.LOCAL_TAG_SERVICE_IDENTIFIER: edit_log = [ ( CC.CONTENT_UPDATE_ADD, tag ) for tag in tags ]
else: edit_log = [ ( CC.CONTENT_UPDATE_PENDING, tag ) for tag in tags ]
if service_identifier == HC.LOCAL_TAG_SERVICE_IDENTIFIER: edit_log = [ ( HC.CONTENT_UPDATE_ADD, tag ) for tag in tags ]
else: edit_log = [ ( HC.CONTENT_UPDATE_PENDING, tag ) for tag in tags ]
content_updates = [ HC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), edit_log ) ]
content_updates = [ HC.ContentUpdate( HC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), edit_log ) ]
self._ProcessContentUpdates( c, content_updates )
@ -3355,8 +3401,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
if media_result is not None: self.pub( 'add_media_result', page_key, media_result )
if result == 'successful':
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
self.pub( 'import_done', page_key, result )
@ -3397,8 +3443,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.executemany( 'INSERT OR IGNORE INTO file_petitions ( service_id, hash_id, reason_id ) VALUES ( ?, ?, ? );', [ ( service_id, hash_id, reason_id ) for hash_id in hash_ids ] )
self.pub( 'notify_new_pending' )
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_PETITION, service_identifier, hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_PETITION, service_identifier, hashes ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_PETITION, service_identifier, hashes ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_PETITION, service_identifier, hashes ) ] )
def _ProcessContentUpdates( self, c, content_updates ):
@ -3419,10 +3465,10 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
hash_ids = self._GetHashIds( c, hashes )
if action == CC.CONTENT_UPDATE_ARCHIVE: self._ArchiveFiles( c, hash_ids )
elif action == CC.CONTENT_UPDATE_INBOX: self._InboxFiles( c, hash_ids )
elif action == CC.CONTENT_UPDATE_DELETE: self._DeleteFiles( c, service_id, hash_ids )
elif action == CC.CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ARCHIVE: self._ArchiveFiles( c, hash_ids )
elif action == HC.CONTENT_UPDATE_INBOX: self._InboxFiles( c, hash_ids )
elif action == HC.CONTENT_UPDATE_DELETE: self._DeleteFiles( c, service_id, hash_ids )
elif action == HC.CONTENT_UPDATE_ADD:
# this is really 'uploaded' rather than a strict add, so may need to improve it in future!
@ -3439,7 +3485,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
info = content_update.GetInfo()
if action == CC.CONTENT_UPDATE_EDIT_LOG:
if action == HC.CONTENT_UPDATE_EDIT_LOG:
splayed_hash_ids = HC.SplayListForDB( hash_ids )
@ -3452,7 +3498,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
for ( action, info ) in edit_log:
if action == CC.CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ADD:
tag = info
@ -3462,7 +3508,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
mappings_ids.append( ( namespace_id, tag_id, hash_ids ) )
elif action == CC.CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
tag = info
@ -3485,7 +3531,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
info = content_update.GetInfo()
if action == CC.CONTENT_UPDATE_ADD:
if action == HC.CONTENT_UPDATE_ADD:
tag = info
@ -3493,7 +3539,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self._UpdateMappings( c, service_id, [ ( namespace_id, tag_id, hash_ids ) ], [] )
elif action == CC.CONTENT_UPDATE_DELETE:
elif action == HC.CONTENT_UPDATE_DELETE:
tag = info
@ -3501,7 +3547,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self._UpdateMappings( c, service_id, [], [ ( namespace_id, tag_id, hash_ids ) ] )
elif action == CC.CONTENT_UPDATE_EDIT_LOG:
elif action == HC.CONTENT_UPDATE_EDIT_LOG:
( precedence, ) = c.execute( 'SELECT precedence FROM tag_service_precedence WHERE service_id = ?;', ( service_id, ) ).fetchone()
@ -3517,7 +3563,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
for ( action, info ) in edit_log:
if action == CC.CONTENT_UPDATE_PENDING:
if action == HC.CONTENT_UPDATE_PENDING:
tag = info
@ -3545,7 +3591,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self._UpdateAutocompleteTagCacheFromActivePendingTags( c, namespace_id, tag_id, valid_hash_ids, 1 )
elif action == CC.CONTENT_UPDATE_RESCIND_PENDING:
elif action == HC.CONTENT_UPDATE_RESCIND_PENDING:
tag = info
@ -3567,7 +3613,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self._UpdateAutocompleteTagCacheFromActivePendingTags( c, namespace_id, tag_id, actual_hash_ids_i_can_delete, -1 )
elif action == CC.CONTENT_UPDATE_PETITION:
elif action == HC.CONTENT_UPDATE_PETITION:
( tag, reason ) = info
@ -3577,7 +3623,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.executemany( 'INSERT OR IGNORE INTO mapping_petitions VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, namespace_id, tag_id, hash_id, reason_id ) for hash_id in hash_ids ] )
elif action == CC.CONTENT_UPDATE_RESCIND_PETITION:
elif action == HC.CONTENT_UPDATE_RESCIND_PETITION:
tag = info
@ -3602,7 +3648,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
info = content_update.GetInfo()
if action == CC.CONTENT_UPDATE_RATING:
if action == HC.CONTENT_UPDATE_RATING:
rating = info
@ -3628,7 +3674,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
# and then do a thing here where it looks up remote services links and then pends/rescinds pends appropriately
elif action == CC.CONTENT_UPDATE_RATINGS_FILTER:
elif action == HC.CONTENT_UPDATE_RATINGS_FILTER:
( min, max ) = info
@ -3725,7 +3771,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self._AddService( c, new_service_identifier, credentials, extra_info )
self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_RESET, service_identifier, new_service_identifier ) )
self.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_RESET, service_identifier, new_service_identifier ) )
self.pub( 'notify_new_pending' )
self.pub( 'permissions_are_stale' )
self.pub( 'log_message', 'database', 'reset ' + service_name )
@ -3752,6 +3798,13 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.execute( 'INSERT INTO pixiv_account ( pixiv_id, password ) VALUES ( ?, ? );', ( pixiv_id, password ) )
def _SetSubscriptions( self, c, subscriptions ):
c.execute( 'DELETE FROM subscriptions;' )
c.execute( 'INSERT INTO subscriptions ( subscriptions ) VALUES ( ? );', ( subscriptions, ) )
def _SetTagServicePrecedence( self, c, service_identifiers ):
del self._tag_service_precedence[:]
@ -4044,7 +4097,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.execute( 'DELETE FROM services WHERE service_id = ?;', ( client_service_id, ) )
self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_RESET, client_service_identifier ) )
self.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_RESET, client_service_identifier ) )
if len( names ) > 0: recalc_active_mappings = True
@ -4077,7 +4130,7 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
c.execute( 'DELETE FROM services WHERE service_id = ?;', ( service_id, ) )
self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_RESET, service_identifier ) )
self.pub( 'service_update_db', HC.ServiceUpdate( HC.SERVICE_UPDATE_RESET, service_identifier ) )
service_type = service_identifier.GetType()
@ -4248,8 +4301,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
content_updates = []
content_updates += [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, hash_ids ), info = self._GetNamespaceTag( c, namespace_id, tag_id ) ) for ( namespace_id, tag_id, hash_ids ) in mappings_ids ]
content_updates += [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ), info = self._GetNamespaceTag( c, namespace_id, tag_id ) ) for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids ]
content_updates += [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, hash_ids ), info = self._GetNamespaceTag( c, namespace_id, tag_id ) ) for ( namespace_id, tag_id, hash_ids ) in mappings_ids ]
content_updates += [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ), info = self._GetNamespaceTag( c, namespace_id, tag_id ) ) for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids ]
HC.pubsub.pub( 'progress_update', job_key, 6, 7, u'saving changes to gui' )
@ -4318,8 +4371,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
HC.pubsub.pub( 'progress_update', job_key, num_uploads + 2, num_uploads + 4, u'saving changes to gui' )
if len( good_hash_ids ) > 0:
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, good_hash_ids ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, good_hash_ids ) ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, good_hash_ids ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, good_hash_ids ) ) ] )
if num_petitions > 0:
@ -4336,8 +4389,8 @@ class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ):
self._DeleteFiles( c, service_id, hash_ids )
self.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
self.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
self.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ) ) ] )
except Exception as e: raise Exception( 'Encountered an error while trying to uploads petitions to '+ service_name + ':' + os.linesep + unicode( e ) )
@ -4401,6 +4454,8 @@ class DB( ServiceDB ):
( self._local_file_service_id, ) = c.execute( 'SELECT service_id FROM services WHERE type = ?;', ( HC.LOCAL_FILE, ) ).fetchone()
self._InitHashToMimeCache( c )
( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
self._tag_service_precedence = self._GetTagServicePrecedence( c )
@ -4410,6 +4465,13 @@ class DB( ServiceDB ):
threading.Thread( target = self.MainLoop, name = 'Database Main Loop' ).start()
def _InitHashToMimeCache( self, c ):
self._hashes_to_mimes_lock = threading.Lock()
self._hashes_to_mimes = { hash : mime for ( hash, mime ) in c.execute( 'SELECT hash, mime FROM hashes, files_info USING ( hash_id ) WHERE service_id = ?;', ( self._local_file_service_id, ) ) }
def _InitPostGUI( self ):
port = HC.DEFAULT_LOCAL_FILE_PORT
@ -4432,6 +4494,7 @@ class DB( ServiceDB ):
HC.DAEMONWorker( 'DownloadFiles', self.DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) )
HC.DAEMONWorker( 'DownloadThumbnails', self.DAEMONDownloadThumbnails, ( 'notify_new_permissions', 'notify_new_thumbnails' ) )
HC.DAEMONWorker( 'ResizeThumbnails', self.DAEMONResizeThumbnails, init_wait = 1200 )
HC.DAEMONWorker( 'SynchroniseAccounts', self.DAEMONSynchroniseAccounts, ( 'notify_new_services', 'permissions_are_stale' ) )
HC.DAEMONWorker( 'SynchroniseMessages', self.DAEMONSynchroniseMessages, ( 'notify_new_permissions', 'notify_check_messages' ), period = 60 )
HC.DAEMONWorker( 'SynchroniseRepositories', self.DAEMONSynchroniseRepositories, ( 'notify_new_permissions', ) )
@ -4669,6 +4732,8 @@ class DB( ServiceDB ):
c.execute( 'CREATE TABLE statuses ( status_id INTEGER PRIMARY KEY, status TEXT );' )
c.execute( 'CREATE UNIQUE INDEX statuses_status_index ON statuses ( status );' )
c.execute( 'CREATE TABLE subscriptions ( subscriptions TEXT_YAML );' )
c.execute( 'CREATE TABLE tag_service_precedence ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, precedence INTEGER );' )
c.execute( 'CREATE TABLE tags ( tag_id INTEGER PRIMARY KEY, tag TEXT );' )
@ -4806,7 +4871,7 @@ class DB( ServiceDB ):
CLIENT_DEFAULT_OPTIONS[ 'shortcuts' ] = shortcuts
CLIENT_DEFAULT_OPTIONS[ 'default_tag_repository' ] = CC.LOCAL_TAG_SERVICE_IDENTIFIER
CLIENT_DEFAULT_OPTIONS[ 'default_tag_repository' ] = HC.LOCAL_TAG_SERVICE_IDENTIFIER
CLIENT_DEFAULT_OPTIONS[ 'default_tag_sort' ] = CC.SORT_BY_LEXICOGRAPHIC_ASC
c.execute( 'INSERT INTO options ( options ) VALUES ( ? );', ( CLIENT_DEFAULT_OPTIONS, ) )
@ -5014,6 +5079,34 @@ class DB( ServiceDB ):
c.execute( 'UPDATE options SET options = ?;', ( self._options, ) )
if version < 65:
wx.GetApp().SetSplashText( 'renaming db files' )
filenames = dircache.listdir( HC.CLIENT_FILES_DIR )
i = 1
for filename in filenames:
old_path = HC.CLIENT_FILES_DIR + os.path.sep + filename
mime = HC.GetMimeFromPath( old_path )
new_path = old_path + HC.mime_ext_lookup[ mime ]
shutil.move( old_path, new_path )
os.chmod( new_path, stat.S_IREAD )
i += 1
if i % 250 == 0: wx.GetApp().SetSplashText( 'renaming file ' + HC.ConvertIntToPrettyString( i ) + '/' + HC.ConvertIntToPrettyString( len( filenames ) ) )
c.execute( 'CREATE TABLE subscriptions ( subscriptions TEXT_YAML );' )
unknown_account = CC.GetUnknownAccount()
unknown_account.MakeStale()
@ -5921,7 +6014,7 @@ class DB( ServiceDB ):
( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone()
self._options[ 'default_tag_repository' ] = CC.LOCAL_TAG_SERVICE_IDENTIFIER
self._options[ 'default_tag_repository' ] = HC.LOCAL_TAG_SERVICE_IDENTIFIER
c.execute( 'UPDATE options SET options = ?;', ( self._options, ) )
@ -6134,8 +6227,8 @@ class DB( ServiceDB ):
self.Write( 'import_file', HC.LOW_PRIORITY, file )
HC.pubsub.pub( 'content_updates_data', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
HC.pubsub.pub( 'content_updates_gui', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
HC.pubsub.pub( 'content_updates_data', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
HC.pubsub.pub( 'content_updates_gui', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] )
self.pub( 'log_message', 'download files daemon', 'downloaded ' + hash.encode( 'hex' ) + ' from ' + file_repository.GetServiceIdentifier().GetName() )
@ -6204,6 +6297,50 @@ class DB( ServiceDB ):
def DAEMONFlushServiceUpdates( self, update_log ): self.Write( 'service_updates', HC.HIGH_PRIORITY, update_log )
def DAEMONResizeThumbnails( self ):
all_thumbnail_paths = dircache.listdir( HC.CLIENT_THUMBNAILS_DIR )
full_size_thumbnail_paths = { path for path in all_thumbnail_paths if not path.endswith( '_resized' ) }
resized_thumbnail_paths = { path for path in all_thumbnail_paths if path.endswith( '_resized' ) }
thumbnail_paths_to_render = full_size_thumbnail_paths.difference( resized_thumbnail_paths )
thumbnail_paths_to_render = list( thumbnail_paths_to_render )
random.shuffle( thumbnail_paths_to_render )
i = 0
limit = max( 100, len( thumbnail_paths_to_render ) / 10 )
for thumbnail_path in thumbnail_paths_to_render:
try:
with open( HC.CLIENT_THUMBNAILS_DIR + os.path.sep + thumbnail_path, 'rb' ) as f: thumbnail = f.read()
thumbnail_resized = HydrusImageHandling.GenerateThumbnailFileFromFile( thumbnail, self._options[ 'thumbnail_dimensions' ] )
thumbnail_resized_path_to = thumbnail_path + '_resized'
with open( HC.CLIENT_THUMBNAILS_DIR + os.path.sep + thumbnail_resized_path_to, 'wb' ) as f: f.write( thumbnail_resized )
except: print( traceback.format_exc() )
if limit > 10000: time.sleep( 0.10 )
elif limit > 1000: time.sleep( 0.5 )
else: time.sleep( 1 )
i += 1
if i > limit: break
if HC.shutdown: break
def DAEMONSynchroniseAccounts( self ):
services = self.Read( 'services', HC.LOW_PRIORITY, HC.RESTRICTED_SERVICES )
@ -6521,43 +6658,108 @@ class DB( ServiceDB ):
def THREADResizeThumbnails( self ):
def DAEMONSynchroniseSubscriptions( self ):
all_thumbnail_paths = dircache.listdir( HC.CLIENT_THUMBNAILS_DIR )
subscriptions = wx.GetApp().Read( 'subscriptions' )
full_size_thumbnail_paths = { path for path in all_thumbnail_paths if not path.endswith( '_resized' ) }
updated_subscriptions = []
resized_thumbnail_paths = { path for path in all_thumbnail_paths if path.endswith( '_resized' ) }
thumbnail_paths_to_render = full_size_thumbnail_paths.difference( resized_thumbnail_paths )
i = 0
limit = max( 100, len( thumbnail_paths_to_render ) / 10 )
for thumbnail_path in thumbnail_paths_to_render:
for ( subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) in subscriptions:
try:
with open( HC.CLIENT_THUMBNAILS_DIR + os.path.sep + thumbnail_path, 'rb' ) as f: thumbnail = f.read()
now = int( time.time() )
thumbnail_resized = HydrusImageHandling.GenerateThumbnailFileFromFile( thumbnail, self._options[ 'thumbnail_dimensions' ] )
do_tags = len( advanced_tag_options ) > 0
thumbnail_resized_path_to = thumbnail_path + '_resized'
if last_checked + ( frequency_type * frequency_number ) > now:
if subscription_type == HC.SUBSCRIPTION_TYPE_BOORU:
( booru_name, query_type ) = query_type
try: booru = wx.GetApp().Read( 'booru', booru_name )
except: raise Exception( 'While attempting to execute a subscription on booru ' + name + ', the client could not find that booru in the db.' )
args = ( booru, query )
elif subscription_type == HC.SUBSCRIPTION_TYPE_HENTAI_FOUNTDRY: args = ( query_type, query )
elif subscription_type == HC.SUBSCRIPTION_TYPE_PIXIV: args = ( query_type, query )
else: args = ( query, )
downloader = HydrusDownloading.GetDownloader( subscription_type, *args )
all_url_args = set()
while len( urls.intersection( url_cache ) ) == 0:
page_of_url_args = downloader.GetAnotherPage()
if len( page_of_url_args ) == 0: break
else: all_url_args.update( page_of_url_args )
if len( all_url_args ) > 0:
for url_args in all_url_args:
url = url_args[0]
url_cache.add( url )
( status, hash ) = wx.GetApp().Read( 'url_status', url )
if status == 'deleted' and 'exclude_deleted_files' not in advanced_import_options: status = 'new'
if status == 'redundant':
if do_tags:
try:
tags = downloader.GetTags( *url_args )
service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
content_updates = HydrusDownloading.ConvertServiceIdentifiersToTagsToContentUpdates( service_identifiers_to_tags )
wx.GetApp().Write( 'content_updates', content_updates )
except: pass
elif status == 'new':
if do_tags: ( file, tags ) = downloader.GetFileAndTags( *url_args )
else:
file = downloader.GetFile( *url_args )
tags = []
service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
wx.GetApp().Write( 'import', file, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, url = url )
last_checked = now
with open( HC.CLIENT_THUMBNAILS_DIR + os.path.sep + thumbnail_resized_path_to, 'wb' ) as f: f.write( thumbnail_resized )
except:
last_checked = now + HC.UPDATE_DURATION
# write an error to the normal log, maybe print to the text file log too
except: print( traceback.format_exc() )
time.sleep( 1 )
i += 1
if i > limit: break
if HC.shutdown: break
updated_subscriptions.append( ( subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) )
wx.GetApp().Write( 'subscriptions', updated_subscriptions )
def ProcessRequest( self, request_type, request, request_args ):
@ -6696,6 +6898,7 @@ class DB( ServiceDB ):
elif action == 'services': result = self._GetServices( c, *args, **kwargs )
elif action == 'shutdown_timestamps': result = self._GetShutdownTimestamps( c, *args, **kwargs )
elif action == 'status_num_inbox': result = self._DoStatusNumInbox( c, *args, **kwargs )
elif action == 'subscriptions': result = self._GetSubscriptions( c, *args, **kwargs )
elif action == 'tag_service_precedence': result = self._tag_service_precedence
elif action == 'thumbnail': result = self._GetThumbnail( *args, **kwargs )
elif action == 'thumbnail_hashes_i_should_have': result = self._GetThumbnailHashesIShouldHave( c, *args, **kwargs )
@ -6743,6 +6946,7 @@ class DB( ServiceDB ):
elif action == 'session': self._AddSession( c, *args, **kwargs )
elif action == 'set_password': self._SetPassword( c, *args, **kwargs )
elif action == 'set_tag_service_precedence': self._SetTagServicePrecedence( c, *args, **kwargs )
elif action == 'subscriptions': self._SetSubscriptions( c, *args, **kwargs )
elif action == 'thumbnails': self._AddThumbnails( c, *args, **kwargs )
elif action == 'update': self._AddUpdate( c, *args, **kwargs )
elif action == 'update_boorus': self._UpdateBoorus( c, *args, **kwargs )

View File

@ -100,7 +100,7 @@ class FrameGUI( ClientGUICommon.Frame ):
self.Show( True )
wx.CallAfter( self._NewPageQuery, CC.LOCAL_FILE_SERVICE_IDENTIFIER )
wx.CallAfter( self._NewPageQuery, HC.LOCAL_FILE_SERVICE_IDENTIFIER )
def _THREADUploadPending( self, service_identifier, job_key, cancel_event ):
@ -156,8 +156,8 @@ class FrameGUI( ClientGUICommon.Frame ):
content_updates = []
content_updates += [ HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings_object ]
content_updates += [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( reason, tag, hashes ) in petitions_object ]
content_updates += [ HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings_object ]
content_updates += [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( reason, tag, hashes ) in petitions_object ]
wx.GetApp().Write( 'content_updates', content_updates )
@ -224,8 +224,8 @@ class FrameGUI( ClientGUICommon.Frame ):
content_updates = []
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, good_hashes ) )
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, petitions_object.GetHashes() ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_ADD, service_identifier, good_hashes ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, service_identifier, petitions_object.GetHashes() ) )
wx.GetApp().Write( 'content_updates', content_updates )
@ -644,6 +644,15 @@ class FrameGUI( ClientGUICommon.Frame ):
except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() )
def _ManageSubscriptions( self ):
try:
with ClientGUIDialogs.DialogManageSubscriptions( self ) as dlg: dlg.ShowModal()
except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() )
def _ManageTagServicePrecedence( self ):
try:
@ -1012,6 +1021,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
elif command == 'manage_imageboards': self._ManageImageboards()
elif command == 'manage_pixiv_account': self._ManagePixivAccount()
elif command == 'manage_services': self._ManageServices( data )
elif command == 'manage_subscriptions': self._ManageSubscriptions()
elif command == 'manage_tag_service_precedence': self._ManageTagServicePrecedence()
elif command == 'modify_account': self._ModifyAccount( data )
elif command == 'new_accounts': self._NewAccounts( data )
@ -1144,7 +1154,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def NewSimilarTo( self, file_service_identifier, hash ): self._NewPageQuery( file_service_identifier, [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, ( hash, 5 ) ), None ) ] )
def NewSimilarTo( self, file_service_identifier, hash ): self._NewPageQuery( file_service_identifier, initial_predicates = [ HC.Predicate( HC.PREDICATE_TYPE_SYSTEM, ( HC.SYSTEM_PREDICATE_TYPE_SIMILAR_TO, ( hash, 5 ) ), None ) ] )
def RefreshAcceleratorTable( self ):
@ -1195,7 +1205,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'import' ), p( '&Import Files' ), p( 'Add new files to the database.' ) )
file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'open_export_folder' ), p( 'Open E&xport Folder' ), p( 'Open the export folder so you can easily access files you have exported.' ) )
file.AppendSeparator()
file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'options', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), p( '&Options' ) )
file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'options', HC.LOCAL_FILE_SERVICE_IDENTIFIER ), p( '&Options' ) )
file.AppendSeparator()
file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'exit' ), p( '&Exit' ) )
@ -1207,7 +1217,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
view.AppendSeparator()
view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page' ), p( 'Pick a New &Page' ), p( 'Pick a new page.' ) )
view.AppendSeparator()
view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page_query', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), p( '&New Local Search' ), p( 'Open a new search tab for your files' ) )
view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page_query', HC.LOCAL_FILE_SERVICE_IDENTIFIER ), p( '&New Local Search' ), p( 'Open a new search tab for your files' ) )
for s_i in file_service_identifiers: view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page_query', s_i ), p( 'New ' + s_i.GetName() + ' Search' ), p( 'Open a new search tab for ' + s_i.GetName() + '.' ) )
if len( petition_resolve_tag_service_identifiers ) > 0 or len( petition_resolve_file_service_identifiers ) > 0:
@ -1272,6 +1282,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_imageboards' ), p( 'Manage &Imageboards' ), p( 'Change the html POST form information for imageboards to dump to.' ) )
services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_4chan_pass' ), p( 'Manage &4chan Pass' ), p( 'Set up your 4chan pass, so you can dump without having to fill in a captcha.' ) )
services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_pixiv_account' ), p( 'Manage &Pixiv Account' ), p( 'Set up your pixiv username and password.' ) )
services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_subscriptions' ), p( 'Manage &Subscriptions' ), p( 'Change the queries you want the client to regularly import from.' ) )
services.AppendSeparator()
services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_contacts' ), p( 'Manage &Contacts and Identities' ), p( 'Change the names and addresses of the people you talk to.' ) )
services.AppendSeparator()
@ -1580,7 +1591,7 @@ class FramePageChooser( ClientGUICommon.Frame ):
file_repos = [ ( 'page_query', service_identifier ) for service_identifier in [ service.GetServiceIdentifier() for service in self._services ] if service_identifier.GetType() == HC.FILE_REPOSITORY ]
entries = [ ( 'page_query', CC.LOCAL_FILE_SERVICE_IDENTIFIER ) ] + file_repos
entries = [ ( 'page_query', HC.LOCAL_FILE_SERVICE_IDENTIFIER ) ] + file_repos
elif menu_keyword == 'download': entries = [ ( 'page_import_url', None ), ( 'page_import_thread_watcher', None ), ( 'menu', 'gallery' ) ]
elif menu_keyword == 'gallery': entries = [ ( 'page_import_booru', None ), ( 'page_import_gallery', 'giphy' ), ( 'page_import_gallery', 'deviant art by artist' ), ( 'menu', 'hentai foundry' ), ( 'menu', 'pixiv' ), ( 'page_import_gallery', 'tumblr' ) ]
@ -2211,9 +2222,9 @@ class FrameReviewServicesServicePanel( wx.ScrolledWindow ):
action = update.GetAction()
if action == CC.SERVICE_UPDATE_RESET: self._service_identifier = update.GetInfo()
if action == HC.SERVICE_UPDATE_RESET: self._service_identifier = update.GetInfo()
if action in ( CC.SERVICE_UPDATE_ACCOUNT, CC.SERVICE_UPDATE_REQUEST_MADE ): wx.CallLater( 200, self._DisplayAccountInfo )
if action in ( HC.SERVICE_UPDATE_ACCOUNT, HC.SERVICE_UPDATE_REQUEST_MADE ): wx.CallLater( 200, self._DisplayAccountInfo )
else:
wx.CallLater( 200, self._DisplayService )
wx.CallLater( 400, self.Layout ) # ugly hack, but it works for now

View File

@ -419,7 +419,7 @@ class Canvas():
file_hash = self._current_display_media.GetHash()
self._media_window.movie = HC.CLIENT_FILES_DIR + os.path.sep + file_hash.encode( 'hex' )
self._media_window.movie = HC.CLIENT_FILES_DIR + os.path.sep + file_hash.encode( 'hex' ) + '.swf'
elif self._current_display_media.GetMime() == HC.VIDEO_FLV:
@ -428,7 +428,7 @@ class Canvas():
file_hash = self._current_display_media.GetHash()
flash_vars = []
flash_vars.append( ( 'flv', HC.CLIENT_FILES_DIR + os.path.sep + file_hash.encode( 'hex' ) ) )
flash_vars.append( ( 'flv', HC.CLIENT_FILES_DIR + os.path.sep + file_hash.encode( 'hex' ) + '.flv' ) )
flash_vars.append( ( 'margin', '0' ) )
flash_vars.append( ( 'autoload', '1' ) )
flash_vars.append( ( 'autoplay', '1' ) )
@ -961,7 +961,7 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaList ):
else: self.SetMedia( self._GetMedia( { first_hash } )[0] )
def _Archive( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _Archive( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ARCHIVE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _CopyLocalUrlToClipboard( self ):
@ -980,7 +980,7 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaList ):
if wx.TheClipboard.Open():
data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._current_media.GetHash().encode( 'hex' ) )
data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._current_media.GetHash().encode( 'hex' ) + HC.mime_ext_lookup[ self._current_media.GetMime() ] )
wx.TheClipboard.SetData( data )
@ -993,13 +993,13 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaList ):
with ClientGUIDialogs.DialogYesNo( self, 'Delete this file from the database?' ) as dlg:
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
self.SetFocus() # annoying bug because of the modal dialog
def _Inbox( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_INBOX, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _Inbox( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_INBOX, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _PausePlaySlideshow( self ):
@ -1173,7 +1173,7 @@ class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaList ):
if self._current_media.HasInbox(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'archive' ), '&archive' )
if self._current_media.HasArchive(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'inbox' ), 'return to &inbox' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), '&delete' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', HC.LOCAL_FILE_SERVICE_IDENTIFIER ), '&delete' )
menu.AppendSeparator()
@ -1228,7 +1228,7 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ):
self.SetMedia( self._GetFirst() )
def _Archive( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _Archive( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ARCHIVE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _CopyLocalUrlToClipboard( self ):
@ -1247,7 +1247,7 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ):
if wx.TheClipboard.Open():
data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._current_media.GetHash().encode( 'hex' ) )
data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._current_media.GetHash().encode( 'hex' ) + HC.mime_ext_lookup[ self._current_media.GetMime() ] )
wx.TheClipboard.SetData( data )
@ -1260,13 +1260,13 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ):
with ClientGUIDialogs.DialogYesNo( self, 'Delete this file from the database?' ) as dlg:
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
self.SetFocus() # annoying bug because of the modal dialog
def _Inbox( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_INBOX, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def _Inbox( self ): wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_INBOX, HC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] )
def EventKeyDown( self, event ):
@ -1316,37 +1316,37 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ):
if service_type == HC.LOCAL_TAG:
if action in current: edit_log = [ ( CC.CONTENT_UPDATE_DELETE, action ) ]
else: edit_log = [ ( CC.CONTENT_UPDATE_ADD, action ) ]
if action in current: edit_log = [ ( HC.CONTENT_UPDATE_DELETE, action ) ]
else: edit_log = [ ( HC.CONTENT_UPDATE_ADD, action ) ]
else:
if action in current:
if action in petitioned: edit_log = [ ( CC.CONTENT_UPDATE_RESCIND_PETITION, action ) ]
if action in petitioned: edit_log = [ ( HC.CONTENT_UPDATE_RESCIND_PETITION, action ) ]
else:
message = 'Enter a reason for this tag to be removed. A janitor will review your petition.'
with wx.TextEntryDialog( self, message ) as dlg:
if dlg.ShowModal() == wx.ID_OK: edit_log = [ ( CC.CONTENT_UPDATE_PETITION, ( action, dlg.GetValue() ) ) ]
if dlg.ShowModal() == wx.ID_OK: edit_log = [ ( HC.CONTENT_UPDATE_PETITION, ( action, dlg.GetValue() ) ) ]
else: return
else:
if action in pending: edit_log = [ ( CC.CONTENT_UPDATE_RESCIND_PENDING, action ) ]
else: edit_log = [ ( CC.CONTENT_UPDATE_PENDING, action ) ]
if action in pending: edit_log = [ ( HC.CONTENT_UPDATE_RESCIND_PENDING, action ) ]
else: edit_log = [ ( HC.CONTENT_UPDATE_PENDING, action ) ]
content_update = HC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( self._current_media.GetHash(), ), info = edit_log )
content_update = HC.ContentUpdate( HC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( self._current_media.GetHash(), ), info = edit_log )
elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ):
content_update = HC.ContentUpdate( CC.CONTENT_UPDATE_RATING, service_identifier, ( self._current_media.GetHash(), ), info = action )
content_update = HC.ContentUpdate( HC.CONTENT_UPDATE_RATING, service_identifier, ( self._current_media.GetHash(), ), info = action )
wx.GetApp().Write( 'content_updates', ( content_update, ) )
@ -1461,7 +1461,7 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ):
if self._current_media.HasInbox(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'archive' ), '&archive' )
if self._current_media.HasArchive(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'inbox' ), 'return to &inbox' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), '&delete' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', HC.LOCAL_FILE_SERVICE_IDENTIFIER ), '&delete' )
menu.AppendSeparator()
@ -1556,8 +1556,8 @@ class CanvasFullscreenMediaListFilter( CanvasFullscreenMediaList ):
content_updates = []
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, self._deleted_hashes ) )
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, self._kept_hashes ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, self._deleted_hashes ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_ARCHIVE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, self._kept_hashes ) )
wx.GetApp().Write( 'content_updates', content_updates )
@ -1698,7 +1698,7 @@ class RatingsFilterFrameLike( CanvasFullscreenMediaListFilter ):
def __init__( self, my_parent, page_key, service_identifier, media_results ):
CanvasFullscreenMediaListFilter.__init__( self, my_parent, page_key, CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
CanvasFullscreenMediaListFilter.__init__( self, my_parent, page_key, HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
self._rating_service_identifier = service_identifier
self._service = wx.GetApp().Read( 'service', service_identifier )
@ -1733,8 +1733,8 @@ class RatingsFilterFrameLike( CanvasFullscreenMediaListFilter ):
content_updates = []
content_updates.extend( [ HC.ContentUpdate( CC.CONTENT_UPDATE_RATING, self._rating_service_identifier, ( hash, ), info = 0.0 ) for hash in self._deleted_hashes ] )
content_updates.extend( [ HC.ContentUpdate( CC.CONTENT_UPDATE_RATING, self._rating_service_identifier, ( hash, ), info = 1.0 ) for hash in self._kept_hashes ] )
content_updates.extend( [ HC.ContentUpdate( HC.CONTENT_UPDATE_RATING, self._rating_service_identifier, ( hash, ), info = 0.0 ) for hash in self._deleted_hashes ] )
content_updates.extend( [ HC.ContentUpdate( HC.CONTENT_UPDATE_RATING, self._rating_service_identifier, ( hash, ), info = 1.0 ) for hash in self._kept_hashes ] )
wx.GetApp().Write( 'content_updates', content_updates )
@ -1770,7 +1770,7 @@ class RatingsFilterFrameNumerical( ClientGUICommon.Frame ):
self._service_identifier = service_identifier
self._media_still_to_rate = { ClientGUIMixins.MediaSingleton( media_result ) for media_result in media_results }
self._file_query_result = CC.FileQueryResult( CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
self._file_query_result = CC.FileQueryResult( HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
if service_identifier.GetType() == HC.LOCAL_RATING_LIKE: self._score_gap = 1.0
else:
@ -2331,8 +2331,8 @@ class RatingsFilterFrameNumerical( ClientGUICommon.Frame ):
content_updates = []
content_updates.extend( [ HC.ContentUpdate( CC.CONTENT_UPDATE_RATING, self._service_identifier, ( hash, ), info = rating ) for ( hash, rating ) in certain_ratings ] )
content_updates.extend( [ HC.ContentUpdate( CC.CONTENT_UPDATE_RATINGS_FILTER, self._service_identifier, ( hash, ), info = ( min, max ) ) for ( hash, min, max ) in uncertain_ratings ] )
content_updates.extend( [ HC.ContentUpdate( HC.CONTENT_UPDATE_RATING, self._service_identifier, ( hash, ), info = rating ) for ( hash, rating ) in certain_ratings ] )
content_updates.extend( [ HC.ContentUpdate( HC.CONTENT_UPDATE_RATINGS_FILTER, self._service_identifier, ( hash, ), info = ( min, max ) ) for ( hash, min, max ) in uncertain_ratings ] )
wx.GetApp().Write( 'content_updates', content_updates )
@ -2437,7 +2437,7 @@ class RatingsFilterFrameNumerical( ClientGUICommon.Frame ):
def __init__( self, parent ):
wx.Window.__init__( self, parent, style = wx.SIMPLE_BORDER | wx.WANTS_CHARS )
Canvas.__init__( self, CC.LOCAL_FILE_SERVICE_IDENTIFIER, wx.GetApp().GetFullscreenImageCache() )
Canvas.__init__( self, HC.LOCAL_FILE_SERVICE_IDENTIFIER, wx.GetApp().GetFullscreenImageCache() )
wx.CallAfter( self.Refresh )
@ -2942,16 +2942,10 @@ class PDFButton( wx.Button ):
def EventButton( self, event ):
existing_path = HC.CLIENT_FILES_DIR + os.path.sep + self._hash.encode( 'hex' )
path = HC.CLIENT_FILES_DIR + os.path.sep + self._hash.encode( 'hex' ) + '.pdf'
new_path = HC.TEMP_DIR + os.path.sep + self._hash.encode( 'hex' ) + '.pdf'
try:
if not os.path.exists( new_path ): shutil.copy( existing_path, new_path )
except: pass
# os.system( 'start ' + new_path )
subprocess.call( 'start "" "' + new_path + '"', shell = True )
# os.system( 'start ' + path )
subprocess.call( 'start "" "' + path + '"', shell = True )
class Text( wx.Window ):

View File

@ -389,13 +389,13 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
self._file_service_identifier = file_service_identifier
self._tag_service_identifier = tag_service_identifier
if self._file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known files'
if self._file_service_identifier == HC.NULL_SERVICE_IDENTIFIER: name = 'all known files'
else: name = self._file_service_identifier.GetName()
self._file_repo_button = wx.Button( self._dropdown_window, label = name )
self._file_repo_button.Bind( wx.EVT_BUTTON, self.EventFileButton )
if self._tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known tags'
if self._tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER: name = 'all known tags'
else: name = self._tag_service_identifier.GetName()
self._tag_repo_button = wx.Button( self._dropdown_window, label = name )
@ -423,8 +423,8 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
menu = wx.Menu()
if len( service_identifiers ) > 0: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', CC.NULL_SERVICE_IDENTIFIER ), 'all known files' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), 'local files' )
if len( service_identifiers ) > 0: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', HC.NULL_SERVICE_IDENTIFIER ), 'all known files' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', HC.LOCAL_FILE_SERVICE_IDENTIFIER ), 'local files' )
for service_identifier in service_identifiers: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', service_identifier ), service_identifier.GetName() )
@ -449,7 +449,7 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
self._file_service_identifier = service_identifier
if service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known files'
if service_identifier == HC.NULL_SERVICE_IDENTIFIER: name = 'all known files'
else: name = service_identifier.GetName()
self._file_repo_button.SetLabel( name )
@ -462,7 +462,7 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
self._tag_service_identifier = service_identifier
if service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known tags'
if service_identifier == HC.NULL_SERVICE_IDENTIFIER: name = 'all known tags'
else: name = service_identifier.GetName()
self._tag_repo_button.SetLabel( name )
@ -495,8 +495,8 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
menu = wx.Menu()
if len( service_identifiers ) > 0: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', CC.NULL_SERVICE_IDENTIFIER ), 'all known tags' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', CC.LOCAL_TAG_SERVICE_IDENTIFIER ), 'local tags' )
if len( service_identifiers ) > 0: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', HC.NULL_SERVICE_IDENTIFIER ), 'all known tags' )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', HC.LOCAL_TAG_SERVICE_IDENTIFIER ), 'local tags' )
for service_identifier in service_identifiers: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', service_identifier ), service_identifier.GetName() )
@ -578,7 +578,7 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
self._first_letters = ''
self._current_namespace = ''
if self._file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: s_i = self._tag_service_identifier
if self._file_service_identifier == HC.NULL_SERVICE_IDENTIFIER: s_i = self._tag_service_identifier
else: s_i = self._file_service_identifier
matches = wx.GetApp().Read( 'file_system_predicates', s_i )
@ -626,7 +626,7 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
absolutely_all_tags = []
if self._tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER:
if self._tag_service_identifier == HC.NULL_SERVICE_IDENTIFIER:
if self._include_current: absolutely_all_tags += [ list( current ) for ( current, deleted, pending, petitioned ) in [ tags.GetUnionCDPP() for tags in all_tags ] ]
if self._include_pending: absolutely_all_tags += [ list( pending ) for ( current, deleted, pending, petitioned ) in [ tags.GetUnionCDPP() for tags in all_tags ] ]
@ -693,7 +693,7 @@ class AutoCompleteDropdownTagsWrite( AutoCompleteDropdownTags ):
self._options = wx.GetApp().Read( 'options' )
if self._options[ 'show_all_tags_in_autocomplete' ]: file_service_identifier = CC.NULL_SERVICE_IDENTIFIER
if self._options[ 'show_all_tags_in_autocomplete' ]: file_service_identifier = HC.NULL_SERVICE_IDENTIFIER
AutoCompleteDropdownTags.__init__( self, parent, file_service_identifier, tag_service_identifier )
@ -2335,23 +2335,32 @@ class AdvancedHentaiFoundryOptions( AdvancedOptions ):
class AdvancedImportOptions( AdvancedOptions ):
def __init__( self, parent ): AdvancedOptions.__init__( self, parent, 'advanced import options' )
def __init__( self, parent, initial_settings = {} ):
self._initial_settings = initial_settings
AdvancedOptions.__init__( self, parent, 'advanced import options' )
def _InitPanel( self, panel ):
options = wx.GetApp().Read( 'options' )
self._auto_archive = wx.CheckBox( panel )
self._auto_archive.SetValue( False )
if 'auto_archive' in self._initial_settings: self._auto_archive.SetValue( self._initial_settings[ 'auto_archive' ] )
else: self._auto_archive.SetValue( False )
self._exclude_deleted = wx.CheckBox( panel )
self._exclude_deleted.SetValue( options[ 'exclude_deleted_files' ] )
if 'exclude_deleted_files' in self._initial_settings: self._exclude_deleted.SetValue( self._initial_settings[ 'exclude_deleted_files' ] )
else: self._exclude_deleted.SetValue( options[ 'exclude_deleted_files' ] )
self._min_size = NoneableSpinCtrl( panel, 'minimum size (KB): ', 5120, multiplier = 1024 )
self._min_size.SetValue( None )
if 'min_size' in self._initial_settings: self._min_size.SetValue( self._initial_settings[ 'min_size' ] )
else: self._min_size.SetValue( None )
self._min_resolution = NoneableSpinCtrl( panel, 'minimum resolution: ', ( 50, 50 ), num_dimensions = 2 )
self._min_resolution.SetValue( None )
if 'min_resolution' in self._initial_settings: self._min_resolution.SetValue( self._initial_settings[ 'min_resolution' ] )
else: self._min_resolution.SetValue( None )
hbox1 = wx.BoxSizer( wx.HORIZONTAL )
@ -2394,10 +2403,11 @@ class AdvancedImportOptions( AdvancedOptions ):
class AdvancedTagOptions( AdvancedOptions ):
def __init__( self, parent, info_string, namespaces = [] ):
def __init__( self, parent, info_string, namespaces = [], initial_settings = {} ):
self._info_string = info_string
self._namespaces = namespaces
self._initial_settings = initial_settings
self._checkboxes_to_service_identifiers = {}
self._service_identifiers_to_namespaces = {}
@ -2418,6 +2428,7 @@ class AdvancedTagOptions( AdvancedOptions ):
hbox = wx.BoxSizer( wx.HORIZONTAL )
checkbox = wx.CheckBox( panel )
if service_identifier in self._initial_settings: checkbox.SetValue( True )
checkbox.Bind( wx.EVT_CHECKBOX, self.EventChecked )
self._checkboxes_to_service_identifiers[ checkbox ] = service_identifier
@ -2441,7 +2452,8 @@ class AdvancedTagOptions( AdvancedOptions ):
else: text = wx.StaticText( panel, label = namespace )
namespace_checkbox = wx.CheckBox( panel )
namespace_checkbox.SetValue( True )
if service_identifier in self._initial_settings and namespace not in self._initial_settings[ service_identifier ]: namespace_checkbox.SetValue( False )
else: namespace_checkbox.SetValue( True )
namespace_checkbox.Bind( wx.EVT_CHECKBOX, self.EventChecked )
self._service_identifiers_to_namespaces[ service_identifier ].append( ( namespace, namespace_checkbox ) )
@ -2471,11 +2483,18 @@ class AdvancedTagOptions( AdvancedOptions ):
def EventChecked( self, event ):
wx.PostEvent( self, wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'advanced_tag_options_changed' ) ) )
event.Skip()
def GetInfo( self ):
service_identifiers = [ self._checkboxes_to_service_identifiers[ checkbox ] for checkbox in self._checkboxes_to_service_identifiers.keys() if checkbox.GetValue() ]
result = []
result = {}
for service_identifier in service_identifiers:
@ -2491,19 +2510,12 @@ class AdvancedTagOptions( AdvancedOptions ):
result.append( ( service_identifier, good_namespaces ) )
result[ service_identifier ] = good_namespaces
return result
def EventChecked( self, event ):
wx.PostEvent( self, wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'advanced_tag_options_changed' ) ) )
event.Skip()
class RadioBox( StaticBox ):
def __init__( self, parent, title, choice_pairs, initial_index = None ):
@ -2619,7 +2631,7 @@ class TagsBoxCPP( TagsBox ):
self._page_key = page_key
self._tag_service_identifier = CC.NULL_SERVICE_IDENTIFIER
self._tag_service_identifier = HC.NULL_SERVICE_IDENTIFIER
self._last_media = None
self._current_tags_to_count = {}
@ -2967,8 +2979,10 @@ class TagsBoxPredicates( TagsBox ):
for predicate in initial_predicates:
self._ordered_strings.append( predicate )
self._strings_to_terms[ predicate ] = predicate
predicate_string = predicate.GetUnicode()
self._ordered_strings.append( predicate_string )
self._strings_to_terms[ predicate_string ] = predicate
self._TextsHaveChanged()

View File

@ -349,7 +349,7 @@ class DialogInputCustomFilterAction( Dialog ):
self._tag_service_identifiers = wx.Choice( self._tag_panel )
self._tag_value = wx.TextCtrl( self._tag_panel, style = wx.TE_READONLY )
self._tag_input = ClientGUICommon.AutoCompleteDropdownTagsWrite( self._tag_panel, self.SetTag, CC.LOCAL_FILE_SERVICE_IDENTIFIER, CC.NULL_SERVICE_IDENTIFIER )
self._tag_input = ClientGUICommon.AutoCompleteDropdownTagsWrite( self._tag_panel, self.SetTag, HC.LOCAL_FILE_SERVICE_IDENTIFIER, HC.NULL_SERVICE_IDENTIFIER )
self._ok_tag = wx.Button( self._tag_panel, label = 'ok' )
self._ok_tag.Bind( wx.EVT_BUTTON, self.EventOKTag )
@ -3397,7 +3397,7 @@ class DialogManage4chanPass( Dialog ):
headers = {}
headers[ 'Content-Type' ] = ct
connection = CC.AdvancedHTTPConnection( url = 'https://sys.4chan.org/', accept_cookies = True )
connection = HC.AdvancedHTTPConnection( url = 'https://sys.4chan.org/', accept_cookies = True )
response = connection.request( 'POST', '/auth', headers = headers, body = body )
@ -5240,7 +5240,7 @@ class DialogManagePixivAccount( Dialog ):
headers = {}
headers[ 'Content-Type' ] = 'application/x-www-form-urlencoded'
connection = CC.AdvancedHTTPConnection( url = 'http://www.pixiv.net/', accept_cookies = True )
connection = HC.AdvancedHTTPConnection( url = 'http://www.pixiv.net/', accept_cookies = True )
response = connection.request( 'POST', '/login.php', headers = headers, body = body, follow_redirects = False )
@ -5344,7 +5344,7 @@ class DialogManageRatings( Dialog ):
rating = panel.GetRating()
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_RATING, service_identifier, self._hashes, info = rating ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_RATING, service_identifier, self._hashes, info = rating ) )
@ -6115,26 +6115,12 @@ class DialogManageServices( Dialog ):
def EventExport( self, event ):
services_listbook = self._listbook.GetCurrentPage()
if services_listbook is not None:
try: self._CheckCurrentServiceIsValid()
except Exception as e:
service_panel = services_listbook.GetCurrentPage()
wx.MessageBox( unicode( e ) )
if service_panel is not None:
( service_identifier, credentials, extra_info ) = service_panel.GetInfo()
old_name = services_listbook.GetCurrentName()
name = service_identifier.GetName()
if old_name is not None and name != old_name:
if services_listbook.NameExists( name ): raise Exception( 'That name is already in use!' )
services_listbook.RenamePage( old_name, name )
return
services_listbook = self._listbook.GetCurrentPage()
@ -6243,7 +6229,13 @@ class DialogManageServices( Dialog ):
def Import( self, paths ):
self._CheckCurrentServiceIsValid()
try: self._CheckCurrentSubscriptionIsValid()
except Exception as e:
wx.MessageBox( unicode( e ) )
return
for path in paths:
@ -6524,6 +6516,521 @@ class DialogManageServices( Dialog ):
class DialogManageSubscriptions( Dialog ):
def __init__( self, parent ):
def InitialiseControls():
self._listbook = ClientGUICommon.ListBook( self )
self._listbook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
types_to_listbooks = {}
self._deviant_art = ClientGUICommon.ListBook( self._listbook )
self._deviant_art.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
self._hentai_foundry = ClientGUICommon.ListBook( self._listbook )
self._hentai_foundry.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
self._giphy = ClientGUICommon.ListBook( self._listbook )
self._giphy.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
self._pixiv = ClientGUICommon.ListBook( self._listbook )
self._pixiv.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
self._booru = ClientGUICommon.ListBook( self._listbook )
self._booru.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
self._tumblr = ClientGUICommon.ListBook( self._listbook )
self._tumblr.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging )
types_to_listbooks[ HC.SUBSCRIPTION_TYPE_DEVIANT_ART ] = self._deviant_art
types_to_listbooks[ HC.SUBSCRIPTION_TYPE_HENTAI_FOUNDRY ] = self._hentai_foundry
types_to_listbooks[ HC.SUBSCRIPTION_TYPE_GIPHY ] = self._giphy
types_to_listbooks[ HC.SUBSCRIPTION_TYPE_PIXIV ] = self._pixiv
types_to_listbooks[ HC.SUBSCRIPTION_TYPE_BOORU ] = self._booru
types_to_listbooks[ HC.SUBSCRIPTION_TYPE_TUMBLR ] = self._tumblr
for ( subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) in self._original_subscriptions:
listbook = types_to_listbooks[ subscription_type ]
page_info = ( self._Panel, ( listbook, subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ), {} )
listbook.AddPage( page_info, name )
self._listbook.AddPage( self._deviant_art, 'deviant art' )
self._listbook.AddPage( self._hentai_foundry, 'hentai foundry' )
self._listbook.AddPage( self._giphy, 'giphy' )
self._listbook.AddPage( self._pixiv, 'pixiv' )
self._listbook.AddPage( self._booru, 'booru' )
self._listbook.AddPage( self._tumblr, 'tumblr' )
self._add = wx.Button( self, label='add' )
self._add.Bind( wx.EVT_BUTTON, self.EventAdd )
self._add.SetForegroundColour( ( 0, 128, 0 ) )
self._remove = wx.Button( self, label='remove' )
self._remove.Bind( wx.EVT_BUTTON, self.EventRemove )
self._remove.SetForegroundColour( ( 128, 0, 0 ) )
self._export = wx.Button( self, label='export' )
self._export.Bind( wx.EVT_BUTTON, self.EventExport )
self._ok = wx.Button( self, label='ok' )
self._ok.Bind( wx.EVT_BUTTON, self.EventOk )
self._ok.SetForegroundColour( ( 0, 128, 0 ) )
self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' )
self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel )
self._cancel.SetForegroundColour( ( 128, 0, 0 ) )
# these need to be below the addpages because they'd fire the events
self._listbook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventPageChanging, source = self._listbook )
def InitialisePanel():
add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL )
add_remove_hbox.AddF( self._add, FLAGS_MIXED )
add_remove_hbox.AddF( self._remove, FLAGS_MIXED )
add_remove_hbox.AddF( self._export, FLAGS_MIXED )
ok_hbox = wx.BoxSizer( wx.HORIZONTAL )
ok_hbox.AddF( self._ok, FLAGS_MIXED )
ok_hbox.AddF( self._cancel, FLAGS_MIXED )
vbox = wx.BoxSizer( wx.VERTICAL )
vbox.AddF( self._listbook, FLAGS_EXPAND_BOTH_WAYS )
vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT )
vbox.AddF( ok_hbox, FLAGS_BUTTON_SIZERS )
self.SetSizer( vbox )
( x, y ) = self.GetEffectiveMinSize()
self.SetInitialSize( ( 680, max( 720, y ) ) )
Dialog.__init__( self, parent, 'manage subscriptions' )
self._original_subscriptions = wx.GetApp().Read( 'subscriptions' )
InitialiseControls()
InitialisePanel()
self.SetDropTarget( ClientGUICommon.FileDropTarget( self.Import ) )
def _CheckCurrentSubscriptionIsValid( self ):
subs_listbook = self._listbook.GetCurrentPage()
if subs_listbook is not None:
sub_panel = subs_listbook.GetCurrentPage()
if sub_panel is not None:
name = sub_panel.GetName()
old_name = subs_listbook.GetCurrentName()
if old_name is not None and name != old_name:
if subs_listbook.NameExists( name ): raise Exception( 'That name is already in use!' )
subs_listbook.RenamePage( old_name, name )
def EventAdd( self, event ):
with wx.TextEntryDialog( self, 'Enter name for subscription' ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
try:
name = dlg.GetValue()
subscription_listbook = self._listbook.GetCurrentPage()
if subscription_listbook.NameExists( name ): raise Exception( 'That name is already in use!' )
if name == '': raise Exception( 'Please enter a nickname for the subscription.' )
if subscription_listbook == self._deviant_art: subscription_type = HC.SUBSCRIPTION_TYPE_DEVIANT_ART
elif subscription_listbook == self._hentai_foundry: subscription_type = HC.SUBSCRIPTION_TYPE_HENTAI_FOUNDRY
elif subscription_listbook == self._giphy: subscription_type = HC.SUBSCRIPTION_TYPE_GIPHY
elif subscription_listbook == self._pixiv: subscription_type = HC.SUBSCRIPTION_TYPE_PIXIV
elif subscription_listbook == self._booru: subscription_type = HC.SUBSCRIPTION_TYPE_BOORU
elif subscription_listbook == self._tumblr: subscription_type = HC.SUBSCRIPTION_TYPE_TUMBLR
if subscription_type in ( HC.SUBSCRIPTION_TYPE_DEVIANT_ART, HC.SUBSCRIPTION_TYPE_TUMBLR ): query_type = 'artist'
else: query_type = 'tags'
if subscription_type == HC.SUBSCRIPTION_TYPE_BOORU: query_type = ( '', query_type )
query = ''
frequency_type = 86400
frequency_number = 7
advanced_tag_options = {}
advanced_import_options = {} # blaaah not sure
last_checked = None
url_cache = set()
page = self._Panel( subscription_listbook, subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache )
subscription_listbook.AddPage( page, name, select = True )
except Exception as e:
wx.MessageBox( unicode( e ) )
self.EventAdd( event )
def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL )
def EventExport( self, event ):
try: self._CheckCurrentSubscriptionIsValid()
except Exception as e:
wx.MessageBox( unicode( e ) )
return
subscription_listbook = self._listbook.GetCurrentPage()
if subscription_listbook is not None:
sub_panel = subscription_listbook.GetCurrentPage()
info = sub_panel.GetInfo()
try:
with wx.FileDialog( self, 'select where to export subscription', defaultFile = name + '.yaml', style = wx.FD_SAVE ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( info ) )
except:
with wx.FileDialog( self, 'select where to export subscription', defaultFile = 'subscription.yaml', style = wx.FD_SAVE ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( info ) )
def EventOk( self, event ):
try: self._CheckCurrentSubscriptionIsValid()
except Exception as e:
wx.MessageBox( unicode( e ) )
return
all_pages = []
all_pages.extend( self._deviant_art.GetNameToPageDict().values() )
all_pages.extend( self._hentai_foundry.GetNameToPageDict().values() )
all_pages.extend( self._giphy.GetNameToPageDict().values() )
all_pages.extend( self._pixiv.GetNameToPageDict().values() )
all_pages.extend( self._booru.GetNameToPageDict().values() )
all_pages.extend( self._tumblr.GetNameToPageDict().values() )
subscriptions = [ page.GetInfo() for page in all_pages ]
try: wx.GetApp().Write( 'subscriptions', subscriptions )
except Exception as e: wx.MessageBox( 'Saving services to DB raised this error: ' + unicode( e ) )
self.EndModal( wx.ID_OK )
def EventPageChanging( self, event ):
try: self._CheckCurrentSubscriptionIsValid()
except Exception as e:
wx.MessageBox( unicode( e ) )
event.Veto()
def EventRemove( self, event ):
subscription_listbook = self._listbook.GetCurrentPage()
sub_panel = subscription_listbook.GetCurrentPage()
if sub_panel is not None: subscription_listbook.DeleteCurrentPage()
def EventServiceChanging( self, event ):
try: self._CheckCurrentSubscriptionIsValid()
except Exception as e:
wx.MessageBox( unicode( e ) )
event.Veto()
def Import( self, paths ):
try: self._CheckCurrentSubscriptionIsValid()
except Exception as e:
wx.MessageBox( unicode( e ) )
return
# do this
return
for path in paths:
try:
with open( path, 'rb' ) as f: file = f.read()
( service_identifier, credentials, extra_info ) = yaml.safe_load( file )
name = service_identifier.GetName()
service_type = service_identifier.GetType()
if service_type == HC.TAG_REPOSITORY: services_listbook = self._tag_repositories
elif service_type == HC.FILE_REPOSITORY: services_listbook = self._file_repositories
elif service_type == HC.MESSAGE_DEPOT: services_listbook = self._message_depots
elif service_type == HC.SERVER_ADMIN: services_listbook = self._servers_admin
self._listbook.SelectPage( services_listbook )
if services_listbook.NameExists( name ):
message = 'A service already exists with that name. Overwrite it?'
with DialogYesNo( self, message ) as dlg:
if dlg.ShowModal() == wx.ID_YES:
page = services_listbook.GetNameToPageDict()[ name ]
page.Update( service_identifier, credentials, extra_info )
else:
self._edit_log.append( ( 'add', ( service_identifier, credentials, extra_info ) ) )
page = self._Panel( services_listbook, service_identifier, credentials, extra_info )
services_listbook.AddPage( page, name, select = True )
except:
wx.MessageBox( traceback.format_exc() )
class _Panel( wx.ScrolledWindow ):
def __init__( self, parent, subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ):
wx.ScrolledWindow.__init__( self, parent )
self.SetScrollRate( 0, 20 )
self.SetMinSize( ( 540, 620 ) )
advanced_tag_options = dict( advanced_tag_options ) # db yaml storage bug
self._original_info = ( subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache )
# init controls
self._name_panel = ClientGUICommon.StaticBox( self, 'name' )
self._name = wx.TextCtrl( self._name_panel, value = name )
self._query_panel = ClientGUICommon.StaticBox( self, 'query' )
self._query = wx.TextCtrl( self._query_panel, value = query )
self._booru_selector = wx.ListBox( self._query_panel )
if subscription_type == HC.SUBSCRIPTION_TYPE_BOORU:
( booru_name, query_type ) = query_type
boorus = wx.GetApp().Read( 'boorus' )
i = 0
index_to_select = 0
for booru in boorus:
self._booru_selector.Append( booru.GetName(), booru )
if booru.GetName() == booru_name: index_to_select = i
i += 1
self._booru_selector.Select( index_to_select )
initial_index = 1
else:
self._booru_selector.Hide()
if query_type == 'artist': initial_index = 0
elif query_type == 'tags': initial_index = 1
self._query_type = ClientGUICommon.RadioBox( self._query_panel, 'query type', ( ( 'artist', 'artist' ), ( 'tags', 'tags' ) ), initial_index = initial_index )
if subscription_type in ( HC.SUBSCRIPTION_TYPE_BOORU, HC.SUBSCRIPTION_TYPE_DEVIANT_ART, HC.SUBSCRIPTION_TYPE_GIPHY, HC.SUBSCRIPTION_TYPE_TUMBLR ): self._query_type.Hide()
self._frequency_number = wx.SpinCtrl( self._query_panel )
self._frequency_number.SetValue( frequency_number )
self._frequency_type = wx.Choice( self._query_panel )
index_to_select = None
i = 0
for ( title, time ) in ( ( 'days', 86400 ), ( 'weeks', 86400 * 7 ), ( 'months', 86400 * 30 ) ):
self._frequency_type.Append( title, time )
if frequency_type == time: index_to_select = i
i += 1
if index_to_select is not None: self._frequency_type.Select( index_to_select )
self._info_panel = ClientGUICommon.StaticBox( self, 'info' )
if subscription_type == HC.SUBSCRIPTION_TYPE_BOORU: namespaces = [ 'creator', 'series', 'character', '' ]
elif subscription_type == HC.SUBSCRIPTION_TYPE_DEVIANT_ART: namespaces = [ 'creator', 'title', '' ]
elif subscription_type == HC.SUBSCRIPTION_TYPE_GIPHY: namespaces = [ '' ]
elif subscription_type == HC.SUBSCRIPTION_TYPE_HENTAI_FOUNDRY: namespaces = [ 'creator', 'title', '' ]
elif subscription_type == HC.SUBSCRIPTION_TYPE_PIXIV: namespaces = [ 'creator', 'title', '' ]
elif subscription_type == HC.SUBSCRIPTION_TYPE_TUMBLR: namespaces = [ '' ]
self._advanced_tag_options = ClientGUICommon.AdvancedTagOptions( self, 'send tags to ', namespaces, initial_settings = advanced_tag_options )
self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self, initial_settings = advanced_import_options )
# init panel
self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) )
self._name_panel.AddF( self._name, FLAGS_EXPAND_PERPENDICULAR )
hbox = wx.BoxSizer( wx.HORIZONTAL )
hbox.AddF( wx.StaticText( self._query_panel, label = 'Check subscription every ' ), FLAGS_MIXED )
hbox.AddF( self._frequency_number, FLAGS_MIXED )
hbox.AddF( self._frequency_type, FLAGS_MIXED )
self._query_panel.AddF( self._query, FLAGS_EXPAND_PERPENDICULAR )
self._query_panel.AddF( self._query_type, FLAGS_EXPAND_PERPENDICULAR )
self._query_panel.AddF( self._booru_selector, FLAGS_EXPAND_PERPENDICULAR )
self._query_panel.AddF( hbox, FLAGS_EXPAND_SIZER_PERPENDICULAR )
if last_checked is None: last_checked_message = 'not yet initialised'
else: last_checked_message = 'set this static text up'
self._info_panel.AddF( wx.StaticText( self._info_panel, label = last_checked_message ), FLAGS_EXPAND_PERPENDICULAR )
self._info_panel.AddF( wx.StaticText( self._info_panel, label = str( len( url_cache ) ) + ' urls in cache' ), FLAGS_EXPAND_PERPENDICULAR )
vbox = wx.BoxSizer( wx.VERTICAL )
vbox.AddF( self._name_panel, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._query_panel, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._info_panel, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR )
self.SetSizer( vbox )
def GetInfo( self ):
( subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ) = self._original_info
name = self._name.GetValue()
query_type = self._query_type.GetSelectedClientData()
if subscription_type == HC.SUBSCRIPTION_TYPE_BOORU:
booru_name = self._booru_selector.GetStringSelection()
query_type = ( booru_name, query_type )
query = self._query.GetValue()
frequency_number = self._frequency_number.GetValue()
frequency_type = self._frequency_type.GetClientData( self._frequency_type.GetSelection() )
advanced_tag_options = self._advanced_tag_options.GetInfo()
advanced_tag_options = advanced_tag_options.items() # db yaml storage bug
advanced_import_options = self._advanced_import_options.GetInfo()
return ( subscription_type, name, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache )
def GetName( self ): return self._name.GetValue()
def Update( self, query_type, query, frequency_type, frequency_number, advanced_tag_options, advanced_import_options, last_checked, url_cache ):
pass
# do this
class DialogManageTagServicePrecedence( Dialog ):
def __init__( self, parent ):
@ -6667,7 +7174,7 @@ class DialogManageTags( Dialog ):
service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.TAG_REPOSITORY, ) )
for service_identifier in list( service_identifiers ) + [ CC.LOCAL_TAG_SERVICE_IDENTIFIER ]:
for service_identifier in list( service_identifiers ) + [ HC.LOCAL_TAG_SERVICE_IDENTIFIER ]:
service_type = service_identifier.GetType()
@ -6762,7 +7269,7 @@ class DialogManageTags( Dialog ):
edit_log = page.GetEditLog()
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, self._hashes, info = edit_log ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_EDIT_LOG, service_identifier, self._hashes, info = edit_log ) )
@ -6909,8 +7416,8 @@ class DialogManageTags( Dialog ):
self._edit_log = []
self._edit_log.extend( [ ( CC.CONTENT_UPDATE_ADD, tag ) for tag in self._pending_tags ] )
self._edit_log.extend( [ ( CC.CONTENT_UPDATE_DELETE, tag ) for tag in self._petitioned_tags ] )
self._edit_log.extend( [ ( HC.CONTENT_UPDATE_ADD, tag ) for tag in self._pending_tags ] )
self._edit_log.extend( [ ( HC.CONTENT_UPDATE_DELETE, tag ) for tag in self._petitioned_tags ] )
else:
@ -6920,7 +7427,7 @@ class DialogManageTags( Dialog ):
self._tags_box.RescindPend( tag )
self._edit_log.append( ( CC.CONTENT_UPDATE_RESCIND_PENDING, tag ) )
self._edit_log.append( ( HC.CONTENT_UPDATE_RESCIND_PENDING, tag ) )
elif tag in self._petitioned_tags:
@ -6928,13 +7435,13 @@ class DialogManageTags( Dialog ):
self._tags_box.RescindPetition( tag )
self._edit_log.append( ( CC.CONTENT_UPDATE_RESCIND_PETITION, tag ) )
self._edit_log.append( ( HC.CONTENT_UPDATE_RESCIND_PETITION, tag ) )
elif tag in self._current_tags:
if self._account.HasPermission( HC.RESOLVE_PETITIONS ):
self._edit_log.append( ( CC.CONTENT_UPDATE_PETITION, ( tag, 'admin' ) ) )
self._edit_log.append( ( HC.CONTENT_UPDATE_PETITION, ( tag, 'admin' ) ) )
self._petitioned_tags.append( tag )
@ -6948,7 +7455,7 @@ class DialogManageTags( Dialog ):
if dlg.ShowModal() == wx.ID_OK:
self._edit_log.append( ( CC.CONTENT_UPDATE_PETITION, ( tag, dlg.GetValue() ) ) )
self._edit_log.append( ( HC.CONTENT_UPDATE_PETITION, ( tag, dlg.GetValue() ) ) )
self._petitioned_tags.append( tag )
@ -6961,7 +7468,7 @@ class DialogManageTags( Dialog ):
if self._account.HasPermission( HC.RESOLVE_PETITIONS ):
self._edit_log.append( ( CC.CONTENT_UPDATE_PENDING, tag ) )
self._edit_log.append( ( HC.CONTENT_UPDATE_PENDING, tag ) )
self._pending_tags.append( tag )
@ -6970,7 +7477,7 @@ class DialogManageTags( Dialog ):
else:
self._edit_log.append( ( CC.CONTENT_UPDATE_PENDING, tag ) )
self._edit_log.append( ( HC.CONTENT_UPDATE_PENDING, tag ) )
self._pending_tags.append( tag )
@ -7390,9 +7897,9 @@ class DialogPathsToTagsRegex( Dialog ):
page = self._Panel( self._tag_repositories, CC.LOCAL_TAG_SERVICE_IDENTIFIER, paths )
page = self._Panel( self._tag_repositories, HC.LOCAL_TAG_SERVICE_IDENTIFIER, paths )
name = CC.LOCAL_TAG_SERVICE_IDENTIFIER.GetName()
name = HC.LOCAL_TAG_SERVICE_IDENTIFIER.GetName()
self._tag_repositories.AddPage( page, name )
@ -7586,7 +8093,7 @@ class DialogPathsToTagsRegex( Dialog ):
self._tags = ClientGUICommon.TagsBoxFlat( self._tags_panel, self.TagRemoved )
self._tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self._tags_panel, self.AddTag, CC.LOCAL_FILE_SERVICE_IDENTIFIER, service_identifier )
self._tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self._tags_panel, self.AddTag, HC.LOCAL_FILE_SERVICE_IDENTIFIER, service_identifier )
#
@ -7596,7 +8103,7 @@ class DialogPathsToTagsRegex( Dialog ):
self._single_tags = ClientGUICommon.TagsBoxFlat( self._single_tags_panel, self.SingleTagRemoved )
self._single_tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self._single_tags_panel, self.AddTagSingle, CC.LOCAL_FILE_SERVICE_IDENTIFIER, service_identifier )
self._single_tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self._single_tags_panel, self.AddTagSingle, HC.LOCAL_FILE_SERVICE_IDENTIFIER, service_identifier )
self._single_tag_box.Disable()
for path in self._paths:
@ -8142,7 +8649,7 @@ class DialogRegisterService( Dialog ):
return
connection = CC.AdvancedHTTPConnection( host = host, port = port )
connection = HC.AdvancedHTTPConnection( host = host, port = port )
headers = {}

View File

@ -1,4 +1,5 @@
import HydrusConstants as HC
import HydrusDownloading
import HydrusImageHandling
import ClientConstants as CC
import ClientConstantsMessages
@ -244,7 +245,7 @@ class CaptchaControl( wx.Panel ):
try:
connection = CC.AdvancedHTTPConnection( scheme = 'http', host = 'www.google.com', port = 80 )
connection = HC.AdvancedHTTPConnection( scheme = 'http', host = 'www.google.com', port = 80 )
javascript_string = connection.request( 'GET', '/recaptcha/api/challenge?k=' + self._captcha_key )
@ -346,7 +347,7 @@ class Comment( wx.Panel ):
class ManagementPanel( wx.lib.scrolledpanel.ScrolledPanel ):
def __init__( self, parent, page, page_key, file_service_identifier = CC.LOCAL_FILE_SERVICE_IDENTIFIER ):
def __init__( self, parent, page, page_key, file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER ):
wx.lib.scrolledpanel.ScrolledPanel.__init__( self, parent, style = wx.BORDER_NONE | wx.VSCROLL )
@ -358,7 +359,7 @@ class ManagementPanel( wx.lib.scrolledpanel.ScrolledPanel ):
self._page = page
self._page_key = page_key
self._file_service_identifier = file_service_identifier
self._tag_service_identifier = CC.NULL_SERVICE_IDENTIFIER
self._tag_service_identifier = HC.NULL_SERVICE_IDENTIFIER
HC.pubsub.sub( self, 'SetSearchFocus', 'set_search_focus' )
@ -400,7 +401,7 @@ class ManagementPanelDumper( ManagementPanel ):
self._imageboard = imageboard
self._media_list = ClientGUIMixins.ListeningMediaList( CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
self._media_list = ClientGUIMixins.ListeningMediaList( HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
self._current_media = None
@ -578,7 +579,7 @@ class ManagementPanelDumper( ManagementPanel ):
try:
connection = CC.AdvancedHTTPConnection( scheme = self._post_scheme, host = self._post_host, port = self._post_port )
connection = HC.AdvancedHTTPConnection( scheme = self._post_scheme, host = self._post_host, port = self._post_port )
data = connection.request( 'POST', self._post_request, headers = headers, body = body )
@ -622,9 +623,9 @@ class ManagementPanelDumper( ManagementPanel ):
initial += str( index + 1 ) + '/' + str( num_files )
info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
for ( service_identifier, namespaces ) in info:
for ( service_identifier, namespaces ) in advanced_tag_options.items():
( current, deleted, pending, petitioned ) = media.GetTags().GetCDPP( service_identifier )
@ -1028,7 +1029,7 @@ class ManagementPanelDumper( ManagementPanel ):
if page_key == self._page_key:
self._media_list = ClientGUIMixins.ListeningMediaList( CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
self._media_list = ClientGUIMixins.ListeningMediaList( HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results )
new_media_to_dump_info = {}
@ -1590,13 +1591,13 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
# this could be in the advanced_tag_options class
def _DoRedundantTagContentUpdates( self, hash, tags ):
tag_import_info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0:
if len( advanced_tag_options ) > 0:
content_updates = []
for ( service_identifier, namespaces ) in tag_import_info:
for ( service_identifier, namespaces ) in advanced_tag_options.items():
if len( namespaces ) > 0:
@ -1610,12 +1611,12 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
if len( tags_to_add_here ) > 0:
if service_identifier == CC.LOCAL_TAG_SERVICE_IDENTIFIER: action = CC.CONTENT_UPDATE_ADD
else: action = CC.CONTENT_UPDATE_PENDING
if service_identifier == HC.LOCAL_TAG_SERVICE_IDENTIFIER: action = HC.CONTENT_UPDATE_ADD
else: action = HC.CONTENT_UPDATE_PENDING
edit_log = [ ( action, tag ) for tag in tags_to_add_here ]
content_updates.append( HC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), info = edit_log ) )
content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), info = edit_log ) )
@ -1631,7 +1632,9 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
service_identifiers_to_tags = {}
for ( service_identifier, namespaces ) in self._advanced_tag_options.GetInfo():
advanced_tag_options = self._advanced_tag_options.GetInfo()
for ( service_identifier, namespaces ) in advanced_tag_options.items():
if len( namespaces ) > 0:
@ -1677,6 +1680,106 @@ class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ):
def _THREADGetImportArgs( self, *url_args ):
try:
downloader = self._GetDownloader( 'example' )
advanced_tag_options = self._advanced_tag_options.GetInfo()
do_tags = len( advanced_tag_options ) > 0
url = url_args[0]
( status, hash ) = wx.GetApp().Read( 'url_status', url )
if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new'
if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
elif status == 'redundant':
( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) )
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
if do_tags:
tags = downloader.GetTags( *url_args )
service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
content_updates = HydrusDownloading.ConvertServiceIdentifiersToTagsToContentUpdates( hash, service_identifiers_to_tags )
wx.GetApp().Write( 'content_updates', content_updates )
HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
else:
HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) )
if do_tags: ( file, tags ) = downloader.GetFileAndTags( *url_args )
else:
file = downloader.GetFile( *url_args )
tags = []
service_identifiers_to_tags = HydrusDownloading.ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options )
advanced_import_options = self._advanced_import_options.GetInfo()
wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, url = url )
except Exception as e:
print( traceback.format_exc() )
wx.CallAfter( self.CALLBACKImportArgs, self._page_key, '', {}, {}, exception = e )
def _THREADDownloadImportItems( self, raw_query ):
# this is important, because we'll instantiate new objects in the eventcancel
cancel_import = self._cancel_import_queue
cancel_download = self._cancel_outer_queue
try:
downloader = self._GetDownloader( raw_query )
total_urls_found = 0
while True:
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'found ' + str( total_urls_found ) + ' urls' )
while self._pause_outer_queue: time.sleep( 1 )
if cancel_import.is_set(): break
if cancel_download.is_set(): break
urls = downloader.GetAnotherPage()
total_urls_found += len( urls )
if len( urls ) == 0: break
else: wx.CallAfter( self.CALLBACKAddToImportQueue, urls )
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, '' )
except HC.NotFoundException: pass
except Exception as e:
print( traceback.format_exc() )
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) )
HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key )
def EventCancelOuterQueue( self, event ):
self._cancel_outer_queue.set()
@ -1714,146 +1817,11 @@ class ManagementPanelImportWithQueueAdvancedBooru( ManagementPanelImportWithQueu
ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces )
def _GetImageUrlAndTags( self, html, url ):
def _GetDownloader( self, raw_tags ):
( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = self._booru.GetData()
tags = raw_tags.split( ' ' )
( image_url, tags ) = ClientParsers.ParseBooruPage( html, url, tag_classnames_to_namespaces, image_id = image_id, image_data = image_data )
return ( image_url, tags )
def _THREADGetImportArgs( self, queue_object ):
try:
url = queue_object
( status, hash ) = wx.GetApp().Read( 'url_status', url )
if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new'
if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' )
elif status == 'redundant':
( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) )
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
tag_import_info = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0:
parse_result = urlparse.urlparse( url )
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
html = connection.geturl( url )
( image_url, tags ) = self._GetImageUrlAndTags( html, url )
self._DoRedundantTagContentUpdates( hash, tags )
HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
else:
HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) )
parse_result = urlparse.urlparse( url )
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
html = connection.geturl( url )
( image_url, tags ) = self._GetImageUrlAndTags( html, url )
parse_result = urlparse.urlparse( image_url )
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
file = connection.geturl( image_url )
service_identifiers_to_tags = self._GetServiceIdentifiersToTags( tags )
advanced_import_options = self._advanced_import_options.GetInfo()
wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options, service_identifiers_to_tags, url = url )
except Exception as e:
print( traceback.format_exc() )
wx.CallAfter( self.CALLBACKImportArgs, self._page_key, '', {}, {}, exception = e )
def _THREADDownloadImportItems( self, tags_string ):
# this is important, because we'll instantiate new objects in the eventcancel
cancel_import = self._cancel_import_queue
cancel_download = self._cancel_outer_queue
try:
tags = tags_string.split( ' ' )
( search_url, gallery_advance_num, search_separator, thumb_classname ) = self._booru.GetGalleryParsingInfo()
urls = []
example_url = search_url.replace( '%tags%', search_separator.join( tags ) ).replace( '%index%', '0' )
connection = CC.AdvancedHTTPConnection( url = example_url )
if gallery_advance_num == 1: i = 1 # page 1, 2, 3
else: i = 0 # index 0, 25, 50
total_urls_found = 0
while True:
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'found ' + str( total_urls_found ) + ' urls' )
while self._pause_outer_queue: time.sleep( 1 )
if cancel_import.is_set(): break
if cancel_download.is_set(): break
current_url = search_url.replace( '%tags%', search_separator.join( tags ) ).replace( '%index%', str( i * gallery_advance_num ) )
html = connection.geturl( current_url )
urls = ClientParsers.ParseBooruGallery( html, current_url, thumb_classname )
total_urls_found += len( urls )
if len( urls ) == 0: break
else: wx.CallAfter( self.CALLBACKAddToImportQueue, urls )
i += 1
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, '' )
except HC.NotFoundException: pass
except Exception as e:
print( traceback.format_exc() )
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) )
HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key )
return HydrusDownloading.GetDownloader( HC.SUBSCRIPTION_TYPE_BOORU, self._booru, tags )
class ManagementPanelImportWithQueueAdvancedDeviantArt( ManagementPanelImportWithQueueAdvanced ):
@ -1897,7 +1865,7 @@ class ManagementPanelImportWithQueueAdvancedDeviantArt( ManagementPanelImportWit
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -1929,7 +1897,7 @@ class ManagementPanelImportWithQueueAdvancedDeviantArt( ManagementPanelImportWit
example_url = gallery_url + '0'
connection = CC.AdvancedHTTPConnection( url = example_url )
connection = HC.AdvancedHTTPConnection( url = example_url )
i = 0
@ -1987,7 +1955,7 @@ class ManagementPanelImportWithQueueAdvancedGiphy( ManagementPanelImportWithQueu
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -2028,9 +1996,9 @@ class ManagementPanelImportWithQueueAdvancedGiphy( ManagementPanelImportWithQueu
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
tag_import_info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0:
if len( advanced_tag_options ) > 0:
try:
@ -2051,7 +2019,7 @@ class ManagementPanelImportWithQueueAdvancedGiphy( ManagementPanelImportWithQueu
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -2085,7 +2053,7 @@ class ManagementPanelImportWithQueueAdvancedGiphy( ManagementPanelImportWithQueu
example_url = gallery_url + '0'
connection = CC.AdvancedHTTPConnection( url = example_url )
connection = HC.AdvancedHTTPConnection( url = example_url )
i = 0
@ -2194,9 +2162,9 @@ class ManagementPanelImportWithQueueAdvancedHentaiFoundry( ManagementPanelImport
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
tag_import_info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0:
if len( advanced_tag_options ) > 0:
html = self._page_connection.geturl( url )
@ -2219,7 +2187,7 @@ class ManagementPanelImportWithQueueAdvancedHentaiFoundry( ManagementPanelImport
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -2242,8 +2210,8 @@ class ManagementPanelImportWithQueueAdvancedHentaiFoundry( ManagementPanelImport
try:
self._search_connection = CC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True )
self._page_connection = CC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True )
self._search_connection = HC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True )
self._page_connection = HC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True )
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'establishing session with hentai foundry' )
@ -2461,9 +2429,9 @@ class ManagementPanelImportWithQueueAdvancedPixiv( ManagementPanelImportWithQueu
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
tag_import_info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0:
if len( advanced_tag_options ) > 0:
html = self._page_connection.geturl( url )
@ -2478,9 +2446,9 @@ class ManagementPanelImportWithQueueAdvancedPixiv( ManagementPanelImportWithQueu
HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) )
tag_import_info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0:
if len( advanced_tag_options ) > 0:
html = self._page_connection.geturl( url )
@ -2492,7 +2460,7 @@ class ManagementPanelImportWithQueueAdvancedPixiv( ManagementPanelImportWithQueu
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -2518,8 +2486,8 @@ class ManagementPanelImportWithQueueAdvancedPixiv( ManagementPanelImportWithQueu
try:
self._search_connection = CC.AdvancedHTTPConnection( url = 'http://www.pixiv.net', accept_cookies = True )
self._page_connection = CC.AdvancedHTTPConnection( url = 'http://www.pixiv.net', accept_cookies = True )
self._search_connection = HC.AdvancedHTTPConnection( url = 'http://www.pixiv.net', accept_cookies = True )
self._page_connection = HC.AdvancedHTTPConnection( url = 'http://www.pixiv.net', accept_cookies = True )
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'establishing session with pixiv' )
@ -2739,9 +2707,9 @@ class ManagementPanelImportWithQueueAdvancedTumblr( ManagementPanelImportWithQue
HC.pubsub.pub( 'add_media_result', self._page_key, media_result )
tag_import_info = self._advanced_tag_options.GetInfo()
advanced_tag_options = self._advanced_tag_options.GetInfo()
if len( tag_import_info ) > 0: self._DoRedundantTagContentUpdates( hash, tags )
if len( advanced_tag_options ) > 0: self._DoRedundantTagContentUpdates( hash, tags )
HC.pubsub.pub( 'import_done', self._page_key, 'redundant' )
@ -2753,7 +2721,7 @@ class ManagementPanelImportWithQueueAdvancedTumblr( ManagementPanelImportWithQue
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -2786,7 +2754,7 @@ class ManagementPanelImportWithQueueAdvancedTumblr( ManagementPanelImportWithQue
example_url = search_url.replace( '%start%', '0' )
connection = CC.AdvancedHTTPConnection( url = example_url )
connection = HC.AdvancedHTTPConnection( url = example_url )
i = 0
@ -2895,7 +2863,7 @@ class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ):
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -2929,7 +2897,7 @@ class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ):
HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'Connecting to address' )
try: connection = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
try: connection = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
except: raise Exception( 'Could not connect to server' )
try: html = connection.geturl( url )
@ -3024,7 +2992,7 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
try:
connection = CC.AdvancedHTTPConnection( url = url )
connection = HC.AdvancedHTTPConnection( url = url )
raw_json = connection.geturl( url )
@ -3104,7 +3072,7 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
connection = self._connections[ ( scheme, host, port ) ]
@ -3357,13 +3325,13 @@ class ManagementPanelPetitions( ManagementPanel ):
hashes = self._current_petition.GetPetitionHashes()
content_updates = [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, self._file_service_identifier, hashes ) ]
content_updates = [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, self._file_service_identifier, hashes ) ]
elif isinstance( self._current_petition, HC.ServerMappingPetition ):
( reason, tag, hashes ) = self._current_petition.GetPetitionInfo()
content_updates = [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, self._file_service_identifier, hashes, tag ) ]
content_updates = [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, self._file_service_identifier, hashes, tag ) ]
wx.GetApp().Write( 'content_updates', content_updates )
@ -3453,7 +3421,7 @@ class ManagementPanelQuery( ManagementPanel ):
self._current_predicates_box = ClientGUICommon.TagsBoxPredicates( self._search_panel, self._page_key, initial_predicates )
self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self._search_panel, self._page_key, self._file_service_identifier, CC.NULL_SERVICE_IDENTIFIER, self._page.GetMedia )
self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self._search_panel, self._page_key, self._file_service_identifier, HC.NULL_SERVICE_IDENTIFIER, self._page.GetMedia )
self._search_panel.AddF( self._current_predicates_box, FLAGS_EXPAND_PERPENDICULAR )
self._search_panel.AddF( self._searchbox, FLAGS_EXPAND_PERPENDICULAR )

View File

@ -97,7 +97,7 @@ class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ):
hashes = self._GetSelectedHashes( CC.DISCRIMINANT_INBOX )
if len( hashes ) > 0: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
if len( hashes ) > 0: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_ARCHIVE, HC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
def _CopyHashToClipboard( self ):
@ -143,7 +143,9 @@ class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ):
if wx.TheClipboard.Open():
data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._focussed_media.GetDisplayMedia().GetHash().encode( 'hex' ) )
display_media = self._focussed_media.GetDisplayMedia()
data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + display_media.GetHash().encode( 'hex' ) + HC.mime_ext_lookup[ display_media.GetMime() ] )
wx.TheClipboard.SetData( data )
@ -188,7 +190,7 @@ class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ):
if dlg.ShowModal() == wx.ID_YES:
try: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, file_service_identifier, hashes ) ] )
try: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_DELETE, file_service_identifier, hashes ) ] )
except: wx.MessageBox( traceback.format_exc() )
@ -376,7 +378,7 @@ class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ):
hashes = self._GetSelectedHashes( CC.DISCRIMINANT_ARCHIVE )
if len( hashes ) > 0: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( CC.CONTENT_UPDATE_INBOX, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
if len( hashes ) > 0: wx.GetApp().Write( 'content_updates', [ HC.ContentUpdate( HC.CONTENT_UPDATE_INBOX, HC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] )
def _ManageRatings( self ):
@ -673,7 +675,7 @@ class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ):
action = content_update.GetAction()
if action == CC.CONTENT_UPDATE_DELETE and service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ) and self._focussed_media in affected_media: self._SetFocussedMedia( None )
if action == HC.CONTENT_UPDATE_DELETE and service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ) and self._focussed_media in affected_media: self._SetFocussedMedia( None )
if len( affected_media ) > 0: self._ReblitMedia( affected_media )
@ -691,7 +693,7 @@ class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ):
service_identifier = update.GetServiceIdentifier()
if action in ( CC.SERVICE_UPDATE_DELETE_PENDING, CC.SERVICE_UPDATE_RESET ):
if action in ( HC.SERVICE_UPDATE_DELETE_PENDING, HC.SERVICE_UPDATE_RESET ):
self._RefitCanvas()
@ -1170,7 +1172,7 @@ class MediaPanelThumbnails( MediaPanel ):
if t is not None:
if t.GetFileServiceIdentifiersCDPP().HasLocal(): self._FullScreen( t )
elif self._file_service_identifier != CC.NULL_SERVICE_IDENTIFIER: wx.GetApp().Write( 'add_downloads', self._file_service_identifier, t.GetHashes() )
elif self._file_service_identifier != HC.NULL_SERVICE_IDENTIFIER: wx.GetApp().Write( 'add_downloads', self._file_service_identifier, t.GetHashes() )
@ -1464,7 +1466,7 @@ class MediaPanelThumbnails( MediaPanel ):
if selection_has_archive: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'inbox' ), inbox_phrase )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'remove' ), remove_phrase )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), local_delete_phrase )
menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', HC.LOCAL_FILE_SERVICE_IDENTIFIER ), local_delete_phrase )
#menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'export' ), export_phrase )
@ -1609,8 +1611,8 @@ class MediaPanelThumbnails( MediaPanel ):
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_HOME, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_home' ) ),
( wx.ACCEL_NORMAL, wx.WXK_END, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_end' ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_END, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_end' ) ),
( wx.ACCEL_NORMAL, wx.WXK_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ) ),
( wx.ACCEL_NORMAL, wx.WXK_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', HC.LOCAL_FILE_SERVICE_IDENTIFIER ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', HC.LOCAL_FILE_SERVICE_IDENTIFIER ) ),
( wx.ACCEL_NORMAL, wx.WXK_RETURN, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fullscreen' ) ),
( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_ENTER, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fullscreen' ) ),
( wx.ACCEL_NORMAL, wx.WXK_UP, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_up' ) ),
@ -1767,7 +1769,7 @@ class Thumbnail( Selectable ):
my_file_service_identifiers = self.GetFileServiceIdentifiersCDPP().GetCurrent()
if CC.LOCAL_FILE_SERVICE_IDENTIFIER in my_file_service_identifiers: thumbnail_file_service_identifier = CC.LOCAL_FILE_SERVICE_IDENTIFIER
if HC.LOCAL_FILE_SERVICE_IDENTIFIER in my_file_service_identifiers: thumbnail_file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER
elif len( my_file_service_identifiers ) > 0: thumbnail_file_service_identifier = list( my_file_service_identifiers )[0]
else: thumbnail_file_service_identifier = self._file_service_identifier
@ -1926,7 +1928,7 @@ class Thumbnail( Selectable ):
file_service_identifiers = self.GetFileServiceIdentifiersCDPP()
if inbox: dc.DrawBitmap( CC.GlobalBMPs.inbox_bmp, width - 18, 0 )
elif CC.LOCAL_FILE_SERVICE_IDENTIFIER in file_service_identifiers.GetPending(): dc.DrawBitmap( CC.GlobalBMPs.downloading_bmp, width - 18, 0 )
elif HC.LOCAL_FILE_SERVICE_IDENTIFIER in file_service_identifiers.GetPending(): dc.DrawBitmap( CC.GlobalBMPs.downloading_bmp, width - 18, 0 )
if self._dump_status == CC.DUMPER_DUMPED_OK: dc.DrawBitmap( CC.GlobalBMPs.dump_ok, width - 18, 18 )
elif self._dump_status == CC.DUMPER_RECOVERABLE_ERROR: dc.DrawBitmap( CC.GlobalBMPs.dump_recoverable, width - 18, 18 )
@ -1996,7 +1998,7 @@ class ThumbnailMediaCollection( Thumbnail, ClientGUIMixins.MediaCollection ):
ClientGUIMixins.MediaCollection.ProcessContentUpdate( self, content_update )
if content_update.GetAction() == CC.CONTENT_UPDATE_ADD and content_update.GetServiceIdentifier() == CC.LOCAL_FILE_SERVICE_IDENTIFIER:
if content_update.GetAction() == HC.CONTENT_UPDATE_ADD and content_update.GetServiceIdentifier() == HC.LOCAL_FILE_SERVICE_IDENTIFIER:
if self.GetDisplayMedia() in self._GetMedia( content_update.GetHashes() ): self.ReloadFromDB()
@ -2014,6 +2016,6 @@ class ThumbnailMediaSingleton( Thumbnail, ClientGUIMixins.MediaSingleton ):
ClientGUIMixins.MediaSingleton.ProcessContentUpdate( self, content_update )
if content_update.GetAction() == CC.CONTENT_UPDATE_ADD and content_update.GetServiceIdentifier() == CC.LOCAL_FILE_SERVICE_IDENTIFIER: self.ReloadFromDB()
if content_update.GetAction() == HC.CONTENT_UPDATE_ADD and content_update.GetServiceIdentifier() == HC.LOCAL_FILE_SERVICE_IDENTIFIER: self.ReloadFromDB()

View File

@ -225,7 +225,7 @@ class MediaList():
for media in self._GetMedia( hashes, 'collections' ): media.ProcessContentUpdate( content_update )
if action == CC.CONTENT_UPDATE_ARCHIVE:
if action == HC.CONTENT_UPDATE_ARCHIVE:
if HC.SYSTEM_PREDICATE_INBOX in self._predicates:
@ -235,7 +235,7 @@ class MediaList():
self._RemoveMedia( affected_singleton_media, affected_collected_media )
elif action == CC.CONTENT_UPDATE_INBOX:
elif action == HC.CONTENT_UPDATE_INBOX:
if HC.SYSTEM_PREDICATE_ARCHIVE in self._predicates:
@ -245,7 +245,7 @@ class MediaList():
self._RemoveMedia( affected_singleton_media, affected_collected_media )
elif action == CC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier:
elif action == HC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier:
affected_singleton_media = self._GetMedia( hashes, 'singletons' )
affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ]
@ -265,8 +265,8 @@ class MediaList():
service_identifier = update.GetServiceIdentifier()
if action == CC.SERVICE_UPDATE_DELETE_PENDING: self.DeletePending( service_identifier )
elif action == CC.SERVICE_UPDATE_RESET: self.ResetService( service_identifier )
if action == HC.SERVICE_UPDATE_DELETE_PENDING: self.DeletePending( service_identifier )
elif action == HC.SERVICE_UPDATE_RESET: self.ResetService( service_identifier )
def ResetService( self, service_identifier ):

View File

@ -137,7 +137,7 @@ class PageMessages( PageBase, wx.SplitterWindow ):
self._search_preview_split.Bind( wx.EVT_SPLITTER_DCLICK, self.EventPreviewUnsplit )
self._InitManagementPanel()
self._preview_panel = ClientGUICanvas.CanvasPanel( self._search_preview_split, self._page_key, CC.LOCAL_FILE_SERVICE_IDENTIFIER )
self._preview_panel = ClientGUICanvas.CanvasPanel( self._search_preview_split, self._page_key, HC.LOCAL_FILE_SERVICE_IDENTIFIER )
self._InitMessagesPanel()
self.SplitVertically( self._search_preview_split, self._messages_panel, self._options[ 'hpos' ] )
@ -171,7 +171,7 @@ class PageMessages( PageBase, wx.SplitterWindow ):
class PageWithMedia( PageBase, wx.SplitterWindow ):
def __init__( self, parent, file_service_identifier = CC.LOCAL_FILE_SERVICE_IDENTIFIER ):
def __init__( self, parent, file_service_identifier = HC.LOCAL_FILE_SERVICE_IDENTIFIER ):
wx.SplitterWindow.__init__( self, parent )
PageBase.__init__( self )
@ -382,10 +382,10 @@ class PageThreadDumper( PageWithMedia ):
self._media_results = filter( self._imageboard.IsOkToPost, self._media_results )
PageWithMedia.__init__( self, parent, CC.LOCAL_FILE_SERVICE_IDENTIFIER )
PageWithMedia.__init__( self, parent, HC.LOCAL_FILE_SERVICE_IDENTIFIER )
def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelDumper( self._search_preview_split, self, self._page_key, self._imageboard, self._media_results )
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], self._media_results )
def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, HC.LOCAL_FILE_SERVICE_IDENTIFIER, [], self._media_results )

View File

@ -40,91 +40,6 @@ def Parse4chanPostScreen( html ):
except: return ( 'error', 'unknown error' )
def ParseBooruGallery( html, url_base, thumb_classname ):
    """Parse one booru gallery page and return the post-page urls it links to.
    
    Thumbnails are located by css class; relative hrefs are resolved against
    url_base. Order of first appearance is preserved and duplicates dropped.
    """
    
    soup = bs4.BeautifulSoup( html )
    
    seen = set()
    results = []
    
    for thumbnail in soup.find_all( class_ = thumb_classname ):
        
        candidate_links = thumbnail.find_all( 'a' )
        
        # sometimes the thumbnail element is itself the anchor
        if thumbnail.name == 'a': candidate_links.append( thumbnail )
        
        for link in candidate_links:
            
            # rule 34 @ paheal fix
            if link.string is not None and link.string == 'Image Only': continue
            
            absolute_url = urlparse.urljoin( url_base, link[ 'href' ] )
            
            if absolute_url in seen: continue
            
            seen.add( absolute_url )
            results.append( absolute_url )
            
        
    
    return results
def ParseBooruPage( html, url_base, tag_classnames_to_namespaces, image_id = None, image_data = None ):
    """Parse a booru post page.
    
    Returns ( image_url, tags ): the absolute url of the full-size image and a
    list of (possibly namespaced) tag strings.
    
    The image link is located by exactly one of:
      image_id   - DOM id of the <img> element whose src is the image
      image_data - link text of the <a> element pointing at the image
    
    Raises Exception when no image link can be found, instead of the old
    unhelpful NameError/TypeError.
    """
    
    soup = bs4.BeautifulSoup( html )
    
    image_url = None
    
    if image_id is not None:
        
        image = soup.find( id = image_id )
        
        # find() returns None when the layout changed or the post was deleted
        if image is not None: image_url = image[ 'src' ]
        
    
    if image_data is not None:
        
        for link in soup.find_all( 'a' ):
            
            if link.string == image_data: image_url = link[ 'href' ]
            
        
    
    if image_url is None: raise Exception( 'Could not parse image url from booru page!' )
    
    image_url = urlparse.urljoin( url_base, image_url )
    
    image_url = image_url.replace( 'sample/sample-', '' ) # fix for danbooru resizing
    
    tags = []
    
    for ( tag_classname, namespace ) in tag_classnames_to_namespaces.items():
        
        for tag_list_entry in soup.find_all( class_ = tag_classname ):
            
            links = tag_list_entry.find_all( 'a' )
            
            # the tag entry may itself be the anchor
            if tag_list_entry.name == 'a': links.append( tag_list_entry )
            
            for link in links:
                
                # skip the booru's wiki/add/subtract helper links
                if link.string not in ( '?', '-', '+' ):
                    
                    if namespace == '': tags.append( link.string )
                    else: tags.append( namespace + ':' + link.string )
                    
                
            
        
    
    return ( image_url, tags )
def ParseDeviantArtGallery( html ):
results = []

View File

@ -1,4 +1,5 @@
import collections
import httplib
import HydrusPubSub
import locale
import os
@ -9,6 +10,7 @@ import sys
import threading
import time
import traceback
import urlparse
import wx
import yaml
@ -28,7 +30,7 @@ TEMP_DIR = BASE_DIR + os.path.sep + 'temp'
# Misc
NETWORK_VERSION = 9
SOFTWARE_VERSION = 64
SOFTWARE_VERSION = 65
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@ -43,6 +45,18 @@ is_first_start = False
# Enums
CONTENT_UPDATE_ADD = 0
CONTENT_UPDATE_DELETE = 1
CONTENT_UPDATE_PENDING = 2
CONTENT_UPDATE_RESCIND_PENDING = 3
CONTENT_UPDATE_PETITION = 4
CONTENT_UPDATE_RESCIND_PETITION = 5
CONTENT_UPDATE_EDIT_LOG = 6
CONTENT_UPDATE_ARCHIVE = 7
CONTENT_UPDATE_INBOX = 8
CONTENT_UPDATE_RATING = 9
CONTENT_UPDATE_RATINGS_FILTER = 10
GET_DATA = 0
POST_DATA = 1
POST_PETITIONS = 2
@ -127,6 +141,14 @@ SERVICE_INFO_NUM_CONVERSATIONS = 12
SERVICE_INFO_NUM_UNREAD = 13
SERVICE_INFO_NUM_DRAFTS = 14
SERVICE_UPDATE_ACCOUNT = 0
SERVICE_UPDATE_DELETE_PENDING = 1
SERVICE_UPDATE_ERROR = 2
SERVICE_UPDATE_NEXT_BEGIN = 3
SERVICE_UPDATE_RESET = 4
SERVICE_UPDATE_REQUEST_MADE = 5
SERVICE_UPDATE_LAST_CHECK = 6
ADD = 0
DELETE = 1
EDIT = 2
@ -235,6 +257,13 @@ PREDICATE_TYPE_SYSTEM = 0
PREDICATE_TYPE_TAG = 1
PREDICATE_TYPE_NAMESPACE = 2
SUBSCRIPTION_TYPE_DEVIANT_ART = 0
SUBSCRIPTION_TYPE_GIPHY = 1
SUBSCRIPTION_TYPE_PIXIV = 2
SUBSCRIPTION_TYPE_BOORU = 3
SUBSCRIPTION_TYPE_TUMBLR = 4
SUBSCRIPTION_TYPE_HENTAI_FOUNDRY = 5
SYSTEM_PREDICATE_TYPE_EVERYTHING = 0
SYSTEM_PREDICATE_TYPE_INBOX = 1
SYSTEM_PREDICATE_TYPE_ARCHIVE = 2
@ -968,6 +997,250 @@ def ThumbnailResolution( original_resolution, target_resolution ):
return ( int( round( original_width ) ), int( round( original_height ) ) )
class AdvancedHTTPConnection():
    """A keep-alive http(s) connection with optional cookie capture, one level
    of redirect following, charset decoding, yaml response parsing and hydrus
    service bookkeeping (bandwidth/account/error pubsubs when constructed with
    a service_identifier)."""
    
    def __init__( self, url = '', scheme = 'http', host = '', port = None, service_identifier = None, accept_cookies = False ):
        
        # a full url, if given, overrides the separate scheme/host/port args
        if len( url ) > 0:
            
            parse_result = urlparse.urlparse( url )
            
            ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
            
        
        self._scheme = scheme
        self._host = host
        self._port = port
        
        self._service_identifier = service_identifier
        
        self._accept_cookies = accept_cookies
        self._cookies = {}
        
        # hydrus services can take a long time to respond to big requests;
        # stored so reconnects keep the same timeout (the old code dropped it)
        if service_identifier is None: self._timeout = 30
        else: self._timeout = 300
        
        self._ResetConnection()
        
    
    def _ResetConnection( self ):
        
        # (re)build the underlying httplib connection with the stored timeout
        if self._scheme == 'http': self._connection = httplib.HTTPConnection( self._host, self._port, timeout = self._timeout )
        else: self._connection = httplib.HTTPSConnection( self._host, self._port, timeout = self._timeout )
        
    
    def close( self ): self._connection.close()
    
    def connect( self ): self._connection.connect()
    
    def GetCookies( self ): return self._cookies
    
    def geturl( self, url, headers = None, is_redirect = False, follow_redirects = True ):
        """GET the path+query portion of url over this connection."""
        
        parse_result = urlparse.urlparse( url )
        
        request = parse_result.path
        
        query = parse_result.query
        
        if query != '': request += '?' + query
        
        return self.request( 'GET', request, headers = headers, is_redirect = is_redirect, follow_redirects = follow_redirects )
        
    
    def request( self, request_type, request, headers = None, body = None, is_redirect = False, follow_redirects = True ):
        """Perform the request and return the parsed response body.
        
        Decodes text responses by charset, parses yaml responses into objects,
        follows one level of redirects (when follow_redirects) and raises the
        appropriate hydrus exception for error statuses.
        """
        
        # copy rather than mutate: the old mutable default { } meant the
        # User-Agent and Cookie headers leaked into later calls and into
        # callers' own dicts
        if headers is None: headers = {}
        else: headers = dict( headers )
        
        if 'User-Agent' not in headers: headers[ 'User-Agent' ] = 'hydrus/' + str( NETWORK_VERSION )
        
        if len( self._cookies ) > 0: headers[ 'Cookie' ] = '; '.join( [ k + '=' + v for ( k, v ) in self._cookies.items() ] )
        
        try:
            
            self._connection.request( request_type, request, headers = headers, body = body )
            
            response = self._connection.getresponse()
            
            raw_response = response.read()
            
        except ( httplib.CannotSendRequest, httplib.BadStatusLine ):
            
            # for some reason, we can't send a request on the current connection, so let's make a new one!
            try:
                
                self._ResetConnection()
                
                self._connection.request( request_type, request, headers = headers, body = body )
                
                response = self._connection.getresponse()
                
                raw_response = response.read()
                
            except:
                
                print( traceback.format_exc() )
                
                raise
                
            
        except:
            
            print( traceback.format_exc() )
            
            raise Exception( 'Could not connect to server' )
            
        
        if self._accept_cookies:
            
            for cookie in response.msg.getallmatchingheaders( 'Set-Cookie' ): # msg is a mimetools.Message
                
                try:
                    
                    cookie = cookie.replace( 'Set-Cookie: ', '' )
                    
                    if ';' in cookie: ( cookie, expiry_gumpf ) = cookie.split( ';', 1 )
                    
                    ( k, v ) = cookie.split( '=' )
                    
                    self._cookies[ k ] = v
                    
                except: pass
                
            
        
        if len( raw_response ) > 0:
            
            content_type = response.getheader( 'Content-Type' )
            
            if content_type is not None:
                
                # additional info can be a filename or charset=utf-8 or whatever
                if content_type == 'text/html':
                    
                    mime_string = content_type
                    
                    try: raw_response = raw_response.decode( 'utf-8' )
                    except: pass
                    
                elif '; ' in content_type:
                    
                    ( mime_string, additional_info ) = content_type.split( '; ' )
                    
                    if 'charset=' in additional_info:
                        
                        # this does utf-8, ISO-8859-4, whatever
                        ( gumpf, charset ) = additional_info.split( '=' )
                        
                        try: raw_response = raw_response.decode( charset )
                        except: pass
                        
                    
                else: mime_string = content_type
                
                if mime_string in mime_enum_lookup and mime_enum_lookup[ mime_string ] == APPLICATION_YAML:
                    
                    try: parsed_response = yaml.safe_load( raw_response )
                    except Exception as e: raise NetworkVersionException( 'Failed to parse a response object!' + os.linesep + unicode( e ) )
                    
                else: parsed_response = raw_response
                
            else: parsed_response = raw_response
            
        else: parsed_response = raw_response
        
        if self._service_identifier is not None:
            
            service_type = self._service_identifier.GetType()
            
            server_header = response.getheader( 'Server' )
            
            service_string = service_string_lookup[ service_type ]
            
            # sanity-check that we are talking to the right kind of service
            if server_header is None or service_string not in server_header:
                
                pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, GetUnknownAccount() ) )
                
                raise WrongServiceTypeException( 'Target was not a ' + service_string + '!' )
                
            
            if '?' in request: request_command = request.split( '?' )[0]
            else: request_command = request
            
            if '/' in request_command: request_command = request_command.split( '/' )[1]
            
            # record bandwidth for accountable requests
            if request_type == 'GET':
                
                if ( service_type, GET, request_command ) in BANDWIDTH_CONSUMING_REQUESTS: pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_REQUEST_MADE, self._service_identifier, len( raw_response ) ) )
                
            elif ( service_type, POST, request_command ) in BANDWIDTH_CONSUMING_REQUESTS: pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_REQUEST_MADE, self._service_identifier, len( body ) ) )
            
        
        if response.status == 200: return parsed_response
        elif response.status == 205: return
        elif response.status in ( 301, 302, 303, 307 ):
            
            location = response.getheader( 'Location' )
            
            # old code raised on the undefined name 'data' here (NameError)
            if location is None: raise Exception( 'Received a redirect response with no Location header!' )
            else:
                
                if not follow_redirects: return ''
                
                if is_redirect: raise Exception( 'Too many redirects!' )
                
                url = location
                
                parse_result = urlparse.urlparse( url )
                
                redirected_request = parse_result.path
                
                redirected_query = parse_result.query
                
                if redirected_query != '': redirected_request += '?' + redirected_query
                
                ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port )
                
                if ( scheme is None or scheme == self._scheme ) and ( request == redirected_request or request in redirected_request or redirected_request in request ): raise Exception( 'Redirection problem' )
                else:
                    
                    # reuse this connection when the redirect stays on-host
                    if host is None or ( host == self._host and port == self._port ): connection = self
                    else: connection = AdvancedHTTPConnection( url )
                    
                    if response.status in ( 301, 307 ):
                        
                        # 301: moved permanently, repeat request
                        # 307: moved temporarily, repeat request
                        return connection.request( request_type, redirected_request, headers = headers, body = body, is_redirect = True )
                        
                    elif response.status in ( 302, 303 ):
                        
                        # 302: moved temporarily, repeat request (except everyone treats it like 303 for no good fucking reason)
                        # 303: thanks, now go here with GET
                        return connection.request( 'GET', redirected_request, is_redirect = True )
                        
                    
                
            
        elif response.status == 304: raise NotModifiedException()
        else:
            
            if self._service_identifier is not None:
                
                pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ERROR, self._service_identifier, parsed_response ) )
                
                if response.status in ( 401, 426 ): pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, GetUnknownAccount() ) )
                
            
            if response.status == 401: raise PermissionsException( parsed_response )
            elif response.status == 403: raise ForbiddenException( parsed_response )
            elif response.status == 404: raise NotFoundException( parsed_response )
            elif response.status == 426: raise NetworkVersionException( parsed_response )
            elif response.status in ( 500, 501, 502, 503 ):
                
                try: print( parsed_response )
                except: pass
                
                raise Exception( parsed_response )
                
            else: raise Exception( parsed_response )
            
        
    
    def SetCookie( self, key, value ): self._cookies[ key ] = value
class HydrusYAMLBase( yaml.YAMLObject ):
yaml_loader = yaml.SafeLoader
@ -1304,6 +1577,10 @@ class ClientServiceIdentifier( HydrusYAMLBase ):
def GetType( self ): return self._type
LOCAL_FILE_SERVICE_IDENTIFIER = ClientServiceIdentifier( 'local files', LOCAL_FILE, 'local files' )
LOCAL_TAG_SERVICE_IDENTIFIER = ClientServiceIdentifier( 'local tags', LOCAL_TAG, 'local tags' )
NULL_SERVICE_IDENTIFIER = ClientServiceIdentifier( '', NULL_SERVICE, 'no service' )
class ContentUpdate():
def __init__( self, action, service_identifier, hashes, info = None ):
@ -1380,11 +1657,12 @@ class DAEMONQueue( DAEMON ):
class DAEMONWorker( DAEMON ):
def __init__( self, name, callable, topics = [], period = 1200 ):
def __init__( self, name, callable, topics = [], period = 1200, init_wait = 3 ):
DAEMON.__init__( self, name, callable, period )
self._topics = topics
self._init_wait = init_wait
self.start()
@ -1393,7 +1671,7 @@ class DAEMONWorker( DAEMON ):
def run( self ):
time.sleep( 3 )
self._event.wait( self._init_wait )
while True:
@ -1886,6 +2164,21 @@ class ServerServiceIdentifier( HydrusYAMLBase ):
def GetType( self ): return self._type
class ServiceUpdate():
    """A simple value object describing a pending change to a service.
    
    action: one of the SERVICE_UPDATE_* constants. # make this an enumerated thing, yo
    service_identifier: the service the update applies to.
    info: optional action-specific payload.
    """
    
    def __init__( self, action, service_identifier, info = None ):
        
        self._action = action
        self._service_identifier = service_identifier
        self._info = info
        
    
    def GetAction( self ):
        
        return self._action
        
    
    def GetInfo( self ):
        
        return self._info
        
    
    def GetServiceIdentifier( self ):
        
        return self._service_identifier
        
    
# sqlite mod
# teach sqlite how to store python dicts: any dict bound to a query parameter is
# serialised to a yaml text document via yaml.safe_dump before being written
sqlite3.register_adapter( dict, yaml.safe_dump )

View File

@ -0,0 +1,681 @@
import bs4
import HydrusConstants as HC
import json
import lxml
import traceback
import urlparse
import wx
def ConvertServiceIdentifiersToTagsToContentUpdates( hash, service_identifiers_to_tags ):
    """Build one CONTENT_UPDATE_EDIT_LOG ContentUpdate per service that has any tags.
    
    Tags for the local tag service are ADDed directly; tags for any other
    service are marked PENDING. Services mapped to an empty tag list are skipped.
    """
    
    content_updates = []
    
    for ( service_identifier, tags ) in service_identifiers_to_tags.items():
        
        if len( tags ) == 0: continue
        
        if service_identifier == HC.LOCAL_TAG_SERVICE_IDENTIFIER: action = HC.CONTENT_UPDATE_ADD
        else: action = HC.CONTENT_UPDATE_PENDING
        
        edit_log = [ ( action, tag ) for tag in tags ]
        
        content_updates.append( HC.ContentUpdate( HC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), info = edit_log ) )
        
    
    return content_updates
    
def GetDownloader( subscription_type, *args ):
    """Factory: instantiate the Downloader subclass for the given subscription type.
    
    subscription_type is one of HC's SUBSCRIPTION_TYPE_* constants; *args are
    forwarded to the chosen class's constructor.
    
    Raises Exception for an unrecognised subscription_type. (Previously an
    unknown type fell through and raised an opaque UnboundLocalError on c.)
    """
    
    if subscription_type == HC.SUBSCRIPTION_TYPE_BOORU: c = DownloaderBooru
    elif subscription_type == HC.SUBSCRIPTION_TYPE_DEVIANT_ART: c = DownloaderDeviantArt
    elif subscription_type == HC.SUBSCRIPTION_TYPE_GIPHY: c = DownloaderGiphy
    elif subscription_type == HC.SUBSCRIPTION_TYPE_HENTAI_FOUNDRY: c = DownloaderHentaiFoundry
    elif subscription_type == HC.SUBSCRIPTION_TYPE_PIXIV: c = DownloaderPixiv
    elif subscription_type == HC.SUBSCRIPTION_TYPE_TUMBLR: c = DownloaderTumblr
    else: raise Exception( 'Unknown subscription type: ' + str( subscription_type ) )
    
    return c( *args )
    
def ConvertTagsToServiceIdentifiersToTags( tags, advanced_tag_options ):
    """Filter a raw tag list down to { service_identifier : [ cleaned tags ] }.
    
    advanced_tag_options maps each service to the namespaces it wants; the empty
    string namespace means unnamespaced tags. None entries in tags are dropped,
    services whose namespaces collect nothing are omitted from the result.
    """
    
    usable_tags = [ tag for tag in tags if tag is not None ]
    
    service_identifiers_to_tags = {}
    
    for ( service_identifier, namespaces ) in advanced_tag_options.items():
        
        if len( namespaces ) == 0: continue
        
        collected = []
        
        for namespace in namespaces:
            
            if namespace == '': collected.extend( HC.CleanTag( tag ) for tag in usable_tags if ':' not in tag )
            else: collected.extend( HC.CleanTag( tag ) for tag in usable_tags if tag.startswith( namespace + ':' ) )
            
        
        if len( collected ) > 0: service_identifiers_to_tags[ service_identifier ] = collected
        
    
    return service_identifiers_to_tags
    
class Downloader():
    """Base class for the site downloaders.
    
    Subclasses implement _GetNextGalleryPageURL and _ParseGalleryPage (and
    usually GetTags); this base handles connection caching and page counting.
    """
    
    def __init__( self ):
        
        # one AdvancedHTTPConnection per ( scheme, host, port ), reused across requests
        self._connections = {}
        
        self._num_pages_done = 0
        
        # ask the subclass for a url immediately so a bad query fails early
        example_url = self._GetNextGalleryPageURL()
        
    
    def _EstablishSession( self, connection ): pass # subclasses set cookies/log in here
    
    def _GetConnection( self, url ):
        
        # fetch (or lazily create and session-initialise) the cached connection
        # for this url's ( scheme, host, port )
        
        parsed = urlparse.urlparse( url )
        
        key = ( parsed.scheme, parsed.hostname, parsed.port )
        
        if key not in self._connections:
            
            ( scheme, host, port ) = key
            
            new_connection = HC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port )
            
            self._EstablishSession( new_connection )
            
            self._connections[ key ] = new_connection
            
        
        return self._connections[ key ]
        
    
    def GetAnotherPage( self ):
        
        # fetch and parse the next gallery page, then advance the page counter
        
        gallery_url = self._GetNextGalleryPageURL()
        
        raw_page = self._GetConnection( gallery_url ).geturl( gallery_url )
        
        url_info = self._ParseGalleryPage( raw_page, gallery_url )
        
        self._num_pages_done += 1
        
        return url_info
        
    
    def GetFile( self, url, *args ):
        
        # default implementation: the given url is the file itself
        return self._GetConnection( url ).geturl( url )
        
    
    def GetFileAndTags( self, url, *args ):
        
        downloaded_file = self.GetFile( url, *args )
        tags = self.GetTags( url, *args )
        
        return ( downloaded_file, tags )
        
    
    def GetTags( self, url ): pass # subclasses override; base yields None
    
class DownloaderBooru( Downloader ):
    """Downloads galleries and files from a booru-style site described by a booru object.
    
    The booru object supplies the search url template, paging behaviour and the
    css hooks needed to parse gallery and image pages.
    """
    
    def __init__( self, booru, query ):
        
        self._booru = booru
        self._tags = query
        
        ( self._search_url, self._gallery_advance_num, self._search_separator, self._thumb_classname ) = booru.GetGalleryParsingInfo()
        
        Downloader.__init__( self )
        
    
    def _GetNextGalleryPageURL( self ):
        
        # boorus that advance by 1 page at a time are 1-indexed; boorus that
        # advance by result offset are 0-indexed
        if self._gallery_advance_num == 1: num_page_base = 1
        else: num_page_base = 0
        
        return self._search_url.replace( '%tags%', self._search_separator.join( self._tags ) ).replace( '%index%', str( num_page_base + ( self._num_pages_done * self._gallery_advance_num ) ) )
        
    
    def _ParseGalleryPage( self, html, url_base ):
        
        # return the image page urls on this gallery page, deduplicated, in document order
        
        urls_set = set()
        urls = []
        
        soup = bs4.BeautifulSoup( html )
        
        thumbnails = soup.find_all( class_ = self._thumb_classname )
        
        for thumbnail in thumbnails:
            
            links = thumbnail.find_all( 'a' )
            
            if thumbnail.name == 'a': links.append( thumbnail )
            
            for link in links:
                
                if link.string is not None and link.string == 'Image Only': continue # rule 34 @ paheal fix
                
                url = link[ 'href' ]
                
                url = urlparse.urljoin( url_base, url )
                
                if url not in urls_set:
                    
                    urls_set.add( url )
                    urls.append( url )
                    
                
            
        
        return urls
        
    
    def _ParseImagePage( self, html, url_base ):
        
        # returns ( image_url, tags ) parsed from an image page
        # NOTE(review): this unpacking order differs from GetGalleryParsingInfo's
        # ( search_url, gallery_advance_num, search_separator, ... ) order used in
        # __init__ -- confirm against the booru class's GetData.
        ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = self._booru.GetData()
        
        soup = bs4.BeautifulSoup( html )
        
        image_url = None
        
        if image_id is not None:
            
            image = soup.find( id = image_id )
            
            # fix: guard against a missing element; previously this raised TypeError
            if image is not None: image_url = image[ 'src' ]
            
        
        if image_data is not None:
            
            links = soup.find_all( 'a' )
            
            for link in links:
                
                if link.string == image_data: image_url = link[ 'href' ]
                
            
        
        # fix: previously an unparseable page raised UnboundLocalError on image_url
        if image_url is None: raise Exception( 'Could not parse image url!' )
        
        image_url = urlparse.urljoin( url_base, image_url )
        
        image_url = image_url.replace( 'sample/sample-', '' ) # fix for danbooru resizing
        
        tags = []
        
        for ( tag_classname, namespace ) in tag_classnames_to_namespaces.items():
            
            tag_list_entries = soup.find_all( class_ = tag_classname )
            
            for tag_list_entry in tag_list_entries:
                
                links = tag_list_entry.find_all( 'a' )
                
                if tag_list_entry.name == 'a': links.append( tag_list_entry )
                
                for link in links:
                    
                    # '?', '-', '+' are the wiki/add/remove widgets next to each tag
                    if link.string not in ( '?', '-', '+' ):
                        
                        if namespace == '': tags.append( link.string )
                        else: tags.append( namespace + ':' + link.string )
                        
                    
                
            
        
        return ( image_url, tags )
        
    
    def _GetFileURLAndTags( self, url ):
        
        # fetch an image page and parse it into ( file_url, tags )
        
        connection = self._GetConnection( url )
        
        html = connection.geturl( url )
        
        return self._ParseImagePage( html, url )
        
    
    def GetFile( self, url ):
        
        ( file_url, tags ) = self._GetFileURLAndTags( url )
        
        connection = self._GetConnection( file_url )
        
        return connection.geturl( file_url )
        
    
    def GetFileAndTags( self, url ):
        
        ( file_url, tags ) = self._GetFileURLAndTags( url )
        
        connection = self._GetConnection( file_url )
        
        file = connection.geturl( file_url )
        
        return ( file, tags )
        
    
    def GetTags( self, url ):
        
        ( file_url, tags ) = self._GetFileURLAndTags( url )
        
        return tags
        
    
class DownloaderDeviantArt( Downloader ):
    """Downloads an artist's gallery from deviantart.
    
    Gallery pages carry enough metadata (title/creator/category) that no image
    page fetch is needed; _ParseGalleryPage yields ( image_url, tags ) pairs
    and GetTags just echoes the tags the gallery parse produced.
    """
    
    def __init__( self, query ):
        
        artist = query
        
        self._gallery_url = 'http://' + artist + '.deviantart.com/gallery/?catpath=/&offset='
        
        Downloader.__init__( self )
        
    
    def _GetNextGalleryPageURL( self ): return self._gallery_url + str( self._num_pages_done * 24 ) # 24 results per page
    
    def _ParseGalleryPage( self, html, url_base ):
        
        results = []
        
        soup = bs4.BeautifulSoup( html )
        
        thumbs_container = soup.find( class_ = 'stream stream-fh' )
        
        def starts_with_thumb( classname ): return classname is not None and classname.startswith( 'thumb' )
        
        links = thumbs_container.find_all( 'a', class_ = starts_with_thumb )
        
        for link in links:
            
            page_url = link[ 'href' ] # something in the form of blah.da.com/art/blah-123456
            
            page_url_split = page_url.split( '-' )
            
            deviant_art_file_id = page_url_split[-1 ]
            
            image_url = 'http://www.deviantart.com/download/' + deviant_art_file_id + '/' # trailing slash is important
            
            raw_title = link[ 'title' ] # something in the form sweet dolls by ~AngeniaC, Feb 29, 2012 in Artisan Crafts &gt; Miniatures &gt; Jewelry
            
            # peel the title string apart: title, then creator, then date, then category breadcrumb
            ( title, raw_title ) = raw_title.split( ' by ~', 1 )
            
            ( creator, raw_title ) = raw_title.split( ', ', 1 )
            
            ( date_gumpf, raw_category_tags ) = raw_title.split( ' in ', 1 )
            
            category_tags = raw_category_tags.split( ' > ' ) # bs4 has already unescaped the &gt; entities
            
            # fix: tags was previously initialised twice; the first assignment was dead
            tags = []
            
            tags.append( 'title:' + title )
            tags.append( 'creator:' + creator )
            tags.extend( category_tags )
            
            results.append( ( image_url, tags ) )
            
        
        return results
        
    
    def GetTags( self, url, tags ): return tags
    
class DownloaderGiphy( Downloader ):
    """Downloads gifs matching a tag from giphy's json api."""
    
    def __init__( self, query ):
        
        tag = query
        
        self._gallery_url = 'http://giphy.com/api/gifs?tag=' + tag.replace( ' ', '+' ) + '&page='
        
        Downloader.__init__( self )
        
    
    def _GetNextGalleryPageURL( self ): return self._gallery_url + str( self._num_pages_done )
    
    def _ParseGalleryPage( self, data, url_base ):
        
        # gallery pages are json; yield ( original gif url, giphy id ) pairs
        
        json_dict = json.loads( data )
        
        if 'data' not in json_dict: return []
        
        return [ ( gif_info[ 'image_original_url' ], gif_info[ 'id' ] ) for gif_info in json_dict[ 'data' ] ]
        
    
    def GetTags( self, url, id ):
        
        # a second api hit fetches the tags for this giphy id; best effort --
        # a failed tag fetch logs the traceback and yields no tags
        
        tag_api_url = 'http://giphy.com/api/gifs/' + str( id )
        
        connection = self._GetConnection( tag_api_url )
        
        try:
            
            raw_json = connection.geturl( tag_api_url )
            
            tags_data = json.loads( raw_json )[ 'data' ][ 'tags' ]
            
            tags = [ tag_data[ 'name' ] for tag_data in tags_data ]
            
        except:
            
            print( traceback.format_exc() )
            
            tags = []
            
        
        return tags
        
    
class DownloaderHentaiFoundry( Downloader ):
    """Downloads from hentai-foundry using the client's existing HF web session."""
    
    def __init__( self, query_type, query ):
        
        # bug fix: query_type was previously assigned from query, so the
        # 'artist'/'tags' switch in _GetNextGalleryPageURL could never behave correctly
        self._query_type = query_type
        self._query = query
        
        Downloader.__init__( self )
        
    
    def _EstablishSession( self, connection ):
        
        # reuse the client's hf cookies so the search isn't content-filtered
        cookies = wx.GetApp().GetWebCookies( 'hentai foundry' )
        
        for ( key, value ) in cookies.items(): connection.SetCookie( key, value )
        
    
    def _GetNextGalleryPageURL( self ):
        
        if self._query_type == 'artist':
            
            pass
            
            # this is slightly more difficult since it needs to manage scraps interlace
            
        elif self._query_type == 'tags':
            
            tags = self._query
            
            return 'http://www.hentai-foundry.com/search/pictures?query=' + '+'.join( tags ) + '&search_in=all&scraps=-1&page=' + str( self._num_pages_done )
            
        
    
    def _ParseGalleryPage( self, html, url_base ):
        
        # return the picture page urls on this gallery page, deduplicated, in document order
        
        urls_set = set()
        
        soup = bs4.BeautifulSoup( html )
        
        def correct_url( href ):
            
            # a good url is in the form "/pictures/user/artist_name/file_id/title"
            
            if href.count( '/' ) == 5 and href.startswith( '/pictures/user/' ):
                
                ( nothing, pictures, user, artist_name, file_id, title ) = href.split( '/' )
                
                # /pictures/user/artist_name/page/3 is a pagination link, not a picture
                if file_id != 'page': return True
                
            
            return False
            
        
        links = soup.find_all( 'a', href = correct_url )
        
        urls = [ 'http://www.hentai-foundry.com' + link['href'] for link in links ]
        
        result_urls = []
        
        for url in urls:
            
            if url not in urls_set:
                
                urls_set.add( url )
                
                result_urls.append( url )
                
            
        
        return result_urls
        
    
    def _ParseImagePage( self, html ):
        
        # can't parse this easily normally because HF is a pain with the preview->click to see full size business.
        # find http://pictures.hentai-foundry.com//
        # then extend it to http://pictures.hentai-foundry.com//k/KABOS/172144.jpg
        # the .jpg bit is what we really need, but whatever
        
        try:
            
            index = html.index( 'http://pictures.hentai-foundry.com//' )
            
            stuff = html[ index : index + 100 ]
            
            try: ( image_url, gumpf ) = stuff.split( '"', 1 )
            except: ( image_url, gumpf ) = stuff.split( '&#039;', 1 )
            
        except: raise Exception( 'Could not parse image url!' )
        
        soup = bs4.BeautifulSoup( html )
        
        tags = []
        
        try:
            
            title = soup.find( 'title' )
            
            ( data, nothing ) = unicode( title.string ).split( ' - Hentai Foundry' )
            
            data_reversed = data[::-1] # want to do it right-side first, because title might have ' by ' in it
            
            ( artist_reversed, title_reversed ) = data_reversed.split( ' yb ' )
            
            artist = artist_reversed[::-1]
            
            title = title_reversed[::-1]
            
            tags.append( 'creator:' + artist )
            tags.append( 'title:' + title )
            
        except: pass
        
        tag_links = soup.find_all( 'a', rel = 'tag' )
        
        for tag_link in tag_links: tags.append( tag_link.string )
        
        return ( image_url, tags )
        
    
    def _GetFileURLAndTags( self, url ):
        
        # bug fix: this helper was previously missing, so GetFile/GetFileAndTags/GetTags
        # all raised AttributeError
        
        connection = self._GetConnection( url )
        
        html = connection.geturl( url )
        
        return self._ParseImagePage( html )
        
    
    def GetFile( self, url ):
        
        ( file_url, tags ) = self._GetFileURLAndTags( url )
        
        connection = self._GetConnection( file_url )
        
        return connection.geturl( file_url )
        
    
    def GetFileAndTags( self, url ):
        
        ( file_url, tags ) = self._GetFileURLAndTags( url )
        
        connection = self._GetConnection( file_url )
        
        file = connection.geturl( file_url )
        
        return ( file, tags )
        
    
    def GetTags( self, url ):
        
        ( file_url, tags ) = self._GetFileURLAndTags( url )
        
        return tags
        
    
class DownloaderPixiv( Downloader ):
    """Downloads from pixiv using the client's existing pixiv web session.
    
    Gallery parsing yields ( page_url, image_url_reference_url, image_url )
    triples; the file fetch needs the referer header to get past pixiv's
    hotlink protection.
    """
    
    def __init__( self, query_type, query ):
        
        self._query_type = query_type
        self._query = query
        
        Downloader.__init__( self )
        
    
    def _EstablishSession( self, connection ):
        
        cookies = wx.GetApp().GetWebCookies( 'pixiv' )
        
        for ( key, value ) in cookies.items(): connection.SetCookie( key, value )
        
    
    def _GetNextGalleryPageURL( self ):
        
        # NOTE(review): both branches are unimplemented and return None -- gallery
        # fetching for pixiv is presumably not live yet; confirm before relying on it
        if self._query_type == 'artist': pass
        elif self._query_type == 'tags': pass
        
    
    def _ParseGalleryPage( self, html, url_base ):
        
        results = []
        
        soup = bs4.BeautifulSoup( html )
        
        thumbnail_links = soup.find_all( class_ = 'work' )
        
        for thumbnail_link in thumbnail_links:
            
            url = urlparse.urljoin( url_base, thumbnail_link[ 'href' ] ) # http://www.pixiv.net/member_illust.php?mode=medium&illust_id=33500690
            
            image_url_reference_url = url.replace( 'medium', 'big' ) # http://www.pixiv.net/member_illust.php?mode=big&illust_id=33500690
            
            thumbnail_img = thumbnail_link.find( class_ = '_thumbnail' )
            
            thumbnail_image_url = thumbnail_img[ 'src' ] # http://i2.pixiv.net/img02/img/dnosuke/462657_s.jpg
            
            image_url = thumbnail_image_url.replace( '_s', '' ) # http://i2.pixiv.net/img02/img/dnosuke/462657.jpg
            
            results.append( ( url, image_url_reference_url, image_url ) )
            
        
        return results
        
    
    def _ParseImagePage( self, html, image_url ):
        
        soup = bs4.BeautifulSoup( html )
        
        tags = soup.find( 'ul', class_ = 'tags' )
        
        tags = [ a_item.string for a_item in tags.find_all( 'a', class_ = 'text' ) ]
        
        user = soup.find( 'h1', class_ = 'user' )
        
        tags.append( 'creator:' + user.string )
        
        title_parent = soup.find( 'section', class_ = 'work-info' )
        
        title = title_parent.find( 'h1', class_ = 'title' )
        
        tags.append( 'title:' + title.string )
        
        try: tags.append( 'creator:' + image_url.split( '/' )[ -2 ] ) # http://i2.pixiv.net/img02/img/dnosuke/462657.jpg -> dnosuke
        except: pass
        
        return tags
        
    
    def GetFile( self, url, image_url_reference_url, image_url ):
        
        connection = self._GetConnection( image_url )
        
        # pixiv 403s direct image requests without a referer from the 'big' page
        headers = { 'Referer' : image_url_reference_url }
        
        return connection.geturl( image_url, headers = headers )
        
    
    def GetFileAndTags( self, url, image_url_reference_url, image_url ):
        
        file = self.GetFile( url, image_url_reference_url, image_url )
        tags = self.GetTags( url, image_url_reference_url, image_url )
        
        # bug fix: this method previously had no return statement and always yielded None
        return ( file, tags )
        
    
    def GetTags( self, url, image_url_reference_url, image_url ):
        
        connection = self._GetConnection( url )
        
        # bug fix: previously referenced self._page_connection, which is never set
        html = connection.geturl( url )
        
        return self._ParseImagePage( html, image_url )
        
    
class DownloaderTumblr( Downloader ):
    """Downloads photo posts from a tumblr blog via its v1 json read api."""
    
    def __init__( self, query ):
        
        username = query
        
        self._gallery_url = 'http://' + username + '.tumblr.com/api/read/json?start=%start%&num=50'
        
        Downloader.__init__( self )
        
    
    # bug fix: previously referenced the undefined name search_url, raising NameError
    def _GetNextGalleryPageURL( self ): return self._gallery_url.replace( '%start%', str( self._num_pages_done * 50 ) )
    
    def _ParseGalleryPage( self, data, url_base ):
        
        # the api wraps the json in 'var tumblr_api_read = ...;\n' -- strip that wrapper
        processed_raw_json = data.split( 'var tumblr_api_read = ' )[1][:-2] # -2 takes a couple newline chars off at the end
        
        json_object = json.loads( processed_raw_json )
        
        results = []
        
        if 'posts' in json_object:
            
            for post in json_object[ 'posts' ]:
                
                if 'tags' in post: tags = post[ 'tags' ]
                else: tags = []
                
                post_type = post[ 'type' ]
                
                if post_type == 'photo':
                    
                    # single-photo posts have an empty 'photos' list and carry the url
                    # at the top level; photoset posts list each photo separately
                    if len( post[ 'photos' ] ) == 0:
                        
                        try: results.append( ( post[ 'photo-url-1280' ], tags ) )
                        except: pass
                        
                    else:
                        
                        for photo in post[ 'photos' ]:
                            
                            try: results.append( ( photo[ 'photo-url-1280' ], tags ) )
                            except: pass
                            
                        
                    
                
            
        
        return results
        
    
    def GetTags( self, url, tags ): return tags
    

File diff suppressed because it is too large Load Diff