2017-01-04 22:48:23 +00:00
|
|
|
import ClientConstants as CC
|
|
|
|
import ClientData
|
|
|
|
import ClientSearch
|
|
|
|
import HydrusConstants as HC
|
|
|
|
import HydrusData
|
2017-05-10 21:33:58 +00:00
|
|
|
import HydrusGlobals as HG
|
2017-01-04 22:48:23 +00:00
|
|
|
import HydrusPaths
|
|
|
|
import HydrusSerialisable
|
2017-02-08 22:27:00 +00:00
|
|
|
import HydrusTags
|
2017-11-15 22:35:49 +00:00
|
|
|
import HydrusThreading
|
2017-01-04 22:48:23 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import stat
|
|
|
|
|
2017-08-23 21:34:25 +00:00
|
|
|
# Maximum full-path length we will generate. 255 is the usual filesystem limit;
# the 10-char margin leaves room for a neighbouring '.txt' sidecar and other surprises.
MAX_PATH_LENGTH = 245 # bit of padding from 255 for .txt neighbouring and other surprises
|
|
|
|
|
|
|
|
def GenerateExportFilename( destination_directory, media, terms ):
    '''Render an export filename for the given media from parsed phrase terms.
    
    destination_directory: the folder the file will be written to (used for length checks).
    media: an object exposing GetTagsManager/GetHash/GetMime.
    terms: a list of ( term_type, term ) pairs as produced by ParseExportPhrase.
    
    Returns the filename (with extension), truncated so the full destination
    path fits inside MAX_PATH_LENGTH. Raises if the directory alone is too long.
    '''
    
    # if the directory eats nearly the whole budget, no usable filename can fit
    if len( destination_directory ) > ( MAX_PATH_LENGTH - 10 ):
        
        raise Exception( 'The destination directory is too long!' )
        
    
    rendered_parts = []
    
    for ( term_type, term ) in terms:
        
        tags_manager = media.GetTagsManager()
        
        if term_type == 'string':
            
            # literal text from the phrase passes straight through
            rendered_parts.append( term )
            
        elif term_type == 'namespace':
            
            namespace_tags = tags_manager.GetNamespaceSlice( ( term, ) )
            
            sorted_subtags = sorted( HydrusTags.SplitTag( namespace_tag )[1] for namespace_tag in namespace_tags )
            
            rendered_parts.append( ', '.join( sorted_subtags ) )
            
        elif term_type == 'predicate':
            
            if term in ( 'tags', 'nn tags' ):
                
                combined_tags = tags_manager.GetCurrent().union( tags_manager.GetPending() )
                
                if term == 'nn tags':
                    
                    # 'nn' = no namespace: drop any namespaced tag entirely
                    usable_tags = [ t for t in combined_tags if ':' not in t ]
                    
                else:
                    
                    # keep all tags, but render only the subtag portion
                    usable_tags = [ HydrusTags.SplitTag( t )[1] for t in combined_tags ]
                    
                
                usable_tags.sort()
                
                rendered_parts.append( ', '.join( usable_tags ) )
                
            elif term == 'hash':
                
                # hex-encoded file hash (py2 str.encode( 'hex' ))
                rendered_parts.append( media.GetHash().encode( 'hex' ) )
                
            
        elif term_type == 'tag':
            
            ( namespace, subtag ) = HydrusTags.SplitTag( term )
            
            # only include the subtag if the media actually carries it
            if tags_manager.HasTag( subtag ):
                
                rendered_parts.append( subtag )
                
            
        
    
    filename = ''.join( rendered_parts )
    
    # replace characters the platform's filesystem cannot accept
    if HC.PLATFORM_WINDOWS:
        
        filename = re.sub( '\\\\|/|:|\\*|\\?|"|<|>|\\|', '_', filename, flags = re.UNICODE )
        
    else:
        
        filename = re.sub( '/', '_', filename, flags = re.UNICODE )
        
    
    mime = media.GetMime()
    
    ext = HC.mime_ext_lookup[ mime ]
    
    # avoid doubling the extension if the phrase already produced it
    if filename.endswith( ext ):
        
        filename = filename[ : - len( ext ) ]
        
    
    # trim the stem so the complete destination path fits the length budget
    example_dest_path = os.path.join( destination_directory, filename + ext )
    
    excess_chars = len( example_dest_path ) - MAX_PATH_LENGTH
    
    if excess_chars > 0:
        
        filename = filename[ : - excess_chars ]
        
    
    return filename + ext
    
|
|
|
|
|
|
|
|
def GetExportPath():
    '''Return the absolute export directory, creating the default one if the user has not configured a path.'''
    
    client_options = HG.client_controller.GetOptions()
    
    portable_export_path = client_options[ 'export_path' ]
    
    if portable_export_path is not None:
        
        return HydrusPaths.ConvertPortablePathToAbsPath( portable_export_path )
        
    
    # nothing configured: fall back to ~/hydrus_export and make sure it exists
    default_export_path = os.path.join( os.path.expanduser( '~' ), 'hydrus_export' )
    
    HydrusPaths.MakeSureDirectoryExists( default_export_path )
    
    return default_export_path
    
|
|
|
|
|
|
|
|
def _SplitStringTerms( terms, opener, closer, new_term_type ):
    
    # Helper: scan every 'string' term for opener...closer pairs, emitting the
    # text before the opener as a 'string' term and the bracketed interior as a
    # term of new_term_type. Non-'string' terms pass through untouched.
    # Raises ValueError (via str.split unpacking) if an opener has no closer.
    
    new_terms = []
    
    for ( term_type, term ) in terms:
        
        if term_type == 'string':
            
            while opener in term:
                
                ( pre, term ) = term.split( opener, 1 )
                
                ( interior, term ) = term.split( closer, 1 )
                
                new_terms.append( ( 'string', pre ) )
                new_terms.append( ( new_term_type, interior ) )
                
            
        
        # the remaining tail (or any non-string term) is kept as-is
        new_terms.append( ( term_type, term ) )
        
    
    return new_terms
    

def ParseExportPhrase( phrase ):
    '''Parse an export filename phrase into a list of ( term_type, term ) pairs.
    
    Bracket syntax: [namespace] -> ( 'namespace', ... ), {predicate} ->
    ( 'predicate', ... ), (tag) -> ( 'tag', ... ); everything else becomes
    ( 'string', ... ) literals (possibly empty strings between brackets).
    
    Raises Exception( 'Could not parse that phrase!' ) on malformed input,
    e.g. an unclosed bracket.
    '''
    
    try:
        
        terms = [ ( 'string', phrase ) ]
        
        # each pass only rewrites 'string' terms, so the order is safe
        terms = _SplitStringTerms( terms, '[', ']', 'namespace' )
        terms = _SplitStringTerms( terms, '{', '}', 'predicate' )
        terms = _SplitStringTerms( terms, '(', ')', 'tag' )
        
    except Exception:
        
        # was a bare 'except:', which also swallowed SystemExit/KeyboardInterrupt;
        # narrow it to Exception while keeping the user-facing message
        raise Exception( 'Could not parse that phrase!' )
        
    
    return terms
    
|
|
|
|
|
|
|
|
|
|
|
|
class ExportFolder( HydrusSerialisable.SerialisableBaseNamed ):
    '''A serialisable, periodically-run export job.
    
    On each DoWork pass (at most once per self._period seconds) it runs the
    stored file search, renders filenames with self._phrase, and mirrors the
    matching files into self._path. In SYNCHRONISE mode it also deletes
    anything in the folder that the search no longer matches.
    '''
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER
    SERIALISABLE_NAME = 'Export Folder'
    SERIALISABLE_VERSION = 2
    
    def __init__( self, name, path = '', export_type = HC.EXPORT_FOLDER_TYPE_REGULAR, file_search_context = None, period = 3600, phrase = None ):
        '''Create an export folder.
        
        name: the folder's display name (also the serialisable name).
        path: destination directory on disk.
        export_type: HC.EXPORT_FOLDER_TYPE_REGULAR or _SYNCHRONISE.
        file_search_context: the search defining which files to export;
            defaults to an empty search on the local file service.
        period: minimum seconds between runs.
        phrase: filename phrase; defaults to the client's configured
            'export_phrase' option.
        '''
        
        HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
        
        if file_search_context is None:
            
            file_search_context = ClientSearch.FileSearchContext( file_service_key = CC.LOCAL_FILE_SERVICE_KEY )
            
        
        if phrase is None:
            
            new_options = HG.client_controller.GetNewOptions()
            
            phrase = new_options.GetString( 'export_phrase' )
            
        
        self._path = path
        self._export_type = export_type
        self._file_search_context = file_search_context
        self._period = period
        self._phrase = phrase
        # timestamp of the last completed run; 0 forces a run on first DoWork
        self._last_checked = 0
        
    
    def _GetSerialisableInfo( self ):
        '''Return the version-2 serialisable tuple for this object.'''
        
        serialisable_file_search_context = self._file_search_context.GetSerialisableTuple()
        
        return ( self._path, self._export_type, serialisable_file_search_context, self._period, self._phrase, self._last_checked )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        '''Restore state from a version-2 serialisable tuple.'''
        
        ( self._path, self._export_type, serialisable_file_search_context, self._period, self._phrase, self._last_checked ) = serialisable_info
        
        self._file_search_context = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_file_search_context )
        
    
    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
        '''Upgrade older serialised tuples to the current version.
        
        v1 -> v2: the path used to be implied by the name; it becomes an
        explicit first element of the tuple.
        '''
        
        if version == 1:
            
            ( export_type, serialisable_file_search_context, period, phrase, last_checked ) = old_serialisable_info
            
            # in v1 the object's name doubled as its destination path
            path = self._name
            
            new_serialisable_info = ( path, export_type, serialisable_file_search_context, period, phrase, last_checked )
            
            return ( 2, new_serialisable_info )
            
        
    
    def DoWork( self ):
        '''Run the export if the period has elapsed and the folder exists.
        
        Fetches matching media results from the database in pages, mirrors
        each file into the folder under its generated filename, and (for
        SYNCHRONISE folders) deletes files no longer matched. Bails out early
        (without updating the timestamp) if export folders are paused or the
        thread is shutting down. On completion, saves itself back to the db.
        '''
        
        if HydrusData.TimeHasPassed( self._last_checked + self._period ):
            
            folder_path = HydrusData.ToUnicode( self._path )
            
            # silently skip if the destination is unset or missing on disk
            if folder_path != '' and os.path.exists( folder_path ) and os.path.isdir( folder_path ):
                
                query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
                
                media_results = []
                
                i = 0
                
                # page size for fetching media results from the db
                base = 256
                
                while i < len( query_hash_ids ):
                    
                    # abort mid-fetch if paused or shutting down
                    if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
                        
                        return
                        
                    
                    if i == 0: ( last_i, i ) = ( 0, base )
                    else: ( last_i, i ) = ( i, i + base )
                    
                    sub_query_hash_ids = query_hash_ids[ last_i : i ]
                    
                    more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
                    
                    media_results.extend( more_media_results )
                    
                
                #
                
                terms = ParseExportPhrase( self._phrase )
                
                # snapshot of what is already in the folder, for sync deletion later
                previous_filenames = set( os.listdir( folder_path ) )
                
                # filenames produced by this run; anything else is a deletion candidate
                sync_filenames = set()
                
                client_files_manager = HG.client_controller.client_files_manager
                
                num_copied = 0
                
                for media_result in media_results:
                    
                    # abort mid-copy if paused or shutting down
                    if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
                        
                        return
                        
                    
                    hash = media_result.GetHash()
                    mime = media_result.GetMime()
                    size = media_result.GetSize()
                    
                    source_path = client_files_manager.GetFilePath( hash, mime )
                    
                    filename = GenerateExportFilename( folder_path, media_result, terms )
                    
                    dest_path = os.path.join( folder_path, filename )
                    
                    # two media can render to the same filename; only the first wins
                    if filename not in sync_filenames:
                        
                        # MirrorFile only copies when the destination differs
                        copied = HydrusPaths.MirrorFile( source_path, dest_path )
                        
                        if copied:
                            
                            num_copied += 1
                            
                            # best-effort: make the exported copy user-writable
                            try: os.chmod( dest_path, stat.S_IWRITE | stat.S_IREAD )
                            except: pass
                            
                        
                    
                    sync_filenames.add( filename )
                    
                
                if num_copied > 0:
                    
                    HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ConvertIntToPrettyString( num_copied ) + ' files.' )
                    
                
                if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
                    
                    # remove anything present before this run that we did not just export
                    deletee_filenames = previous_filenames.difference( sync_filenames )
                    
                    for deletee_filename in deletee_filenames:
                        
                        deletee_path = os.path.join( folder_path, deletee_filename )
                        
                        ClientData.DeletePath( deletee_path )
                        
                    
                    if len( deletee_filenames ) > 0:
                        
                        HydrusData.Print( 'Export folder ' + self._name + ' deleted ' + HydrusData.ConvertIntToPrettyString( len( deletee_filenames ) ) + ' files.' )
                        
                    
                
            
            self._last_checked = HydrusData.GetNow()
            
            # persist the updated _last_checked so we do not re-run on restart
            HG.client_controller.WriteSynchronous( 'serialisable', self )
            
        
    
    def ToTuple( self ):
        '''Return ( name, path, export_type, file_search_context, period, phrase ) for UI/editing.'''
        
        return ( self._name, self._path, self._export_type, self._file_search_context, self._period, self._phrase )
        
    
|
|
|
|
|
|
|
|
|
|
|
|
# register ExportFolder with the serialisation system so stored instances deserialise to this class
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER ] = ExportFolder
|