2021-04-28 21:43:16 +00:00
import collections
2020-05-20 21:36:02 +00:00
import os
import re
2022-07-13 21:35:17 +00:00
import typing
2020-05-20 21:36:02 +00:00
2020-04-22 21:00:35 +00:00
from hydrus . core import HydrusConstants as HC
from hydrus . core import HydrusData
2020-08-27 01:00:42 +00:00
from hydrus . core import HydrusExceptions
2020-04-22 21:00:35 +00:00
from hydrus . core import HydrusGlobals as HG
from hydrus . core import HydrusPaths
from hydrus . core import HydrusSerialisable
from hydrus . core import HydrusTags
from hydrus . core import HydrusThreading
2017-01-04 22:48:23 +00:00
2020-07-29 20:52:44 +00:00
from hydrus . client import ClientConstants as CC
from hydrus . client import ClientPaths
from hydrus . client import ClientSearch
2022-10-26 20:43:00 +00:00
from hydrus . client . metadata import ClientMetadataMigration
2020-08-05 20:10:36 +00:00
from hydrus . client . metadata import ClientTags
2020-07-29 20:52:44 +00:00
2019-09-05 00:05:32 +00:00
# maximum full destination path length we will generate; a bit of padding from the
# common 255-char filesystem limit for .txt neighbouring and other surprises
MAX_PATH_LENGTH = 240 # bit of padding from 255 for .txt neighbouring and other surprises
2017-08-23 21:34:25 +00:00
2022-09-28 17:15:23 +00:00
def GenerateExportFilename( destination_directory, media, terms, file_index, do_not_use_filenames = None ):
    """Build a sanitised export filename (including extension) for the given media.
    
    destination_directory: directory the file will land in; its length counts
        towards the MAX_PATH_LENGTH budget.
    media: object exposing GetTagsManager/GetHash/GetHashId/GetMime.
    terms: list of ( term_type, term ) pairs as produced by ParseExportPhrase.
    file_index: number substituted for the '#' predicate.
    do_not_use_filenames: optional collection of already-taken filenames; when
        provided, a ' (i)' suffix is appended until the name is unique.
    """
    
    def clean_tag_text( t ):
        
        # path separators inside tag text would create surprise subdirectories
        if HC.PLATFORM_WINDOWS:
            
            t = re.sub( r'\\', '_', t )
            
        else:
            
            t = re.sub( '/', '_', t )
            
        
        return t
        
    
    if len( destination_directory ) > ( MAX_PATH_LENGTH - 10 ):
        
        raise Exception( 'The destination directory is too long!' )
        
    
    filename = ''
    
    for ( term_type, term ) in terms:
        
        tags_manager = media.GetTagsManager()
        
        if term_type == 'string':
            
            filename += term
            
        elif term_type == 'namespace':
            
            tags = tags_manager.GetNamespaceSlice( CC.COMBINED_TAG_SERVICE_KEY, ( term, ), ClientTags.TAG_DISPLAY_ACTUAL )
            
            subtags = sorted( ( HydrusTags.SplitTag( tag )[1] for tag in tags ) )
            
            filename += clean_tag_text( ', '.join( subtags ) )
            
        elif term_type == 'predicate':
            
            if term in ( 'tags', 'nn tags' ):
                
                current = tags_manager.GetCurrent( CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_ACTUAL )
                pending = tags_manager.GetPending( CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_ACTUAL )
                
                tags = sorted( current.union( pending ) )
                
                if term == 'nn tags':
                    
                    # non-namespaced tags only
                    tags = [ tag for tag in tags if ':' not in tag ]
                    
                else:
                    
                    tags = [ HydrusTags.SplitTag( tag )[1] for tag in tags ]
                    
                
                filename += clean_tag_text( ', '.join( tags ) )
                
            elif term == 'hash':
                
                filename += media.GetHash().hex()
                
            elif term == 'file_id':
                
                filename += str( media.GetHashId() )
                
            elif term == '#':
                
                filename += str( file_index )
                
            
        elif term_type == 'tag':
            
            ( namespace, subtag ) = HydrusTags.SplitTag( term )
            
            # only include the tag if the media actually has it
            if tags_manager.HasTag( subtag, ClientTags.TAG_DISPLAY_ACTUAL ):
                
                filename += clean_tag_text( subtag )
                
            
        
    
    # a leading separator would escape the destination dir on join
    while filename.startswith( os.path.sep ):
        
        filename = filename[1:]
        
    
    # replace many consecutive (back)slashes with a single one
    if HC.PLATFORM_WINDOWS:
        
        filename = re.sub( r'\\+', r'\\', filename )
        
    else:
        
        filename = re.sub( '/+', '/', filename )
        
    
    filename = HydrusPaths.SanitizePathForExport( destination_directory, filename )
    
    #
    
    ext = HC.mime_ext_lookup[ media.GetMime() ]
    
    if filename.endswith( ext ):
        
        filename = filename[ : -len( ext ) ]
        
    
    # trim the stem so the final full path fits within MAX_PATH_LENGTH
    example_dest_path = os.path.join( destination_directory, filename + ext )
    
    excess_chars = len( example_dest_path ) - MAX_PATH_LENGTH
    
    if excess_chars > 0:
        
        filename = filename[ : -excess_chars ]
        
    
    if do_not_use_filenames is not None:
        
        # append ' (i)' until we find a free name
        i = 1
        
        possible_filename = '{}{}'.format( filename, ext )
        
        while possible_filename in do_not_use_filenames:
            
            possible_filename = '{} ({}){}'.format( filename, i, ext )
            
            i += 1
            
        
        filename = possible_filename
        
    else:
        
        filename += ext
        
    
    return filename
    
def GetExportPath():
    """Return the user's configured export path, or a default '~/hydrus_export'.
    
    Returns None when no path is configured and '~' cannot be expanded.
    The default directory is created on demand.
    """
    
    portable_path = HG.client_controller.options[ 'export_path' ]
    
    if portable_path is not None:
        
        return HydrusPaths.ConvertPortablePathToAbsPath( portable_path )
        
    
    desired_path = os.path.join( '~', 'hydrus_export' )
    
    path = os.path.expanduser( desired_path )
    
    if path == desired_path:
        
        # could not figure it out, probably crazy user setup atm
        return None
        
    
    HydrusPaths.MakeSureDirectoryExists( path )
    
    return path
    
def ParseExportPhrase( phrase ):
    """Parse an export phrase into a list of ( term_type, term ) pairs.
    
    '[namespace]' spans become 'namespace' terms, '{predicate}' spans become
    'predicate' terms, and '(tag)' spans become 'tag' terms; all remaining
    text stays as 'string' terms. Raises Exception if brackets cannot be
    paired up.
    """
    
    def _expand( terms, open_char, close_char, produced_type ):
        
        # split every 'string' term on open_char...close_char spans, emitting
        # the bracketed content as produced_type terms
        expanded = []
        
        for ( term_type, term ) in terms:
            
            if term_type == 'string':
                
                while open_char in term:
                    
                    ( pre, term ) = term.split( open_char, 1 )
                    
                    ( inner, term ) = term.split( close_char, 1 )
                    
                    expanded.append( ( 'string', pre ) )
                    expanded.append( ( produced_type, inner ) )
                    
                
            
            expanded.append( ( term_type, term ) )
            
        
        return expanded
        
    
    try:
        
        terms = [ ( 'string', phrase ) ]
        
        terms = _expand( terms, '[', ']', 'namespace' )
        terms = _expand( terms, '{', '}', 'predicate' )
        terms = _expand( terms, '(', ')', 'tag' )
        
    except Exception as e:
        
        raise Exception( 'Could not parse that phrase: ' + str( e ) )
        
    
    return terms
    
class ExportFolder( HydrusSerialisable.SerialisableBaseNamed ):
    """A serialisable export-folder definition.
    
    Periodically (or on demand) runs a file search and exports the results to
    a directory on disk using a filename phrase. A 'synchronise' folder also
    deletes files in the directory that no longer match the search.
    """
    
    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER
    SERIALISABLE_NAME = 'Export Folder'
    SERIALISABLE_VERSION = 6
    
    def __init__(
        self,
        name,
        path = '',
        export_type = HC.EXPORT_FOLDER_TYPE_REGULAR,
        delete_from_client_after_export = False,
        file_search_context = None,
        metadata_routers = None,
        run_regularly = True,
        period = 3600,
        phrase = None,
        last_checked = 0,
        paused = False,
        run_now = False,
        last_error = ''
    ):
        
        HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
        
        # a synchronising folder mirrors the search exactly; deleting the
        # exported files from the client would contradict that, so force it off
        if export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
            
            delete_from_client_after_export = False
            
        
        if file_search_context is None:
            
            default_location_context = HG.client_controller.new_options.GetDefaultLocalLocationContext()
            
            file_search_context = ClientSearch.FileSearchContext( location_context = default_location_context )
            
        
        if metadata_routers is None:
            
            metadata_routers = []
            
        
        if phrase is None:
            
            phrase = HG.client_controller.new_options.GetString( 'export_phrase' )
            
        
        self._path = path
        self._export_type = export_type
        self._delete_from_client_after_export = delete_from_client_after_export
        self._file_search_context = file_search_context
        self._metadata_routers = HydrusSerialisable.SerialisableList( metadata_routers )
        self._run_regularly = run_regularly
        self._period = period
        self._phrase = phrase
        self._last_checked = last_checked
        # an explicit run_now overrides a paused state
        self._paused = paused and not run_now
        self._run_now = run_now
        self._last_error = last_error
        
    
    def _GetSerialisableInfo( self ):
        
        serialisable_file_search_context = self._file_search_context.GetSerialisableTuple()
        serialisable_metadata_routers = self._metadata_routers.GetSerialisableTuple()
        
        return ( self._path, self._export_type, self._delete_from_client_after_export, serialisable_file_search_context, serialisable_metadata_routers, self._run_regularly, self._period, self._phrase, self._last_checked, self._paused, self._run_now, self._last_error )
        
    
    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        
        ( self._path, self._export_type, self._delete_from_client_after_export, serialisable_file_search_context, serialisable_metadata_routers, self._run_regularly, self._period, self._phrase, self._last_checked, self._paused, self._run_now, self._last_error ) = serialisable_info
        
        # enforce the same invariant as __init__ for older saved objects
        if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
            
            self._delete_from_client_after_export = False
            
        
        self._file_search_context = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_file_search_context )
        self._metadata_routers = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_metadata_routers )
        
    
    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
        
        # each step upgrades one version; the loader calls us repeatedly until
        # we reach SERIALISABLE_VERSION
        if version == 1:
            
            ( export_type, serialisable_file_search_context, period, phrase, last_checked ) = old_serialisable_info
            
            # v1 stored the path as the object name
            path = self._name
            
            new_serialisable_info = ( path, export_type, serialisable_file_search_context, period, phrase, last_checked )
            
            return ( 2, new_serialisable_info )
            
        
        if version == 2:
            
            ( path, export_type, serialisable_file_search_context, period, phrase, last_checked ) = old_serialisable_info
            
            delete_from_client_after_export = False
            
            new_serialisable_info = ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, period, phrase, last_checked )
            
            return ( 3, new_serialisable_info )
            
        
        if version == 3:
            
            ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, period, phrase, last_checked ) = old_serialisable_info
            
            run_regularly = True
            paused = False
            run_now = False
            
            new_serialisable_info = ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, run_regularly, period, phrase, last_checked, paused, run_now )
            
            return ( 4, new_serialisable_info )
            
        
        if version == 4:
            
            ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, run_regularly, period, phrase, last_checked, paused, run_now ) = old_serialisable_info
            
            last_error = ''
            
            new_serialisable_info = ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, run_regularly, period, phrase, last_checked, paused, run_now, last_error )
            
            return ( 5, new_serialisable_info )
            
        
        if version == 5:
            
            ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, run_regularly, period, phrase, last_checked, paused, run_now, last_error ) = old_serialisable_info
            
            metadata_routers = HydrusSerialisable.SerialisableList()
            
            serialisable_metadata_routers = metadata_routers.GetSerialisableTuple()
            
            new_serialisable_info = ( path, export_type, delete_from_client_after_export, serialisable_file_search_context, serialisable_metadata_routers, run_regularly, period, phrase, last_checked, paused, run_now, last_error )
            
            return ( 6, new_serialisable_info )
            
        
    
    def _DoExport( self ):
        
        # run the search and export matching files; for synchronise folders,
        # also delete anything in the directory that no longer matches
        
        query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context, apply_implicit_limit = False )
        
        media_results = []
        
        # fetch media results in chunks so a pause/shutdown can interrupt us
        i = 0
        base = 256
        
        while i < len( query_hash_ids ):
            
            if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            if i == 0: ( last_i, i ) = ( 0, base )
            else: ( last_i, i ) = ( i, i + base )
            
            sub_query_hash_ids = query_hash_ids[ last_i : i ]
            
            more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
            
            media_results.extend( more_media_results )
            
        
        # deterministic export order, so the '#' file_index phrase term is stable
        media_results.sort( key = lambda mr: mr.GetHashId() )
        
        #
        
        terms = ParseExportPhrase( self._phrase )
        
        previous_paths = set()
        
        for ( root, dirnames, filenames ) in os.walk( self._path ):
            
            previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
            
        
        sync_paths = set()
        
        client_files_manager = HG.client_controller.client_files_manager
        
        num_copied = 0
        
        for ( i, media_result ) in enumerate( media_results ):
            
            if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            hash = media_result.GetHash()
            mime = media_result.GetMime()
            size = media_result.GetSize()
            
            try:
                
                source_path = client_files_manager.GetFilePath( hash, mime )
                
            except HydrusExceptions.FileMissingException:
                
                # fix: the '{}' placeholder was previously never filled in, so the
                # error message did not say which hash was missing
                raise Exception( 'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'.format( hash.hex() ) )
                
            
            filename = GenerateExportFilename( self._path, media_result, terms, i + 1 )
            
            dest_path = os.path.normpath( os.path.join( self._path, filename ) )
            
            # defend against a phrase-generated path escaping the export directory
            if not dest_path.startswith( self._path ):
                
                raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
                
            
            dest_path_dir = os.path.dirname( dest_path )
            
            HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
            
            if dest_path not in sync_paths:
                
                copied = HydrusPaths.MirrorFile( source_path, dest_path )
                
                if copied:
                    
                    num_copied += 1
                    
                    HydrusPaths.TryToGiveFileNicePermissionBits( dest_path )
                    
                
            
            for metadata_router in self._metadata_routers:
                
                metadata_router.Work( media_result, dest_path )
                
            
            sync_paths.add( dest_path )
            
        
        if num_copied > 0:
            
            HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
            
        
        if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
            
            # delete files that were previously exported but no longer match
            deletee_paths = previous_paths.difference( sync_paths )
            
            for deletee_path in deletee_paths:
                
                ClientPaths.DeletePath( deletee_path )
                
            
            # then remove directories that are now empty (walking bottom-up so a
            # dir whose only children are other deletee dirs also qualifies)
            deletee_dirs = set()
            
            for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
                
                if root == self._path:
                    
                    continue
                    
                
                no_files = len( filenames ) == 0
                
                useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
                
                no_useful_dirs = len( useful_dirnames ) == 0
                
                if no_useful_dirs and no_files:
                    
                    deletee_dirs.add( root )
                    
                
            
            for deletee_dir in deletee_dirs:
                
                if os.path.exists( deletee_dir ):
                    
                    HydrusPaths.DeletePath( deletee_dir )
                    
                
            
            if len( deletee_paths ) > 0:
                
                HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
                
            
        
        if self._delete_from_client_after_export:
            
            local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
            
            service_keys_to_deletee_hashes = collections.defaultdict( list )
            
            for media_result in media_results:
                
                # respect the client's delete lock
                if media_result.IsDeleteLocked():
                    
                    continue
                    
                
                hash = media_result.GetHash()
                
                deletee_service_keys = media_result.GetLocationsManager().GetCurrent().intersection( local_file_service_keys )
                
                for deletee_service_key in deletee_service_keys:
                    
                    service_keys_to_deletee_hashes[ deletee_service_key ].append( hash )
                    
                
            
            reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
            
            for ( service_key, deletee_hashes ) in service_keys_to_deletee_hashes.items():
                
                # delete in small chunks to keep individual db jobs snappy
                chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
                
                for chunk_of_hashes in chunks_of_hashes:
                    
                    content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason )
                    
                    HG.client_controller.WriteSynchronous( 'content_updates', { service_key : [ content_update ] } )
                    
                
            
        
    
    def DoWork( self ):
        """Run the export if it is due (or forced via RunNow).
        
        On error, pauses the folder, surfaces the exception to the user and
        records it in _last_error. Always updates _last_checked, clears
        _run_now and persists the object.
        """
        
        regular_run_due = self._run_regularly and HydrusData.TimeHasPassed( self._last_checked + self._period )
        
        good_to_go = ( regular_run_due or self._run_now ) and not self._paused
        
        if not good_to_go:
            
            return
            
        
        try:
            
            if self._path == '':
                
                raise Exception( 'No path set for the folder!' )
                
            
            if not os.path.exists( self._path ):
                
                raise Exception( 'The path, "{}", does not exist!'.format( self._path ) )
                
            
            if not os.path.isdir( self._path ):
                
                raise Exception( 'The path, "{}", is not a directory!'.format( self._path ) )
                
            
            self._DoExport()
            
            self._last_error = ''
            
        except Exception as e:
            
            # pause so a broken folder does not error on every cycle
            self._paused = True
            
            HydrusData.ShowText( 'The export folder "' + self._name + '" encountered an error! It has now been paused. Please check the folder\'s settings and maybe report to hydrus dev if the error is complicated! The error follows:' )
            
            HydrusData.ShowException( e )
            
            self._last_error = str( e )
            
        finally:
            
            self._last_checked = HydrusData.GetNow()
            self._run_now = False
            
            HG.client_controller.WriteSynchronous( 'serialisable', self )
            
        
    
    def GetLastError( self ) -> str:
        """Return the error text from the last failed run, or '' if it succeeded."""
        
        return self._last_error
        
    
    def GetMetadataRouters( self ) -> typing.Collection[ ClientMetadataMigration.SingleFileMetadataRouter ]:
        """Return the metadata routers that run sidecar work on each exported file."""
        
        return self._metadata_routers
        
    
    def RunNow( self ):
        """Request an immediate run on the next work cycle, clearing any pause."""
        
        self._paused = False
        self._run_now = True
        
    
    def ToTuple( self ):
        """Return the folder's main settings as a flat tuple (for UI display)."""
        
        return ( self._name, self._path, self._export_type, self._delete_from_client_after_export, self._file_search_context, self._run_regularly, self._period, self._phrase, self._last_checked, self._paused, self._run_now )
        
    
2017-01-04 22:48:23 +00:00
# register the class with the serialisation system so saved export folders can be loaded
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER ] = ExportFolder