Version 422

This commit is contained in:
Hydrus Network Developer 2020-12-16 16:29:51 -06:00
parent c3100f7a88
commit ff51cf492e
45 changed files with 1855 additions and 632 deletions

View File

@ -1,26 +0,0 @@
MiniUPnP Project
Copyright (c) 2005-2019, Thomas BERNARD
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

Binary file not shown.

Binary file not shown.

5
bin/upnpc_readme.txt Normal file
View File

@ -0,0 +1,5 @@
UPnPc is a program that can talk to your internet router to perform UPnP operations. Hydrus uses it to fetch and manage UPnP NAT port forwards when you open _network->data->manage upnp_ and keeps ports forwarded when you set up a server service or the Client API to stay open. It also fetches your external IP for some related 'figure out the external URL for this service' operations. Unless you do some UPnP stuff, hydrus does not touch it.
I used to bundle UPnPc here for all builds, but it threw anti-virus false positives every few months, so it is no longer included. If you are on Linux, you may already have it installed to your system.
If you need it, you can fetch it at http://miniupnp.tuxfamily.org/ (if you are on Linux, you can probably also get it with your package manager). Place the 'upnpc-static' executable in this directory, or install to your system PATH as 'miniupnpc', and hydrus will be able to do UPnP things.

Binary file not shown.

View File

@ -31,8 +31,9 @@ try:
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
argparser.add_argument( '--db_synchronous_override', choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )
argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
argparser.add_argument( '--no_wal', action='store_true', help = 'OBSOLETE: run using TRUNCATE db journaling' )
argparser.add_argument( '--db_memory_journaling', action='store_true', help = 'OBSOLETE: run using MEMORY db journaling (DANGEROUS)' )
@ -105,6 +106,8 @@ try:
HG.no_db_temp_files = result.no_db_temp_files
HG.boot_debug = result.boot_debug
if result.temp_dir is not None:
HydrusPaths.SetEnvTempDir( result.temp_dir )

View File

@ -31,8 +31,9 @@ try:
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
argparser.add_argument( '--db_synchronous_override', choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )
argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
argparser.add_argument( '--no_wal', action='store_true', help = 'OBSOLETE: run using TRUNCATE db journaling' )
argparser.add_argument( '--db_memory_journaling', action='store_true', help = 'OBSOLETE: run using MEMORY db journaling (DANGEROUS)' )
@ -105,6 +106,8 @@ try:
HG.no_db_temp_files = result.no_db_temp_files
HG.boot_debug = result.boot_debug
if result.temp_dir is not None:
HydrusPaths.SetEnvTempDir( result.temp_dir )

View File

@ -8,6 +8,45 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 422</h3></li>
<ul>
<li>advanced tags:</li>
<li>fixed the search code for various 'total' autocomplete searches like '*' and 'namespace:*', which were broken around v419's optimised regular tag lookups. these search types also have a round of their own search optimisations and improved cancel latency. I am sorry for the trouble here</li>
<li>expanded the database autocomplete fetch unit tests to handle these total lookups so I do not accidentally kill them due to typo/ignorance again</li>
<li>updated the autocomplete result cache object to consult a search's advanced search options (as under _tags->manage tag display and search_) to test whether a search cache for 'char' or 'character:' is able to serve results for a later 'character:samus' input</li>
<li>optimised file and tag search code for cases where someone might somehow sneak an unoptimised raw '*:subtag' or 'namespace:*' search text in</li>
<li>updated and expanded the autocomplete result cache unit tests to handle the new tested options and the various 'total' tests, so they aren't disabled by accident again</li>
<li>cancelling an autocomplete query with a gigantic number of results should now cancel much quicker when you have a lot of siblings</li>
<li>the single-tag right-click menu now shows siblings and parents info for every service, and will work on taglists in the 'all known tags' domain. clicking on any item will copy it to clipboard. this might result in megatall submenus, but we'll see. tall seems easier to use than nested per-service for now</li>
<li>the more primitive 'siblings' submenu on the taglist 'copy' right-click menu is now removed</li>
<li>right-click should no longer raise an error on esoteric taglists (such as tag filters and namespace colours). you might get some funky copy strings, which is sort of fun too</li>
<li>the copy string for the special namespace predicate ('namespace:*anything*') is now 'namespace:*', making it easier to copy/paste this across pages</li>
<li>.</li>
<li>misc:</li>
<li>the thumbnail right-click 'copy/open known urls by url class' commands now exclude those urls that match a more specific url class (e.g. /post/123456 vs /post/123456/image.jpg)</li>
<li>miniupnpc is no longer bundled in the official builds. this executable is only used by a few advanced users and was a regular cause of anti-virus false positives, so I have decided new users will have to install it manually going forward.</li>
<li>the client now looks for miniupnpc in more places, including the system path. when missing, its error popups have better explanation, pointing users to a new readme in the bin directory</li>
<li>UPnP errors now have more explanation for 'No IGD UPnP Device' errortext</li>
<li>the database's boot-repair function now ensures indices are created for: non-sha256 hashes, sibling and parent lookups, storage tag cache, and display tag cache. some users may be missing indices here for unknown update logic or hard drive damage reasons, and this should speed them right back up. the boot-repair function now broadcasts 'checking database for faults' to the splash, which you will see if it needs some time to work</li>
<li>the duplicates page once again correctly updates the potential pairs count in the 'filter' tab when potential search finishes or filtering finishes</li>
<li>added the --boot_debug launch switch, which for now prints additional splash screen texts to the log</li>
<li>the global pixmaps object is no longer initialised in client model boot, but now on first request</li>
<li>fixed type of --db_synchronous_override launch parameter, which was throwing type errors</li>
<li>updated the client file readwrite lock logic and brushed up its unit tests</li>
<li>improved the error when the client database is asked for the id of an invalid tag that collapses to zero characters</li>
<li>the qss stylesheet directory is now mapped to the static dir in a way that will follow static directory redirects</li>
<li>.</li>
<li>downloaders and parsing (advanced):</li>
<li>started on better network redirection tech. if a post or gallery URL is 3XX redirected, hydrus now recognises this, and if the redirected url is the same type and parseable, the new url and parser are swapped in. if a gallery url is redirected to a non-gallery url, it will create a new file import object for that URL and say so in its gallery log note. this tentatively solves the 'booru redirects one-file gallery pages to post url' problem, but the whole thing is held together by prayer. I now have a plan to rejigger my pipelines to deal with this situation better, ultimately I will likely expose and log all redirects so we can always see better what is going on behind the scenes</li>
<li>added 'unicode escape characters' and 'html entities' string converter encode/decode types. the former does '\u0394'-to-'Δ', and the latter does '&amp;'-to-'&'</li>
<li>improved my string converter unit tests and added the above to them</li>
<li>in the parsing system, decoding from 'hex' or 'base64' is no longer needed for a 'file hash' content type. these string conversions are now no-ops and can be deleted. they converted to a non-string type, an artifact of the old way python 2 used to handle unicode, and were a sore thumb for a long time in the python 3 parsing system. 'file hash' content types now have a 'hex'/'base64' dropdown, and do decoding to raw bytes at a layer above string parsing. on update, existing file hash content parsers will default to hex and attempt to figure out if they were a base64 (however if the hex fails, base64 will be attempted as well anyway, so it is not critically important here if this update detection is imperfect). the 'hex' and 'base64' _encode_ types remain as they are still used in file lookup script hash initialisation, but they will likely be replaced similarly in future. hex or base64 conversion will return in a purely string-based form as technically needed in future</li>
<li>updated the make-a-downloader help and some screenshots regarding the new hash decoding</li>
<li>when the json parsing formula is told to get the 'json' of a parsed node, this no longer encodes unicode with escape characters (\u0394 etc...)</li>
<li>duplicating or importing nested gallery url generators now refreshes all internal reference ids, which should reduce the likelihood of accidentally linking with related but differently named existing GUGs</li>
<li>importing GUGs or NGUGs through Lain easy import does the same, ensuring the new objects 'seem' fresh to a client and should not incorrectly link up with renamed versions of related NGUGs or GUGs</li>
<li>added unit tests for hex and base64 string converter encoding</li>
</ul>
<li><h3>version 421</h3></li>
<ul>
<li>misc:</li>
@ -32,7 +71,7 @@
<li>misc cleanup for duplicates page</li>
<li>.</li>
<li>database modes:</li>
<li>a new 'program launch arguments' help page now talks about all the available command line switches, here: https://github.com/hydrusnetwork/hydrus/blob/master/help/launch_arguments.html</li>
<li>a new 'program launch arguments' help page now talks about all the available command line switches, here: https://hydrusnetwork.github.io/hydrus/help/launch_arguments.html</li>
<li>added the '--db_journal_mode' launch switch to set the SQLite journal mode. default is WAL, permitted values are also TRUNCATE, PERSIST, and MEMORY</li>
<li>ensured --db_synchronous_override was hooked up correctly</li>
<li>the old disk cache options under _speed and memory_ are removed, along with various deprecated disk cache load calls and code</li>

View File

@ -41,10 +41,10 @@
</li>
<li>
<h3>file hash</h3>
<p>This says 'this is the hash for the file otherwise referenced in this parser'. So, if you have another content parser finding a File or Post URL, this lets the client know early that that destination happens to have a particular MD5, for instance. The client will look for that hash in its own database, and if it finds a match, it can predetermine if it already has the file (or has previously deleted it) without ever having to download it. Furthermore, if it does find the file for this URL but has never seen the URL before, it will still associate it with that file's 'known urls' as if it <i>had</i> downloaded it!</p>
<p>This says 'this is the hash for the file otherwise referenced in this parser'. So, if you have another content parser finding a File or Post URL, this lets the client know early that that destination happens to have a particular MD5, for instance. The client will look for that hash in its own database, and if it finds a match, it can predetermine if it already has the file (or has previously deleted it) without ever having to download it. When this happens, it will still add tags and associate the file with the URL for its 'known urls' just as if it <i>had</i> downloaded it!</p>
<p>If you understand this concept, it is great to include. It saves time and bandwidth for everyone. Many site APIs include a hash for this exact reason--they want you to be able to skip a needless download just as much as you do.</p>
<p><img src="edit_content_parser_panel_hash.png" /></p>
<p>The usual suite of hash types are supported: MD5, SHA1, SHA256, and SHA512. <b>This expects the hash as raw bytes</b>, so if your source provides it as hex or base64 (as above), make sure to decode it! In the area for test results, it will present the hash in hex for your convenience.</p>
<p>The usual suite of hash types are supported: MD5, SHA1, SHA256, and SHA512. An old version of this required some weird string decoding, but this is no longer true. Select 'hex' or 'base64' from the encoding type dropdown, and then just parse the 'e5af57a687f089894f5ecede50049458' or '5a9XpofwiYlPXs7eUASUWA==' text, and hydrus should handle the rest. It will present the parsed hash in hex.</p>
</li>
<li>
<h3>timestamp</h3>
@ -64,4 +64,4 @@
</ul>
</div>
</body>
</html>
</html>

View File

@ -43,9 +43,9 @@
<p>Skipping ?/-/+ characters can be a pain if you are lacking a nice tag-text class, in which case you can add a regex String Match to the HTML formula (as I do here, since Gelb offers '?' links for tag definitions) like [^\?\-+\s], which means "the text includes something other than just '?' or '-' or '+' or whitespace".</p>
<h3>md5 hash</h3>
<p>If you look at the Gelbooru File URL, <a href="https://gelbooru.com/images/38/6e/386e12e33726425dbd637e134c4c09b5.jpeg"><b>https://gelbooru.com/images/38/6e/386e12e33726425dbd637e134c4c09b5.jpeg</b></a>, you may notice the filename is all hexadecimal. It looks like they store their files under a two-deep folder structure, using the first four characters--386e here--as the key. It sure looks like '386e12e33726425dbd637e134c4c09b5' is not random ephemeral garbage!</p>
<p>In fact, Gelbooru use the MD5 of the file as the filename. Many storage systems do something like this (hydrus uses SHA256!), so if they don't offer a &lt;meta&gt; tag that explicitly states the md5 or sha1 or whatever, you can sometimes infer it from one of the file links:</p>
<p>In fact, Gelbooru use the MD5 of the file as the filename. Many storage systems do something like this (hydrus uses SHA256!), so if they don't offer a &lt;meta&gt; tag that explicitly states the md5 or sha1 or whatever, you can sometimes infer it from one of the file links. This screenshot is from the more recent version of hydrus, which has the more powerful 'string processing' system for string transformations. It has an intimidating number of nested dialogs, but we can stay simple for now, with only the one regex substitution step inside a string 'converter':</p>
<p><img src="downloader_post_example_md5.png" /></p>
<p>Here we are using the same property="og:image" rule to fetch the File URL, and then we are regexing the hex hash with .*([0-9a-f]{32}).* (MD5s are 32 hex characters) and decoding from hex to present the Content Parser with raw bytes (Hydrus handles hashes as bytes, not hex--although you'll see in the Content Parser test page it presents the hash neatly in English: "md5 hash: 386e12e33726425dbd637e134c4c09b5").</p>
<p>Here we are using the same property="og:image" rule to fetch the File URL, and then we are regexing the hex hash with .*([0-9a-f]{32}).* (MD5s are 32 hex characters). We select 'hex' as the encoding type. Hashes require a tiny bit more data handling behind the scenes, but in the Content Parser test page it presents the hash again neatly in English: "md5 hash: 386e12e33726425dbd637e134c4c09b5", meaning everything parsed correctly. It presents the hash in hex even if you select the encoding type as base64.</p>
<p>If you think you have found a hash string, you should obviously test your theory! The site might not be using the actual MD5 of file bytes, as hydrus does, but instead some proprietary scheme. Download the file and run it through a program like HxD (or hydrus!) to figure out its hashes, and then search the View Source for those hex strings--you might be surprised!</p>
<p>Finding the hash is hugely beneficial for a parser--it lets hydrus skip downloading files without ever having seen them before!</p>
<h3>source time</h3>
@ -67,4 +67,4 @@
<p>This is overall a decent parser. Some parts of it may fail when Gelbooru update to their next version, but that can be true of even very good parsers with multiple redundancy. For now, hydrus can use this to quickly and efficiently pull content from anything running Gelbooru 0.2.5., and the effort spent now can save millions of combined <i>right-click->save as</i> and manual tag copies in future. If you make something like this and share it about, you'll be doing a good service for those who could never figure it out.</p>
</div>
</body>
</html>
</html>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 132 KiB

After

Width:  |  Height:  |  Size: 167 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

After

Width:  |  Height:  |  Size: 39 KiB

View File

@ -51,6 +51,10 @@
<b>--no_db_temp_files</b>
<p>When SQLite performs very large queries, it may spool temporary table results to disk. These go in your temp directory. If your temp dir is slow but you have a <i>ton</i> of memory, set this to never spool to disk, as <a href="https://sqlite.org/pragma.html#pragma_temp_store">here</a>.</p>
</li>
<li>
<b>--boot_debug</b>
<p>Prints additional debug information to the log during the bootup phase of the application.</p>
</li>
<li>
<b>--no_daemons</b>
<p>Launch the program without some background workers. This is an old debug command and does not do much any more.</p>

View File

@ -433,21 +433,17 @@ class GlobalPixmaps( object ):
self._Initialise()
GlobalPixmaps.my_instance = self
@staticmethod
def instance() -> 'GlobalPixmaps':
if GlobalPixmaps.my_instance is None:
raise Exception( 'GlobalPixmaps is not yet initialised!' )
else:
return GlobalPixmaps.my_instance
GlobalPixmaps.my_instance = GlobalPixmaps()
return GlobalPixmaps.my_instance
def _Initialise( self ):

View File

@ -979,23 +979,14 @@ class Controller( HydrusController.HydrusController ):
#
self.frame_splash_status.SetSubtext( 'tag parents' )
self._managers[ 'undo' ] = ClientManagers.UndoManager( self )
def qt_code():
self._caches[ 'images' ] = ClientCaches.RenderedImageCache( self )
self._caches[ 'thumbnail' ] = ClientCaches.ThumbnailCache( self )
self.bitmap_manager = ClientManagers.BitmapManager( self )
CC.GlobalPixmaps()
self.frame_splash_status.SetSubtext( 'image caches' )
self.CallBlockingToQt(self._splash, qt_code)
# careful: outside of qt since they don't need qt for init, seems ok _for now_
self._caches[ 'images' ] = ClientCaches.RenderedImageCache( self )
self._caches[ 'thumbnail' ] = ClientCaches.ThumbnailCache( self )
self.bitmap_manager = ClientManagers.BitmapManager( self )
self.sub( self, 'ToClipboard', 'clipboard' )

View File

@ -2486,9 +2486,13 @@ class DB( HydrusDB.HydrusDB ):
return set( self._CacheTagSiblingsGetInterestedServiceIds( tag_service_id ) ).union( self._CacheTagParentsGetInterestedServiceIds( tag_service_id ) )
def _CacheTagDisplayGetSiblingsAndParentsForTags( self, service_key, tags ):
def _CacheTagDisplayGetSiblingsAndParentsForTags( self, tags ):
tags_to_siblings_and_parents = {}
tag_services = self._GetServices( HC.REAL_TAG_SERVICES )
service_keys = [ tag_service.GetServiceKey() for tag_service in tag_services ]
tags_to_service_keys_to_siblings_and_parents = {}
for tag in tags:
@ -2497,55 +2501,53 @@ class DB( HydrusDB.HydrusDB ):
descendants = set()
ancestors = set()
tags_to_siblings_and_parents[ tag ] = ( sibling_chain_members, ideal_tag, descendants, ancestors )
tags_to_service_keys_to_siblings_and_parents[ tag ] = { service_key : ( sibling_chain_members, ideal_tag, descendants, ancestors ) for service_key in service_keys }
if service_key == CC.COMBINED_TAG_SERVICE_KEY:
for service_key in service_keys:
return tags_to_siblings_and_parents
tag_service_id = self._GetServiceId( service_key )
existing_tags = { tag for tag in tags if self._TagExists( tag ) }
existing_tag_ids = { self._GetTagId( tag ) for tag in existing_tags }
tag_ids_to_ideal_tag_ids = self._CacheTagSiblingsGetTagsToIdeals( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, existing_tag_ids )
ideal_tag_ids = set( tag_ids_to_ideal_tag_ids.values() )
ideal_tag_ids_to_sibling_chain_ids = self._CacheTagSiblingsGetIdealsToChains( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, ideal_tag_ids )
ideal_tag_ids_to_descendant_tag_ids = self._CacheTagParentsGetTagsToDescendants( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, ideal_tag_ids )
ideal_tag_ids_to_ancestor_tag_ids = self._CacheTagParentsGetTagsToAncestors( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, ideal_tag_ids )
all_tag_ids = set()
all_tag_ids.update( ideal_tag_ids_to_sibling_chain_ids.keys() )
all_tag_ids.update( itertools.chain.from_iterable( ideal_tag_ids_to_sibling_chain_ids.values() ) )
all_tag_ids.update( itertools.chain.from_iterable( ideal_tag_ids_to_descendant_tag_ids.values() ) )
all_tag_ids.update( itertools.chain.from_iterable( ideal_tag_ids_to_ancestor_tag_ids.values() ) )
self._PopulateTagIdsToTagsCache( all_tag_ids )
for tag_id in existing_tag_ids:
ideal_tag_id = tag_ids_to_ideal_tag_ids[ tag_id ]
sibling_chain_ids = ideal_tag_ids_to_sibling_chain_ids[ ideal_tag_id ]
descendant_tag_ids = ideal_tag_ids_to_descendant_tag_ids[ ideal_tag_id ]
ancestor_tag_ids = ideal_tag_ids_to_ancestor_tag_ids[ ideal_tag_id ]
tag = self._tag_ids_to_tags_cache[ tag_id ]
ideal_tag = self._tag_ids_to_tags_cache[ ideal_tag_id ]
sibling_chain_members = { self._tag_ids_to_tags_cache[ sibling_chain_id ] for sibling_chain_id in sibling_chain_ids }
descendants = { self._tag_ids_to_tags_cache[ descendant_tag_id ] for descendant_tag_id in descendant_tag_ids }
ancestors = { self._tag_ids_to_tags_cache[ ancestor_tag_id ] for ancestor_tag_id in ancestor_tag_ids }
tags_to_service_keys_to_siblings_and_parents[ tag ][ service_key ] = ( sibling_chain_members, ideal_tag, descendants, ancestors )
tag_service_id = self._GetServiceId( service_key )
existing_tags = { tag for tag in tags if self._TagExists( tag ) }
existing_tag_ids = { self._GetTagId( tag ) for tag in existing_tags }
tag_ids_to_ideal_tag_ids = self._CacheTagSiblingsGetTagsToIdeals( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, existing_tag_ids )
ideal_tag_ids = set( tag_ids_to_ideal_tag_ids.values() )
ideal_tag_ids_to_sibling_chain_ids = self._CacheTagSiblingsGetIdealsToChains( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, ideal_tag_ids )
ideal_tag_ids_to_descendant_tag_ids = self._CacheTagParentsGetTagsToDescendants( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, ideal_tag_ids)
ideal_tag_ids_to_ancestor_tag_ids = self._CacheTagParentsGetTagsToAncestors( ClientTags.TAG_DISPLAY_ACTUAL, tag_service_id, ideal_tag_ids )
all_tag_ids = set()
all_tag_ids.update( ideal_tag_ids_to_sibling_chain_ids.keys() )
all_tag_ids.update( itertools.chain.from_iterable( ideal_tag_ids_to_sibling_chain_ids.values() ) )
all_tag_ids.update( itertools.chain.from_iterable( ideal_tag_ids_to_descendant_tag_ids.values() ) )
all_tag_ids.update( itertools.chain.from_iterable( ideal_tag_ids_to_ancestor_tag_ids.values() ) )
self._PopulateTagIdsToTagsCache( all_tag_ids )
for tag_id in existing_tag_ids:
ideal_tag_id = tag_ids_to_ideal_tag_ids[ tag_id ]
sibling_chain_ids = ideal_tag_ids_to_sibling_chain_ids[ ideal_tag_id ]
descendant_tag_ids = ideal_tag_ids_to_descendant_tag_ids[ ideal_tag_id ]
ancestor_tag_ids = ideal_tag_ids_to_ancestor_tag_ids[ ideal_tag_id ]
tag = self._tag_ids_to_tags_cache[ tag_id ]
ideal_tag = self._tag_ids_to_tags_cache[ ideal_tag_id ]
sibling_chain_members = { self._tag_ids_to_tags_cache[ sibling_chain_id ] for sibling_chain_id in sibling_chain_ids }
descendants = { self._tag_ids_to_tags_cache[ descendant_tag_id ] for descendant_tag_id in descendant_tag_ids }
ancestors = { self._tag_ids_to_tags_cache[ ancestor_tag_id ] for ancestor_tag_id in ancestor_tag_ids }
tags_to_siblings_and_parents[ tag ] = ( sibling_chain_members, ideal_tag, descendants, ancestors )
return tags_to_siblings_and_parents
return tags_to_service_keys_to_siblings_and_parents
def _CacheTagDisplayGetTagsToImpliedBy( self, display_type, tag_service_id, tag_ids, tags_are_ideal = False ):
@ -7269,7 +7271,10 @@ class DB( HydrusDB.HydrusDB ):
def _GetAutocompleteTagIds( self, tag_display_type, tag_service_key, search_text, exact_match, job_key = None ):
tag_service_id = self._GetServiceId( tag_service_key )
if search_text == '':
return set()
( namespace, half_complete_searchable_subtag ) = HydrusTags.SplitTag( search_text )
@ -7278,6 +7283,13 @@ class DB( HydrusDB.HydrusDB ):
return set()
if namespace == '*':
namespace = ''
tag_service_id = self._GetServiceId( tag_service_key )
if exact_match:
table_join = 'subtags_searchable_map NATURAL JOIN tags'
@ -7310,29 +7322,65 @@ class DB( HydrusDB.HydrusDB ):
else:
subtag_ids = self._GetSubtagIdsFromWildcard( half_complete_searchable_subtag, job_key = job_key )
if namespace in ( '', '*' ):
if namespace == '':
tag_ids = self._GetTagIdsFromSubtagIds( subtag_ids, job_key = job_key )
namespace_ids = []
elif '*' in namespace:
namespace_ids = self._GetNamespaceIdsFromWildcard( namespace )
else:
if '*' in namespace:
if not self._NamespaceExists( namespace ):
namespace_ids = self._GetNamespaceIdsFromWildcard( namespace )
return set()
namespace_ids = ( self._GetNamespaceId( namespace ), )
if half_complete_searchable_subtag == '*':
if namespace == '':
if tag_service_id == self._combined_tag_service_id:
cursor = self._c.execute( 'SELECT tag_id FROM tags;' )
else:
combined_ac_cache_table_name = GenerateCombinedFilesMappingsACCacheTableName( tag_display_type, tag_service_id )
cursor = self._c.execute( 'SELECT tag_id FROM {};'.format( combined_ac_cache_table_name ) )
cancelled_hook = None
if job_key is not None:
cancelled_hook = job_key.IsCancelled
tag_ids = self._STL( HydrusDB.ReadFromCancellableCursor( cursor, 1024, cancelled_hook = cancelled_hook ) )
else:
if not self._NamespaceExists( namespace ):
return set()
namespace_ids = ( self._GetNamespaceId( namespace ), )
tag_ids = self._GetTagIdsFromNamespaceIds( namespace_ids, job_key = job_key )
tag_ids = self._GetTagIdsFromNamespaceIdsSubtagIds( namespace_ids, subtag_ids, job_key = job_key )
else:
subtag_ids = self._GetSubtagIdsFromWildcard( half_complete_searchable_subtag, job_key = job_key )
if namespace == '':
tag_ids = self._GetTagIdsFromSubtagIds( subtag_ids, job_key = job_key )
else:
tag_ids = self._GetTagIdsFromNamespaceIdsSubtagIds( namespace_ids, subtag_ids, job_key = job_key )
@ -7351,9 +7399,22 @@ class DB( HydrusDB.HydrusDB ):
for sibling_tag_service_id in sibling_tag_service_ids:
ideal_tag_ids = self._CacheTagSiblingsGetIdeals( ClientTags.TAG_DISPLAY_ACTUAL, sibling_tag_service_id, tag_ids )
seen_ideal_tag_ids = set()
final_tag_ids.update( self._CacheTagSiblingsGetChainsMembersFromIdeals( ClientTags.TAG_DISPLAY_ACTUAL, sibling_tag_service_id, ideal_tag_ids ) )
for batch_of_tag_ids in HydrusData.SplitIteratorIntoChunks( tag_ids, 10240 ):
if job_key is not None and job_key.IsCancelled():
return set()
ideal_tag_ids = self._CacheTagSiblingsGetIdeals( ClientTags.TAG_DISPLAY_ACTUAL, sibling_tag_service_id, batch_of_tag_ids )
ideal_tag_ids.difference_update( seen_ideal_tag_ids )
seen_ideal_tag_ids.update( ideal_tag_ids )
final_tag_ids.update( self._CacheTagSiblingsGetChainsMembersFromIdeals( ClientTags.TAG_DISPLAY_ACTUAL, sibling_tag_service_id, ideal_tag_ids ) )
return final_tag_ids
@ -7972,6 +8033,76 @@ class DB( HydrusDB.HydrusDB ):
return hash_ids
def _GetHashIdsAndNonZeroTagCounts( self, tag_display_type: int, file_service_key, tag_search_context: ClientSearch.TagSearchContext, hash_ids_table_name, namespace_wildcard = None, job_key = None ):
if namespace_wildcard == '*':
namespace_wildcard = None
if namespace_wildcard is None:
namespace_ids = []
else:
namespace_ids = self._GetNamespaceIdsFromWildcard( namespace_wildcard )
with HydrusDB.TemporaryIntegerTable( self._c, namespace_ids, 'namespace_id' ) as temp_namespace_ids_table_name:
# reason why I JOIN each table rather than join just the UNION is based on previous hell with having query planner figure out a "( UNION ALL ) NATURAL JOIN stuff" situation
# although this sometimes makes certifiable 2KB ( 6 UNION * 4-table ) queries, it actually works fast
table_names = self._GetMappingTables( tag_display_type, file_service_key, tag_search_context )
if namespace_wildcard is None:
# temp hashes to mappings
select_statements = [ 'SELECT hash_id, tag_id FROM {} CROSS JOIN {} USING ( hash_id )'.format( hash_ids_table_name, table_name ) for table_name in table_names ]
else:
# temp hashes to mappings to tags to namespaces
select_statements = [ 'SELECT hash_id, tag_id FROM {} CROSS JOIN {} USING ( hash_id ) CROSS JOIN tags USING ( tag_id ) CROSS JOIN {} USING ( namespace_id )'.format( hash_ids_table_name, table_name, temp_namespace_ids_table_name ) for table_name in table_names ]
union_all = '( {} )'.format( ' UNION ALL '.join( select_statements ) )
query = 'SELECT hash_id, COUNT( DISTINCT tag_id ) FROM {} GROUP BY hash_id;'.format( union_all )
results = []
cursor = self._c.execute( query )
cancelled_hook = None
if job_key is not None:
cancelled_hook = job_key.IsCancelled
results = HydrusDB.ReadFromCancellableCursor( cursor, 256, cancelled_hook = cancelled_hook )
return results
# old method, now we are going full meme
'''
if len( table_names ) == 0:
return []
table_union_to_select_from = '( ' + ' UNION ALL '.join( ( 'SELECT * FROM ' + table_name for table_name in table_names ) ) + ' )'
select_statement = 'SELECT hash_id, COUNT( DISTINCT tag_id ) FROM ' + table_union_to_select_from + ' WHERE hash_id = ?;'
hash_id_tag_counts = list( self._ExecuteManySelectSingleParam( select_statement, hash_ids ) )
return hash_id_tag_counts
'''
def _GetHashIdsFromFileViewingStatistics( self, view_type, viewing_locations, operator, viewing_value ):
# only works for positive values like '> 5'. won't work for '= 0' or '< 1' since those are absent from the table
@ -8615,7 +8746,7 @@ class DB( HydrusDB.HydrusDB ):
if query_hash_ids is None or ( is_inbox and len( query_hash_ids ) == len( self._inbox_hash_ids ) ):
namespace_query_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, namespace = namespace, job_key = job_key )
namespace_query_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, namespace_wildcard = namespace, job_key = job_key )
else:
@ -8623,7 +8754,7 @@ class DB( HydrusDB.HydrusDB ):
self._AnalyzeTempTable( temp_table_name )
namespace_query_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, namespace = namespace, hash_ids_table_name = temp_table_name, job_key = job_key )
namespace_query_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, namespace_wildcard = namespace, hash_ids_table_name = temp_table_name, job_key = job_key )
@ -8803,7 +8934,7 @@ class DB( HydrusDB.HydrusDB ):
self._AnalyzeTempTable( temp_table_name )
unwanted_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, namespace = namespace, hash_ids_table_name = temp_table_name, job_key = job_key )
unwanted_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, namespace_wildcard = namespace, hash_ids_table_name = temp_table_name, job_key = job_key )
query_hash_ids.difference_update( unwanted_hash_ids )
@ -9089,7 +9220,7 @@ class DB( HydrusDB.HydrusDB ):
if is_zero or is_anything_but_zero:
nonzero_tag_query_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, hash_ids_table_name = temp_table_name, namespace = namespace, job_key = job_key )
nonzero_tag_query_hash_ids = self._GetHashIdsThatHaveTags( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, hash_ids_table_name = temp_table_name, namespace_wildcard = namespace, job_key = job_key )
nonzero_tag_query_hash_ids_populated = True
if is_zero:
@ -9105,7 +9236,7 @@ class DB( HydrusDB.HydrusDB ):
if len( specific_number_tests ) > 0:
hash_id_tag_counts = self._GetHashIdsNonZeroTagCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, temp_table_name, namespace = namespace, job_key = job_key )
hash_id_tag_counts = self._GetHashIdsAndNonZeroTagCounts( ClientTags.TAG_DISPLAY_ACTUAL, file_service_key, tag_search_context, temp_table_name, namespace_wildcard = namespace, job_key = job_key )
good_tag_count_hash_ids = { hash_id for ( hash_id, count ) in hash_id_tag_counts if megalambda( count ) }
@ -9372,6 +9503,7 @@ class DB( HydrusDB.HydrusDB ):
for ( hash_id, url ) in self._c.execute( select ):
# this is actually insufficient, as more detailed url classes may match
if hash_id not in result_hash_ids and url_class.Matches( url ):
result_hash_ids.add( hash_id )
@ -9442,6 +9574,21 @@ class DB( HydrusDB.HydrusDB ):
( namespace_wildcard, subtag_wildcard ) = HydrusTags.SplitTag( wildcard )
if namespace_wildcard == '*':
namespace_wildcard = ''
if subtag_wildcard == '*':
if namespace_wildcard == '':
namespace_wildcard = None
return self._GetHashIdsThatHaveTags( tag_display_type, file_service_key, tag_search_context, namespace_wildcard = namespace_wildcard, hash_ids_table_name = hash_ids_table_name, job_key = job_key )
possible_subtag_ids = self._GetSubtagIdsFromWildcard( subtag_wildcard, job_key = job_key )
if namespace_wildcard != '':
@ -9456,80 +9603,20 @@ class DB( HydrusDB.HydrusDB ):
def _GetHashIdsNonZeroTagCounts( self, tag_display_type: int, file_service_key, tag_search_context: ClientSearch.TagSearchContext, hash_ids_table_name, namespace = None, job_key = None ):
def _GetHashIdsThatHaveTags( self, tag_display_type: int, file_service_key, tag_search_context: ClientSearch.TagSearchContext, namespace_wildcard = None, hash_ids_table_name = None, job_key = None ):
if namespace is None:
if namespace_wildcard == '*':
namespace_wildcard = None
if namespace_wildcard is None:
namespace_ids = []
else:
namespace_ids = self._GetNamespaceIdsFromWildcard( namespace )
with HydrusDB.TemporaryIntegerTable( self._c, namespace_ids, 'namespace_id' ) as temp_namespace_ids_table_name:
# reason why I JOIN each table rather than join just the UNION is based on previous hell with having query planner figure out a "( UNION ALL ) NATURAL JOIN stuff" situation
# although this sometimes makes certifiable 2KB ( 6 UNION * 4-table ) queries, it actually works fast
table_names = self._GetMappingTables( tag_display_type, file_service_key, tag_search_context )
if namespace is None:
# temp hashes to mappings
select_statements = [ 'SELECT hash_id, tag_id FROM {} CROSS JOIN {} USING ( hash_id )'.format( hash_ids_table_name, table_name ) for table_name in table_names ]
else:
# temp hashes to mappings to tags to namespaces
select_statements = [ 'SELECT hash_id, tag_id FROM {} CROSS JOIN {} USING ( hash_id ) CROSS JOIN tags USING ( tag_id ) CROSS JOIN {} USING ( namespace_id )'.format( hash_ids_table_name, table_name, temp_namespace_ids_table_name ) for table_name in table_names ]
union_all = '( {} )'.format( ' UNION ALL '.join( select_statements ) )
query = 'SELECT hash_id, COUNT( DISTINCT tag_id ) FROM {} GROUP BY hash_id;'.format( union_all )
results = []
cursor = self._c.execute( query )
cancelled_hook = None
if job_key is not None:
cancelled_hook = job_key.IsCancelled
results = HydrusDB.ReadFromCancellableCursor( cursor, 256, cancelled_hook = cancelled_hook )
return results
# old method, now we are going full meme
'''
if len( table_names ) == 0:
return []
table_union_to_select_from = '( ' + ' UNION ALL '.join( ( 'SELECT * FROM ' + table_name for table_name in table_names ) ) + ' )'
select_statement = 'SELECT hash_id, COUNT( DISTINCT tag_id ) FROM ' + table_union_to_select_from + ' WHERE hash_id = ?;'
hash_id_tag_counts = list( self._ExecuteManySelectSingleParam( select_statement, hash_ids ) )
return hash_id_tag_counts
'''
def _GetHashIdsThatHaveTags( self, tag_display_type: int, file_service_key, tag_search_context: ClientSearch.TagSearchContext, namespace = None, hash_ids_table_name = None, job_key = None ):
if namespace is None:
namespace_ids = []
else:
namespace_ids = self._GetNamespaceIdsFromWildcard( namespace )
namespace_ids = self._GetNamespaceIdsFromWildcard( namespace_wildcard )
with HydrusDB.TemporaryIntegerTable( self._c, namespace_ids, 'namespace_id' ) as temp_namespace_ids_table_name:
@ -9538,7 +9625,7 @@ class DB( HydrusDB.HydrusDB ):
if hash_ids_table_name is None:
if namespace is None:
if namespace_wildcard is None:
# hellmode
queries = [ 'SELECT DISTINCT hash_id FROM {};'.format( table_name ) for table_name in table_names ]
@ -9551,7 +9638,7 @@ class DB( HydrusDB.HydrusDB ):
else:
if namespace is None:
if namespace_wildcard is None:
queries = [ 'SELECT hash_id FROM {} WHERE EXISTS ( SELECT 1 FROM {} WHERE {}.hash_id = {}.hash_id );'.format( hash_ids_table_name, table_name, table_name, hash_ids_table_name ) for table_name in table_names ]
@ -10330,7 +10417,11 @@ class DB( HydrusDB.HydrusDB ):
def _GetNamespaceIdsFromWildcard( self, namespace_wildcard ):
if '*' in namespace_wildcard:
if namespace_wildcard == '*':
return self._STL( self._c.execute( 'SELECT namespace_id FROM namespaces;' ) )
elif '*' in namespace_wildcard:
like_param = ConvertWildcardToSQLiteLikeParameter( namespace_wildcard )
@ -11251,7 +11342,12 @@ class DB( HydrusDB.HydrusDB ):
if '*' in subtag_wildcard:
if ClientSearch.IsComplexWildcard( subtag_wildcard ):
if subtag_wildcard == '*':
# hellmode, but shouldn't be called, fingers crossed
cursor = self._c.execute( 'SELECT subtag_id FROM subtags;' )
elif ClientSearch.IsComplexWildcard( subtag_wildcard ):
# we search the 'searchable subtags', then link the various mappings back to real tags
@ -11316,11 +11412,18 @@ class DB( HydrusDB.HydrusDB ):
def _GetTagId( self, tag ):
tag = HydrusTags.CleanTag( tag )
clean_tag = HydrusTags.CleanTag( tag )
HydrusTags.CheckTagNotEmpty( tag )
try:
HydrusTags.CheckTagNotEmpty( clean_tag )
except HydrusExceptions.TagSizeException:
raise HydrusExceptions.TagSizeException( '"{}" tag seems not valid--when cleaned, it ends up with zero size!'.format( tag ) )
( namespace, subtag ) = HydrusTags.SplitTag( tag )
( namespace, subtag ) = HydrusTags.SplitTag( clean_tag )
namespace_id = self._GetNamespaceId( namespace )
subtag_id = self._GetSubtagId( subtag )
@ -11341,6 +11444,26 @@ class DB( HydrusDB.HydrusDB ):
return tag_id
def _GetTagIdsFromNamespaceIds( self, namespace_ids: typing.Collection[ int ], job_key = None ):
with HydrusDB.TemporaryIntegerTable( self._c, namespace_ids, 'namespace_id' ) as temp_namespace_ids_table_name:
# temp namespaces to tags
cursor = self._c.execute( 'SELECT DISTINCT tag_id FROM {} CROSS JOIN tags USING ( namespace_id );'.format( temp_namespace_ids_table_name ) )
cancelled_hook = None
if job_key is not None:
cancelled_hook = job_key.IsCancelled
tag_ids = self._STS( HydrusDB.ReadFromCancellableCursor( cursor, 128, cancelled_hook = cancelled_hook ) )
return tag_ids
def _GetTagIdsFromNamespaceIdsSubtagIds( self, namespace_ids: typing.Collection[ int ], subtag_ids: typing.Collection[ int ], job_key = None ):
with HydrusDB.TemporaryIntegerTable( self._c, subtag_ids, 'subtag_id' ) as temp_subtag_ids_table_name:
@ -12829,6 +12952,7 @@ class DB( HydrusDB.HydrusDB ):
self._FileMaintenanceAddJobs( ( hash_id, ), ClientFiles.REGENERATE_FILE_DATA_JOB_SIMILAR_FILES_METADATA )
def _PHashesEnsureFileOutOfSystem( self, hash_id ):
self._DuplicatesRemoveMediaIdMember( hash_id )
@ -15108,6 +15232,8 @@ class DB( HydrusDB.HydrusDB ):
def _RepairDB( self ):
self._controller.frame_splash_status.SetText( 'checking database for faults' )
( version, ) = self._c.execute( 'SELECT version FROM version;' ).fetchone()
HydrusDB.HydrusDB._RepairDB( self )
@ -15174,11 +15300,12 @@ class DB( HydrusDB.HydrusDB ):
BlockingSafeShowMessage( message )
self._c.execute( 'CREATE TABLE external_master.local_hashes ( hash_id INTEGER PRIMARY KEY, md5 BLOB_BYTES, sha1 BLOB_BYTES, sha512 BLOB_BYTES );' )
self._CreateIndex( 'external_master.local_hashes', [ 'md5' ] )
self._CreateIndex( 'external_master.local_hashes', [ 'sha1' ] )
self._CreateIndex( 'external_master.local_hashes', [ 'sha512' ] )
self._CreateIndex( 'external_master.local_hashes', [ 'md5' ] )
self._CreateIndex( 'external_master.local_hashes', [ 'sha1' ] )
self._CreateIndex( 'external_master.local_hashes', [ 'sha512' ] )
# mappings
existing_mapping_tables = self._STS( self._c.execute( 'SELECT name FROM external_mappings.sqlite_master WHERE type = ?;', ( 'table', ) ) )
@ -15280,6 +15407,9 @@ class DB( HydrusDB.HydrusDB ):
self._CacheTagSiblingsGenerate( tag_service_id )
self._CreateIndex( cache_actual_tag_siblings_lookup_table_name, [ 'ideal_tag_id' ] )
self._CreateIndex( cache_ideal_tag_siblings_lookup_table_name, [ 'ideal_tag_id' ] )
if len( missing_tag_sibling_cache_tables ) > 0:
@ -15321,6 +15451,9 @@ class DB( HydrusDB.HydrusDB ):
self._CacheTagParentsGenerate( tag_service_id )
self._CreateIndex( cache_actual_tag_parents_lookup_table_name, [ 'ancestor_tag_id' ] )
self._CreateIndex( cache_ideal_tag_parents_lookup_table_name, [ 'ancestor_tag_id' ] )
if len( missing_tag_parent_cache_tables ) > 0:
@ -15382,6 +15515,23 @@ class DB( HydrusDB.HydrusDB ):
self._RegenerateTagMappingsCache()
for ( file_service_id, tag_service_id ) in itertools.product( file_service_ids, tag_service_ids ):
( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name ) = GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id )
self._CreateIndex( cache_current_mappings_table_name, [ 'tag_id', 'hash_id' ], unique = True )
self._CreateIndex( cache_deleted_mappings_table_name, [ 'tag_id', 'hash_id' ], unique = True )
self._CreateIndex( cache_pending_mappings_table_name, [ 'tag_id', 'hash_id' ], unique = True )
if version >= 408:
( cache_display_current_mappings_table_name, cache_display_pending_mappings_table_name ) = GenerateSpecificDisplayMappingsCacheTableNames( file_service_id, tag_service_id )
self._CreateIndex( cache_display_current_mappings_table_name, [ 'tag_id', 'hash_id' ], unique = True )
self._CreateIndex( cache_display_pending_mappings_table_name, [ 'tag_id', 'hash_id' ], unique = True )
#
new_options = self._GetJSONDump( HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_OPTIONS )
@ -15466,6 +15616,8 @@ class DB( HydrusDB.HydrusDB ):
job_key.SetVariable( 'popup_text_1', message )
# now find an entirely new namespace_id, subtag_id pair for this tag
existing_tags = set()
potential_new_cleaned_tag = cleaned_tag
@ -16111,7 +16263,14 @@ class DB( HydrusDB.HydrusDB ):
def _TagExists( self, tag ):
tag = HydrusTags.CleanTag( tag )
try:
tag = HydrusTags.CleanTag( tag )
except:
return False
try:

View File

@ -68,6 +68,11 @@ class DuplicatesManager( object ):
def NotifyNewPotentialsSearchNumbers( self ):
HG.client_controller.pub( 'new_similar_files_potentials_search_numbers' )
def StartPotentialsSearch( self ):
with self._lock:
@ -186,6 +191,7 @@ class DuplicatesManager( object ):
self.RefreshMaintenanceNumbers()
self.NotifyNewPotentialsSearchNumbers()

View File

@ -2,6 +2,7 @@ import base64
import bs4
import calendar
import collections
import html
import json
import os
import re
@ -80,7 +81,20 @@ def ConvertParseResultToPrettyString( result ):
elif content_type == HC.CONTENT_TYPE_HASH:
return additional_info + ' hash: ' + parsed_text.hex()
( hash_type, hash_encoding ) = additional_info
try:
hash = GetHashFromParsedText( hash_encoding, parsed_text )
parsed_text = hash.hex()
except Exception as e:
parsed_text = 'Could not decode a hash from {}: {}'.format( parsed_text, str( e ) )
return '{} hash: {}'.format( hash_type, parsed_text )
elif content_type == HC.CONTENT_TYPE_TIMESTAMP:
@ -166,18 +180,9 @@ def ConvertParsableContentToPrettyString( parsable_content, include_veto = False
elif content_type == HC.CONTENT_TYPE_HASH:
if len( additional_infos ) == 1:
( hash_type, ) = additional_infos
pretty_strings.append( 'hash: ' + hash_type )
else:
hash_types = sorted( additional_infos )
pretty_strings.append( 'hashes: ' + ', '.join( hash_types ) )
s = 'hash: {}'.format( ', '.join( ( '{} in {}'.format( hash_type, hash_encoding ) for ( hash_type, hash_encoding ) in additional_infos ) ) )
pretty_strings.append( s )
elif content_type == HC.CONTENT_TYPE_TIMESTAMP:
@ -244,15 +249,67 @@ def GetChildrenContent( job_key, children, parsing_text, referral_url ):
return content
def GetHashFromParsedText( hash_encoding, parsed_text ) -> bytes:
encodings_to_attempt = []
if hash_encoding == 'hex':
encodings_to_attempt = [ 'hex', 'base64' ]
elif hash_encoding == 'base64':
encodings_to_attempt = [ 'base64' ]
main_error_text = None
for encoding_to_attempt in encodings_to_attempt:
try:
if encoding_to_attempt == 'hex':
return bytes.fromhex( parsed_text )
elif encoding_to_attempt == 'base64':
return base64.b64decode( parsed_text )
except Exception as e:
if main_error_text is None:
main_error_text = str( e )
continue
raise Exception( 'Could not decode hash: {}'.format( main_error_text ) )
def GetHashesFromParseResults( results ):
hash_results = []
for ( ( name, content_type, additional_info ), parsed_bytes ) in results:
for ( ( name, content_type, additional_info ), parsed_text ) in results:
if content_type == HC.CONTENT_TYPE_HASH:
hash_results.append( ( additional_info, parsed_bytes ) )
( hash_type, hash_encoding ) = additional_info
try:
hash = GetHashFromParsedText( hash_encoding, parsed_text )
except:
continue
hash_results.append( ( hash_type, hash ) )
@ -1609,7 +1666,7 @@ class ParseFormulaJSON( ParseFormula ):
elif self._content_to_fetch == JSON_CONTENT_JSON:
raw_text = json.dumps( root )
raw_text = json.dumps( root, ensure_ascii = False )
raw_texts.append( raw_text )
@ -1820,7 +1877,7 @@ class ContentParser( HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_CONTENT_PARSER
SERIALISABLE_NAME = 'Content Parser'
SERIALISABLE_VERSION = 4
SERIALISABLE_VERSION = 5
def __init__( self, name = None, content_type = None, formula = None, sort_type = CONTENT_PARSER_SORT_TYPE_NONE, sort_asc = False, additional_info = None ):
@ -1968,6 +2025,29 @@ class ContentParser( HydrusSerialisable.SerialisableBase ):
return ( 4, new_serialisable_info )
if version == 4:
( name, content_type, serialisable_formula, sort_type, sort_asc, additional_info ) = old_serialisable_info
if content_type == HC.CONTENT_TYPE_HASH:
hash_encoding = 'hex'
if '"base64"' in json.dumps( serialisable_formula ): # lmao, top code
hash_encoding = 'base64'
hash_type = additional_info
additional_info = ( hash_type, hash_encoding )
new_serialisable_info = ( name, content_type, serialisable_formula, sort_type, sort_asc, additional_info )
return ( 5, new_serialisable_info )
def GetName( self ):
@ -1994,18 +2074,6 @@ class ContentParser( HydrusSerialisable.SerialisableBase ):
raise e
# let's just make sure the user did their decoding-from-hex right
if self._content_type == HC.CONTENT_TYPE_HASH:
we_want = bytes
else:
we_want = str
parsed_texts = [ parsed_text for parsed_text in parsed_texts if isinstance( parsed_text, we_want ) ]
if self._sort_type == CONTENT_PARSER_SORT_TYPE_LEXICOGRAPHIC:
parsed_texts.sort( reverse = not self._sort_asc )
@ -3133,23 +3201,37 @@ class StringConverter( StringProcessingStep ):
s = urllib.parse.quote( s, safe = '' )
elif encode_type == 'unicode escape characters':
s = s.encode( 'unicode-escape' ).decode( 'utf-8' )
elif encode_type == 'html entities':
s = html.escape( s )
else:
# due to py3, this is now a bit of a pain
# _for now_, let's convert to bytes and then spit out a str
# _for now_, let's convert to bytes if not already and then spit out a str
if isinstance( s, str ):
s = bytes( s, 'utf-8' )
s_bytes = bytes( s, 'utf-8' )
else:
s_bytes = s
if encode_type == 'hex':
s = s.hex()
s = s_bytes.hex()
elif encode_type == 'base64':
s = str( base64.b64encode( s ), 'utf-8' )
s_bytes = base64.b64encode( s_bytes )
s = str( s_bytes, 'utf-8' )
@ -3161,21 +3243,16 @@ class StringConverter( StringProcessingStep ):
s = urllib.parse.unquote( s )
else:
elif encode_type == 'unicode escape characters':
# due to py3, this is now a bit of a pain
# as this is mostly used for hash stuff, _for now_, let's spit out a **bytes**
# the higher up object will have responsibility for coercing to str if needed
s = s.encode( 'utf-8' ).decode( 'unicode-escape' )
if encode_type == 'hex':
s = bytes.fromhex( s )
elif encode_type == 'base64':
s = base64.b64decode( s )
elif encode_type == 'html entities':
s = html.unescape( s )
# the old 'hex' and 'base64' are now deprecated, no-ops
elif conversion_type == STRING_CONVERSION_REVERSE:
@ -3339,6 +3416,11 @@ class StringConverter( StringProcessingStep ):
elif conversion_type == STRING_CONVERSION_DECODE:
if data in ( 'hex', 'base64' ):
return 'deprecated {} decode, now a no-op, can be deleted'.format( data )
return 'decode from ' + data
elif conversion_type == STRING_CONVERSION_REVERSE:

View File

@ -2811,7 +2811,7 @@ class PredicateResultsCache( object ):
self._predicates = list( predicates )
def CanServeTagResults( self, strict_search_text: str, exact_match: bool ):
def CanServeTagResults( self, parsed_autocomplete_text: ParsedAutocompleteText, exact_match: bool ):
return False
@ -2855,7 +2855,9 @@ class PredicateResultsCacheTag( PredicateResultsCache ):
self._exact_match = exact_match
def CanServeTagResults( self, strict_search_text: str, exact_match: bool ):
def CanServeTagResults( self, parsed_autocomplete_text: ParsedAutocompleteText, exact_match: bool ):
strict_search_text = parsed_autocomplete_text.GetSearchText( False )
if self._exact_match:
@ -2870,20 +2872,57 @@ class PredicateResultsCacheTag( PredicateResultsCache ):
else:
# a cache for 'cha' is probably invalid for 'character:samus aran'
tag_autocomplete_options = parsed_autocomplete_text.GetTagAutocompleteOptions()
( strict_search_text_namespace, strict_search_text_subtag ) = HydrusTags.SplitTag( strict_search_text )
if strict_search_text_namespace == self._strict_search_text_namespace:
#
if SearchTextIsFetchAll( self._strict_search_text ):
# != '', because a cache for 'character:' probably can't match a search for 'character:samus aran'
# if '*' searches are ok, we should have all results
return tag_autocomplete_options.FetchAllAllowed()
return self._strict_search_text_subtag != '' and strict_search_text_subtag.startswith( self._strict_search_text_subtag )
#
subtag_to_namespace_search = self._strict_search_text_namespace == '' and self._strict_search_text_subtag != '' and strict_search_text_namespace != ''
if subtag_to_namespace_search:
else:
# if a user searches 'char*' and then later 'character:samus*', we may have the results
# namespace changed, so if we do not satisfy this slim case, we can't provide any results
we_searched_namespace_as_subtag = strict_search_text_namespace.startswith( self._strict_search_text_subtag )
return we_searched_namespace_as_subtag and tag_autocomplete_options.SearchNamespacesIntoFullTags()
#
if self._strict_search_text_namespace != strict_search_text_namespace:
return False
#
# if user searched 'character:' or 'character:*', we may have the results
# if we do, we have all possible results
if SearchTextIsNamespaceBareFetchAll( self._strict_search_text ):
return tag_autocomplete_options.NamespaceBareFetchAllAllowed()
if SearchTextIsNamespaceFetchAll( self._strict_search_text ):
return tag_autocomplete_options.NamespaceFetchAllAllowed()
#
# 'sam' will match 'samus', character:sam will match character:samus
return strict_search_text_subtag.startswith( self._strict_search_text_subtag )

View File

@ -422,7 +422,7 @@ class FileRWLock( object ):
# if there are no writers, we can start reading
if self.parent.num_waiting_writers == 0:
if not self.parent.there_is_an_active_writer and self.parent.num_waiting_writers == 0:
self.parent.num_readers += 1
@ -444,10 +444,10 @@ class FileRWLock( object ):
self.parent.num_readers -= 1
do_notify = self.parent.num_readers == 0
do_write_notify = self.parent.num_readers == 0 and self.parent.num_waiting_writers > 0
if do_notify:
if do_write_notify:
self.parent.write_available_event.set()
@ -476,7 +476,9 @@ class FileRWLock( object ):
# if nothing reading or writing atm, sieze the opportunity
if self.parent.num_readers == 0 and not self.parent.there_is_an_active_writer:
if not self.parent.there_is_an_active_writer and self.parent.num_readers == 0:
self.parent.num_waiting_writers -= 1
self.parent.there_is_an_active_writer = True
@ -498,8 +500,6 @@ class FileRWLock( object ):
self.parent.there_is_an_active_writer = False
self.parent.num_waiting_writers -= 1
do_read_notify = self.parent.num_waiting_writers == 0 # reading is now available
do_write_notify = self.parent.num_waiting_writers > 0 # another writer is waiting
@ -531,6 +531,30 @@ class FileRWLock( object ):
self.there_is_an_active_writer = False
def IsLocked( self ):
with self.lock:
return self.num_waiting_writers > 0 or self.there_is_an_active_writer or self.num_readers > 0
def ReadersAreWorking( self ):
with self.lock:
return self.num_readers > 0
def WritersAreWaitingOrWorking( self ):
with self.lock:
return self.num_waiting_writers > 0 or self.there_is_an_active_writer
class QtAwareJob( HydrusThreading.SingleJob ):
def __init__( self, controller, scheduler, window, initial_delay, work_callable ):

View File

@ -2951,6 +2951,8 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
if len( self._processed_pairs ) == 0:
HG.client_controller.pub( 'new_similar_files_potentials_search_numbers' )
QW.QMessageBox.critical( self, 'Error', 'Due to an unexpected series of events (likely a series of file deletes), the duplicate filter has no valid pair to back up to. It will now close.' )
self.window().deleteLater()
@ -3027,6 +3029,8 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
if len( self._processed_pairs ) == 0:
HG.client_controller.pub( 'new_similar_files_potentials_search_numbers' )
QW.QMessageBox.critical( self, 'Error', 'It seems an entire batch of pairs were unable to be displayed. The duplicate filter will now close.' )
self.window().deleteLater()
@ -3054,6 +3058,8 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
self._CommitProcessed( blocking = True )
HG.client_controller.pub( 'new_similar_files_potentials_search_numbers' )
self._TryToCloseWindow()
return
@ -3130,7 +3136,7 @@ class CanvasFilterDuplicates( CanvasWithHovers ):
def CleanBeforeDestroy( self ):
ClientDuplicates.DuplicatesManager.instance().RefreshMaintenanceNumbers()
HG.client_controller.pub( 'new_similar_files_potentials_search_numbers' )
ClientMedia.hashes_to_jpeg_quality = {} # clear the cache
ClientMedia.hashes_to_pixel_hashes = {} # clear the cache

View File

@ -534,6 +534,8 @@ class DialogManageUPnP( ClientGUIDialogs.Dialog ):
QP.CallAfter( QW.QMessageBox.critical, self, 'Error', 'Could not load mappings:'+os.linesep*2+str(e) )
self._status_st.setText( str( e ) )
return

View File

@ -1133,6 +1133,7 @@ class ManagementPanelDuplicateFilter( ManagementPanel ):
self.widget().setLayout( vbox )
self._controller.sub( self, 'NotifyNewMaintenanceNumbers', 'new_similar_files_maintenance_numbers' )
self._controller.sub( self, 'NotifyNewPotentialsSearchNumbers', 'new_similar_files_potentials_search_numbers' )
self._tag_autocomplete.searchChanged.connect( self.SearchChanged )
self._search_distance_spinctrl.valueChanged.connect( self.EventSearchDistanceChanged )
@ -1426,6 +1427,11 @@ class ManagementPanelDuplicateFilter( ManagementPanel ):
self._maintenance_numbers_need_redrawing = True
def NotifyNewPotentialsSearchNumbers( self ):
self._dupe_count_numbers_dirty = True
def PageHidden( self ):
ManagementPanel.PageHidden( self )

View File

@ -111,7 +111,8 @@ def CopyMediaURLClassURLs( medias, url_class ):
for url in media_urls:
if url_class.Matches( url ):
# can't do 'url_class.matches', as it will match too many
if HG.client_controller.network_engine.domain_manager.GetURLClass( url ) == url_class:
urls.add( url )
@ -324,7 +325,8 @@ def OpenMediaURLClassURLs( medias, url_class ):
for url in media_urls:
if url_class.Matches( url ):
# can't do 'url_class.matches', as it will match too many
if HG.client_controller.network_engine.domain_manager.GetURLClass( url ) == url_class:
urls.add( url )

View File

@ -1802,7 +1802,7 @@ class EditContentParserPanel( ClientGUIScrolledPanels.EditPanel ):
for permitted_content_type in permitted_content_types:
self._content_type.addItem( types_to_str[ permitted_content_type], permitted_content_type )
self._content_type.addItem( types_to_str[ permitted_content_type ], permitted_content_type )
self._content_type.currentIndexChanged.connect( self.EventContentTypeChange )
@ -1832,6 +1832,13 @@ class EditContentParserPanel( ClientGUIScrolledPanels.EditPanel ):
self._hash_type.addItem( hash_type, hash_type )
self._hash_encoding = ClientGUICommon.BetterChoice( self._hash_panel )
for hash_encoding in ( 'hex', 'base64' ):
self._hash_encoding.addItem( hash_encoding, hash_encoding )
self._timestamp_panel = QW.QWidget( self._content_panel )
self._timestamp_type = ClientGUICommon.BetterChoice( self._timestamp_panel )
@ -1890,9 +1897,10 @@ class EditContentParserPanel( ClientGUIScrolledPanels.EditPanel ):
elif content_type == HC.CONTENT_TYPE_HASH:
hash_type = additional_info
( hash_type, hash_encoding ) = additional_info
self._hash_type.SetValue( hash_type )
self._hash_encoding.SetValue( hash_encoding )
elif content_type == HC.CONTENT_TYPE_TIMESTAMP:
@ -1949,6 +1957,7 @@ class EditContentParserPanel( ClientGUIScrolledPanels.EditPanel ):
rows = []
rows.append( ( 'hash type: ', self._hash_type ) )
rows.append( ( 'hash encoding: ', self._hash_encoding ) )
gridbox = ClientGUICommon.WrapInGrid( self._hash_panel, rows )
@ -2141,8 +2150,9 @@ class EditContentParserPanel( ClientGUIScrolledPanels.EditPanel ):
elif content_type == HC.CONTENT_TYPE_HASH:
hash_type = self._hash_type.GetValue()
hash_encoding = self._hash_encoding.GetValue()
additional_info = hash_type
additional_info = ( hash_type, hash_encoding )
elif content_type == HC.CONTENT_TYPE_TIMESTAMP:

View File

@ -2126,6 +2126,11 @@ class ReviewDownloaderImport( ClientGUIScrolledPanels.ReviewPanel ):
if len( new_gugs ) > 0:
for gug in new_gugs:
gug.RegenerateGUGKey()
domain_manager.AddGUGs( new_gugs )

View File

@ -189,6 +189,11 @@ class FrameSplashStatus( object ):
def SetSubtext( self, text ):
if HG.boot_debug and self._updater is not None and len( text ) > 0:
HydrusData.Print( text )
with self._lock:
self._status_subtext = text

View File

@ -399,17 +399,23 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
self._data_text = QW.QLineEdit( self._control_panel )
self._data_number = QP.MakeQSpinBox( self._control_panel, min=0, max=65535 )
self._data_encoding = ClientGUICommon.BetterChoice( self._control_panel )
self._data_decoding = ClientGUICommon.BetterChoice( self._control_panel )
self._data_regex_repl = QW.QLineEdit( self._control_panel )
self._data_date_link = ClientGUICommon.BetterHyperLink( self._control_panel, 'link to date info', 'https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior' )
self._data_timezone_decode = ClientGUICommon.BetterChoice( self._control_panel )
self._data_timezone_encode = ClientGUICommon.BetterChoice( self._control_panel )
self._data_timezone_offset = QP.MakeQSpinBox( self._control_panel, min=-86400, max=86400 )
for e in ( 'hex', 'base64', 'url percent encoding' ):
for e in ( 'hex', 'base64', 'url percent encoding', 'unicode escape characters', 'html entities' ):
self._data_encoding.addItem( e, e )
for e in ( 'url percent encoding', 'unicode escape characters', 'html entities' ):
self._data_decoding.addItem( e, e )
self._data_timezone_decode.addItem( 'UTC', HC.TIMEZONE_GMT )
self._data_timezone_decode.addItem( 'Local', HC.TIMEZONE_LOCAL )
self._data_timezone_decode.addItem( 'Offset', HC.TIMEZONE_OFFSET )
@ -451,10 +457,14 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
#
if conversion_type in ( ClientParsing.STRING_CONVERSION_DECODE, ClientParsing.STRING_CONVERSION_ENCODE ):
if conversion_type == ClientParsing.STRING_CONVERSION_ENCODE:
self._data_encoding.SetValue( data )
elif conversion_type == ClientParsing.STRING_CONVERSION_DECODE:
self._data_decoding.SetValue( data )
elif conversion_type == ClientParsing.STRING_CONVERSION_REGEX_SUB:
( pattern, repl ) = data
@ -497,7 +507,8 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
self._data_text_label = ClientGUICommon.BetterStaticText( self, 'string data: ' )
self._data_number_label = ClientGUICommon.BetterStaticText( self, 'number data: ' )
self._data_encoding_label = ClientGUICommon.BetterStaticText( self, 'encoding data: ' )
self._data_encoding_label = ClientGUICommon.BetterStaticText( self, 'encoding type: ' )
self._data_decoding_label = ClientGUICommon.BetterStaticText( self, 'decoding type: ' )
self._data_regex_repl_label = ClientGUICommon.BetterStaticText( self, 'regex replacement: ' )
self._data_date_link_label = ClientGUICommon.BetterStaticText( self, 'date info: ' )
self._data_timezone_decode_label = ClientGUICommon.BetterStaticText( self, 'date decode timezone: ' )
@ -508,6 +519,7 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
rows.append( ( self._data_text_label, self._data_text ) )
rows.append( ( self._data_number_label, self._data_number ) )
rows.append( ( self._data_encoding_label, self._data_encoding ) )
rows.append( ( self._data_decoding_label, self._data_decoding ) )
rows.append( ( self._data_regex_repl_label, self._data_regex_repl ) )
rows.append( ( self._data_date_link_label, self._data_date_link ) )
rows.append( ( self._data_timezone_decode_label, self._data_timezone_decode ) )
@ -549,6 +561,7 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
self._data_text.textEdited.connect( self._UpdateExampleText )
self._data_number.valueChanged.connect( self._UpdateExampleText )
self._data_encoding.currentIndexChanged.connect( self._UpdateExampleText )
self._data_decoding.currentIndexChanged.connect( self._UpdateExampleText )
self._data_regex_repl.textEdited.connect( self._UpdateExampleText )
self._data_timezone_decode.currentIndexChanged.connect( self._UpdateExampleText )
self._data_timezone_offset.valueChanged.connect( self._UpdateExampleText )
@ -565,6 +578,7 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
self._data_text_label.setVisible( False )
self._data_number_label.setVisible( False )
self._data_encoding_label.setVisible( False )
self._data_decoding_label.setVisible( False )
self._data_regex_repl_label.setVisible( False )
self._data_date_link_label.setVisible( False )
self._data_timezone_decode_label.setVisible( False )
@ -574,6 +588,7 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
self._data_text.setVisible( False )
self._data_number.setVisible( False )
self._data_encoding.setVisible( False )
self._data_decoding.setVisible( False )
self._data_regex_repl.setVisible( False )
self._data_date_link.setVisible( False )
self._data_timezone_decode.setVisible( False )
@ -582,11 +597,16 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
conversion_type = self._conversion_type.GetValue()
if conversion_type in ( ClientParsing.STRING_CONVERSION_ENCODE, ClientParsing.STRING_CONVERSION_DECODE ):
if conversion_type == ClientParsing.STRING_CONVERSION_ENCODE:
self._data_encoding_label.setVisible( True )
self._data_encoding.setVisible( True )
elif conversion_type == ClientParsing.STRING_CONVERSION_DECODE:
self._data_decoding_label.setVisible( True )
self._data_decoding.setVisible( True )
elif conversion_type in ( ClientParsing.STRING_CONVERSION_PREPEND_TEXT, ClientParsing.STRING_CONVERSION_APPEND_TEXT, ClientParsing.STRING_CONVERSION_DATE_DECODE, ClientParsing.STRING_CONVERSION_DATE_ENCODE, ClientParsing.STRING_CONVERSION_REGEX_SUB ):
self._data_text_label.setVisible( True )
@ -708,10 +728,14 @@ class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
conversion_type = self._conversion_type.GetValue()
if conversion_type in ( ClientParsing.STRING_CONVERSION_ENCODE, ClientParsing.STRING_CONVERSION_DECODE ):
if conversion_type == ClientParsing.STRING_CONVERSION_ENCODE:
data = self._data_encoding.GetValue()
elif conversion_type == ClientParsing.STRING_CONVERSION_DECODE:
data = self._data_decoding.GetValue()
elif conversion_type in ( ClientParsing.STRING_CONVERSION_PREPEND_TEXT, ClientParsing.STRING_CONVERSION_APPEND_TEXT ):
data = self._data_text.text()

View File

@ -6,7 +6,7 @@ from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
STYLESHEET_DIR = os.path.join( HC.BASE_DIR, 'static', 'qss' )
STYLESHEET_DIR = os.path.join( HC.STATIC_DIR, 'qss' )
DEFAULT_HYDRUS_STYLESHEET = ''
ORIGINAL_STYLE_NAME = None

View File

@ -2006,10 +2006,17 @@ class ListBoxTags( ListBox ):
if isinstance( term, ClientSearch.Predicate ):
if term.GetType() in ( ClientSearch.PREDICATE_TYPE_TAG, ClientSearch.PREDICATE_TYPE_NAMESPACE, ClientSearch.PREDICATE_TYPE_WILDCARD ):
if term.GetType() in ( ClientSearch.PREDICATE_TYPE_TAG, ClientSearch.PREDICATE_TYPE_WILDCARD ):
tag = term.GetValue()
elif term.GetType() == ClientSearch.PREDICATE_TYPE_NAMESPACE:
namespace = term.GetValue()
# this is useful for workflow
tag = '{}:*'.format( namespace )
elif term.GetType() == ClientSearch.PREDICATE_TYPE_PARENT:
tag = HydrusTags.CleanTag( term.GetValue() )
@ -2083,18 +2090,9 @@ class ListBoxTags( ListBox ):
for term in self._selected_terms:
if isinstance( term, ClientSearch.Predicate ):
if term.GetType() == ClientSearch.PREDICATE_TYPE_TAG:
tag = term.GetValue()
selected_actual_tags.add( tag )
else:
tag = term
tag = self._GetTagFromTerm( term )
if tag is not None:
selected_actual_tags.add( tag )
@ -2355,7 +2353,7 @@ class ListBoxTags( ListBox ):
fallback_service_key = self._GetFallbackServiceKey()
can_launch_sibling_and_parent_dialogs = len( selected_actual_tags ) > 0 and self.can_spawn_new_windows
can_show_siblings_and_parents = len( selected_actual_tags ) == 1 and fallback_service_key != CC.COMBINED_TAG_SERVICE_KEY
can_show_siblings_and_parents = len( selected_actual_tags ) == 1
if can_show_siblings_and_parents or can_launch_sibling_and_parent_dialogs:
@ -2392,88 +2390,159 @@ class ListBoxTags( ListBox ):
def sp_work_callable():
selected_tag_to_siblings_and_parents = HG.client_controller.Read( 'tag_siblings_and_parents_lookup', fallback_service_key, ( selected_tag, ) )
selected_tag_to_service_keys_to_siblings_and_parents = HG.client_controller.Read( 'tag_siblings_and_parents_lookup', ( selected_tag, ) )
siblings_and_parents = selected_tag_to_siblings_and_parents[ selected_tag ]
service_keys_to_siblings_and_parents = selected_tag_to_service_keys_to_siblings_and_parents[ selected_tag ]
return siblings_and_parents
return service_keys_to_siblings_and_parents
def sp_publish_callable( siblings_and_parents ):
def sp_publish_callable( service_keys_to_siblings_and_parents ):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = siblings_and_parents
service_keys_in_order = HG.client_controller.services_manager.GetServiceKeys( HC.REAL_TAG_SERVICES )
if len( sibling_chain_members ) <= 1:
num_siblings = 0
num_parents = 0
num_children = 0
for ( sibling_chain_members, ideal_tag, descendants, ancestors ) in service_keys_to_siblings_and_parents.values():
num_siblings += len( sibling_chain_members ) - 1
num_parents += len( ancestors )
num_children += len( descendants )
if num_siblings == 0:
siblings_menu.setTitle( 'no siblings' )
else:
siblings_menu.setTitle( '{} siblings'.format( HydrusData.ToHumanInt( len( sibling_chain_members ) - 1 ) ) )
siblings_menu.setTitle( '{} siblings'.format( HydrusData.ToHumanInt( num_siblings ) ) )
if ideal_tag == selected_tag:
for service_key in service_keys_in_order:
ideal_label = 'this is the ideal tag'
else:
ideal_label = 'ideal: {}'.format( ideal_tag )
ClientGUIMenus.AppendMenuItem( siblings_menu, ideal_label, ideal_label, HG.client_controller.pub, 'clipboard', 'text', ideal_tag )
ClientGUIMenus.AppendSeparator( siblings_menu )
sibling_chain_members_list = list( sibling_chain_members )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, sibling_chain_members_list )
for sibling in sibling_chain_members_list:
if sibling == ideal_tag:
if service_key not in service_keys_to_siblings_and_parents:
continue
ClientGUIMenus.AppendMenuLabel( siblings_menu, sibling )
( sibling_chain_members, ideal_tag, descendants, ancestors ) = service_keys_to_siblings_and_parents[ service_key ]
if len( sibling_chain_members ) <= 1:
continue
try:
service_name = HG.client_controller.services_manager.GetName( service_key )
except HydrusExceptions.DataMissing:
service_name = 'missing service'
ClientGUIMenus.AppendSeparator( siblings_menu )
ClientGUIMenus.AppendMenuLabel( siblings_menu, '{} ({} siblings)'.format( service_name, HydrusData.ToHumanInt( len( sibling_chain_members ) - 1 ) ) )
ClientGUIMenus.AppendSeparator( siblings_menu )
if ideal_tag == selected_tag:
ideal_label = 'this is the ideal tag'
else:
ideal_label = 'ideal: {}'.format( ideal_tag )
ClientGUIMenus.AppendMenuItem( siblings_menu, ideal_label, ideal_label, HG.client_controller.pub, 'clipboard', 'text', ideal_tag )
ClientGUIMenus.AppendSeparator( siblings_menu )
sibling_chain_members_list = list( sibling_chain_members )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, sibling_chain_members_list )
for sibling in sibling_chain_members_list:
if sibling == ideal_tag:
continue
ClientGUIMenus.AppendMenuLabel( siblings_menu, sibling )
if len( descendants ) + len( ancestors ) == 0:
if num_parents + num_children == 0:
parents_menu.setTitle( 'no parents' )
else:
parents_menu.setTitle( '{} parents, {} children'.format( HydrusData.ToHumanInt( len( ancestors ) ), HydrusData.ToHumanInt( len( descendants ) ) ) )
parents_menu.setTitle( '{} parents, {} children'.format( HydrusData.ToHumanInt( num_parents ), HydrusData.ToHumanInt( num_children ) ) )
if len( ancestors ) > 0:
for service_key in service_keys_in_order:
ancestors_list = list( ancestors )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, ancestors_list )
for ancestor in ancestors_list:
if service_key not in service_keys_to_siblings_and_parents:
ancestor_label = 'parent: {}'.format( ancestor )
ClientGUIMenus.AppendMenuItem( parents_menu, ancestor_label, ancestor_label, HG.client_controller.pub, 'clipboard', 'text', ancestor )
continue
if len( descendants ) > 0:
( sibling_chain_members, ideal_tag, descendants, ancestors ) = service_keys_to_siblings_and_parents[ service_key ]
if len( ancestors ) + len( descendants ) == 0:
continue
try:
service_name = HG.client_controller.services_manager.GetName( service_key )
except HydrusExceptions.DataMissing:
service_name = 'missing service'
ClientGUIMenus.AppendSeparator( parents_menu )
descendants_list = list( descendants )
ClientGUIMenus.AppendMenuLabel( parents_menu, '{} ({} parents, {} children)'.format( service_name, HydrusData.ToHumanInt( len( ancestors ) ), HydrusData.ToHumanInt( len( descendants ) ) ) )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, descendants_list )
ClientGUIMenus.AppendSeparator( parents_menu )
for descendant in descendants_list:
if len( ancestors ) > 0:
descendant_label = 'child: {}'.format( descendant )
ancestors_list = list( ancestors )
ClientGUIMenus.AppendMenuItem( parents_menu, descendant_label, descendant_label, HG.client_controller.pub, 'clipboard', 'text', descendant )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, ancestors_list )
for ancestor in ancestors_list:
ancestor_label = 'parent: {}'.format( ancestor )
ClientGUIMenus.AppendMenuItem( parents_menu, ancestor_label, ancestor_label, HG.client_controller.pub, 'clipboard', 'text', ancestor )
if len( descendants ) > 0:
ClientGUIMenus.AppendSeparator( parents_menu )
descendants_list = list( descendants )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, descendants_list )
for descendant in descendants_list:
descendant_label = 'child: {}'.format( descendant )
ClientGUIMenus.AppendMenuItem( parents_menu, descendant_label, descendant_label, HG.client_controller.pub, 'clipboard', 'text', descendant )
@ -2522,74 +2591,6 @@ class ListBoxTags( ListBox ):
ClientGUIMenus.AppendMenuItem( copy_menu, sub_selection_string, 'Copy the selected subtags to your clipboard.', self._ProcessMenuCopyEvent, COPY_SELECTED_SUBTAGS )
siblings = []
if len( selected_actual_tags ) == 1:
siblings_copy_menu = QW.QMenu( copy_menu )
siblings_copy_menu_action = ClientGUIMenus.AppendMenu( copy_menu, siblings_copy_menu, 'loading siblings\u2026' )
( selected_tag, ) = selected_actual_tags
def s_work_callable():
selected_tag_to_siblings = HG.client_controller.Read( 'tag_siblings_lookup', CC.COMBINED_TAG_SERVICE_KEY, ( selected_tag, ) )
siblings = selected_tag_to_siblings[ selected_tag ]
return siblings
def s_publish_callable( siblings ):
( selected_namespace, selected_subtag ) = HydrusTags.SplitTag( selected_tag )
sibling_tags_seen = set()
sibling_tags_seen.add( selected_tag )
sibling_tags_seen.add( selected_subtag )
siblings.difference_update( sibling_tags_seen )
if len( siblings ) > 0:
siblings = HydrusTags.SortNumericTags( siblings )
for sibling in siblings:
if sibling not in sibling_tags_seen:
ClientGUIMenus.AppendMenuItem( siblings_copy_menu, sibling, 'Copy the selected tag sibling to your clipboard.', HG.client_controller.pub, 'clipboard', 'text', sibling )
sibling_tags_seen.add( sibling )
( sibling_namespace, sibling_subtag ) = HydrusTags.SplitTag( sibling )
if sibling_subtag not in sibling_tags_seen:
ClientGUIMenus.AppendMenuItem( siblings_copy_menu, sibling_subtag, 'Copy the selected sibling subtag to your clipboard.', HG.client_controller.pub, 'clipboard', 'text', sibling_subtag )
sibling_tags_seen.add( sibling_subtag )
siblings_copy_menu.setTitle( 'siblings' )
else:
copy_menu.removeAction( siblings_copy_menu_action )
ClientGUIMenus.DestroyMenu( siblings_copy_menu )
async_job = ClientGUIAsync.AsyncQtJob( siblings_copy_menu, s_work_callable, s_publish_callable )
async_job.start()
if self._HasCounts():
ClientGUIMenus.AppendSeparator( copy_menu )

View File

@ -204,7 +204,7 @@ def ReadFetch(
if small_exact_match_search:
if not results_cache.CanServeTagResults( strict_search_text, True ):
if not results_cache.CanServeTagResults( parsed_autocomplete_text, True ):
predicates = HG.client_controller.Read( 'autocomplete_predicates', ClientTags.TAG_DISPLAY_ACTUAL, tag_search_context, file_service_key, search_text = strict_search_text, exact_match = True, inclusive = parsed_autocomplete_text.inclusive, add_namespaceless = add_namespaceless, job_key = job_key )
@ -221,7 +221,7 @@ def ReadFetch(
else:
cache_valid = results_cache.CanServeTagResults( autocomplete_search_text, False )
cache_valid = results_cache.CanServeTagResults( parsed_autocomplete_text, False )
if cache_valid:
@ -429,7 +429,7 @@ def WriteFetch( win, job_key, results_callable, parsed_autocomplete_text: Client
if small_exact_match_search:
if not results_cache.CanServeTagResults( strict_search_text, True ):
if not results_cache.CanServeTagResults( parsed_autocomplete_text, True ):
predicates = HG.client_controller.Read( 'autocomplete_predicates', ClientTags.TAG_DISPLAY_STORAGE, tag_search_context, file_service_key, search_text = strict_search_text, exact_match = True, add_namespaceless = False, job_key = job_key )
@ -446,7 +446,7 @@ def WriteFetch( win, job_key, results_callable, parsed_autocomplete_text: Client
else:
cache_valid = results_cache.CanServeTagResults( strict_search_text, False )
cache_valid = results_cache.CanServeTagResults( parsed_autocomplete_text, False )
if cache_valid:

View File

@ -1163,6 +1163,20 @@ class FileSeed( HydrusSerialisable.SerialisableBase ):
parsing_text = network_job.GetContentText()
actual_fetched_url = network_job.GetActualFetchedURL()
if actual_fetched_url != url_to_check:
( url_type, match_name, can_parse ) = HG.client_controller.network_engine.domain_manager.GetURLParseCapability( actual_fetched_url )
if url_type == HC.URL_TYPE_POST and can_parse:
post_url = actual_fetched_url
( url_to_check, parser ) = HG.client_controller.network_engine.domain_manager.GetURLToFetchAndParser( post_url )
parsing_context = {}
parsing_context[ 'post_url' ] = post_url

View File

@ -320,7 +320,9 @@ class GallerySeed( HydrusSerialisable.SerialisableBase ):
try:
( url_type, match_name, can_parse ) = HG.client_controller.network_engine.domain_manager.GetURLParseCapability( self.url )
gallery_url = self.url
( url_type, match_name, can_parse ) = HG.client_controller.network_engine.domain_manager.GetURLParseCapability( gallery_url )
if url_type not in ( HC.URL_TYPE_GALLERY, HC.URL_TYPE_WATCHABLE ):
@ -332,11 +334,11 @@ class GallerySeed( HydrusSerialisable.SerialisableBase ):
raise HydrusExceptions.VetoException( 'Did not have a parser for this URL!' )
( url_to_check, parser ) = HG.client_controller.network_engine.domain_manager.GetURLToFetchAndParser( self.url )
( url_to_check, parser ) = HG.client_controller.network_engine.domain_manager.GetURLToFetchAndParser( gallery_url )
status_hook( 'downloading gallery page' )
if self._referral_url not in ( self.url, url_to_check ):
if self._referral_url not in ( gallery_url, url_to_check ):
referral_url = self._referral_url
@ -360,17 +362,55 @@ class GallerySeed( HydrusSerialisable.SerialisableBase ):
parsing_text = network_job.GetContentText()
parsing_context = {}
actual_fetched_url = network_job.GetActualFetchedURL()
parsing_context[ 'gallery_url' ] = self.url
parsing_context[ 'url' ] = url_to_check
parsing_context[ 'post_index' ] = '0'
do_parse = True
all_parse_results = parser.Parse( parsing_context, parsing_text )
if len( all_parse_results ) == 0:
if actual_fetched_url != url_to_check:
raise HydrusExceptions.VetoException( 'The parser found nothing in the document!' )
( url_type, match_name, can_parse ) = HG.client_controller.network_engine.domain_manager.GetURLParseCapability( actual_fetched_url )
if url_type == HC.URL_TYPE_GALLERY:
if can_parse:
gallery_url = actual_fetched_url
( url_to_check, parser ) = HG.client_controller.network_engine.domain_manager.GetURLToFetchAndParser( gallery_url )
else:
do_parse = False
from hydrus.client.importing import ClientImportFileSeeds
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, actual_fetched_url )
file_seed.SetReferralURL( gallery_url )
file_seeds = [ file_seed ]
all_parse_results = []
if do_parse:
parsing_context = {}
parsing_context[ 'gallery_url' ] = gallery_url
parsing_context[ 'url' ] = url_to_check
parsing_context[ 'post_index' ] = '0'
all_parse_results = parser.Parse( parsing_context, parsing_text )
if len( all_parse_results ) == 0:
raise HydrusExceptions.VetoException( 'The parser found nothing in the document!' )
file_seeds = ClientImporting.ConvertAllParseResultsToFileSeeds( all_parse_results, gallery_url, file_import_options )
title = ClientParsing.GetTitleFromAllParseResults( all_parse_results )
@ -380,8 +420,6 @@ class GallerySeed( HydrusSerialisable.SerialisableBase ):
title_hook( title )
file_seeds = ClientImporting.ConvertAllParseResultsToFileSeeds( all_parse_results, self.url, file_import_options )
for file_seed in file_seeds:
file_seed.SetExternalFilterableTags( self._external_filterable_tags )
@ -394,7 +432,14 @@ class GallerySeed( HydrusSerialisable.SerialisableBase ):
status = CC.STATUS_SUCCESSFUL_AND_NEW
note = HydrusData.ToHumanInt( num_urls_added ) + ' new urls found'
if do_parse:
note = HydrusData.ToHumanInt( num_urls_added ) + ' new urls found'
else:
note = 'was redirected to a non-gallery url, which has been queued as a file import'
if num_urls_already_in_file_seed_cache > 0:

View File

@ -2803,6 +2803,8 @@ class NestedGalleryURLGenerator( HydrusSerialisable.SerialisableBaseNamed ):
self._gallery_url_generator_key = HydrusData.GenerateKey()
self._gug_keys_and_names = [ ( HydrusData.GenerateKey(), name ) for ( gug_key, name ) in self._gug_keys_and_names ]
def RepairGUGs( self, available_gugs ):

View File

@ -160,6 +160,7 @@ class NetworkJob( object ):
self._body = body
self._referral_url = referral_url
self._actual_fetched_url = self._url
self._temp_path = temp_path
self._files = None
@ -837,6 +838,14 @@ class NetworkJob( object ):
def GetActualFetchedURL( self ):
with self._lock:
return self._actual_fetched_url
def GetContentBytes( self ):
with self._lock:
@ -1160,6 +1169,12 @@ class NetworkJob( object ):
response = self._SendRequestAndGetResponse()
# I think tbh I would rather tell requests not to do 3XX, which is possible with allow_redirects = False on request, and then just raise various 3XX exceptions with url info, so I can requeue easier and keep a record
# figuring out correct new url seems a laugh, requests has slight helpers, but lots of exceptions
# SessionRedirectMixin here https://requests.readthedocs.io/en/latest/_modules/requests/sessions/
# but this will do as a patch for now
self._actual_fetched_url = response.url
with self._lock:
if self._body is not None:

View File

@ -70,7 +70,7 @@ options = {}
# Misc
NETWORK_VERSION = 19
SOFTWARE_VERSION = 421
SOFTWARE_VERSION = 422
CLIENT_API_VERSION = 15
SERVER_THUMBNAIL_DIMENSIONS = ( 200, 200 )

View File

@ -11,6 +11,8 @@ no_daemons = False
db_journal_mode = 'WAL'
no_db_temp_files = False
boot_debug = False
# if this is set to 1, transactions are not immediately synced to the journal so multiple can be undone following a power-loss
# if set to 2, all transactions are synced, so once a new one starts you know the last one is on disk
# corruption cannot occur either way, but since we have multiple ATTACH dbs with diff journals, let's not mess around when power-cut during heavy file import or w/e

View File

@ -10,36 +10,76 @@ from hydrus.core import HydrusExceptions
from hydrus.core import HydrusText
from hydrus.core import HydrusThreading
# new stuff starts here
# the _win32, _linux, _osx stuff here is legacy, from when I used to bundle these exes. this cause anti-virus false positive wew
if HC.PLATFORM_LINUX:
if HC.PLATFORM_WINDOWS:
upnpc_path = os.path.join( HC.BIN_DIR, 'upnpc_linux' )
possible_bin_filenames = [ 'upnpc-static.exe', 'upnpc-static.exe', 'miniupnpc.exe', 'upnpc_win32.exe' ]
elif HC.PLATFORM_MACOS:
else:
upnpc_path = os.path.join( HC.BIN_DIR, 'upnpc_osx' )
possible_bin_filenames = [ 'upnpc-static', 'upnpc-shared', 'miniupnpc' ]
elif HC.PLATFORM_WINDOWS:
upnpc_path = os.path.join( HC.BIN_DIR, 'upnpc_win32.exe' )
if HC.PLATFORM_LINUX:
possible_bin_filenames.append( 'upnpc_linux' )
elif HC.PLATFORM_MACOS:
possible_bin_filenames.append( 'upnpc_osx' )
UPNPC_PATH = 'miniupnpc' # no exe, we'll assume installed to system
UPNPC_IS_MISSING = False
UPNPC_MANAGER_ERROR_PRINTED = False
for filename in possible_bin_filenames:
possible_path = os.path.join( HC.BIN_DIR, filename )
if os.path.exists( possible_path ):
UPNPC_PATH = possible_path
EXTERNAL_IP = {}
EXTERNAL_IP[ 'ip' ] = None
EXTERNAL_IP[ 'time' ] = 0
def RaiseMissingUPnPcError( operation ):
message = 'Unfortunately, the operation "{}" requires miniupnpc, which does not seem to be available for your system. You can install it yourself easily, please check install_dir/bin/upnpc_readme.txt for more information!'.format( operation )
global UPNPC_IS_MISSING
if not UPNPC_IS_MISSING:
HydrusData.ShowText( message )
UPNPC_IS_MISSING = True
raise FileNotFoundError( message )
def GetExternalIP():
if HydrusData.TimeHasPassed( EXTERNAL_IP[ 'time' ] + ( 3600 * 24 ) ):
cmd = [ upnpc_path, '-l' ]
cmd = [ UPNPC_PATH, '-l' ]
sbp_kwargs = HydrusData.GetSubprocessKWArgs( text = True )
HydrusData.CheckProgramIsNotShuttingDown()
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
try:
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
except FileNotFoundError:
RaiseMissingUPnPcError( 'fetch external IP' )
HydrusData.WaitForProcessToFinish( p, 30 )
@ -47,7 +87,7 @@ def GetExternalIP():
if stderr is not None and len( stderr ) > 0:
raise Exception( 'Problem while trying to fetch External IP:' + os.linesep * 2 + str( stderr ) )
raise Exception( 'Problem while trying to fetch External IP (if it says No IGD UPnP Device, you are either on a VPN or your router does not seem to support UPnP):' + os.linesep * 2 + str( stderr ) )
else:
@ -83,13 +123,20 @@ def GetLocalIP():
def AddUPnPMapping( internal_client, internal_port, external_port, protocol, description, duration = 3600 ):
cmd = [ upnpc_path, '-e', description, '-a', internal_client, str( internal_port ), str( external_port ), protocol, str( duration ) ]
cmd = [ UPNPC_PATH, '-e', description, '-a', internal_client, str( internal_port ), str( external_port ), protocol, str( duration ) ]
sbp_kwargs = HydrusData.GetSubprocessKWArgs( text = True )
HydrusData.CheckProgramIsNotShuttingDown()
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
try:
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
except FileNotFoundError:
RaiseMissingUPnPcError( 'add UPnP port forward' )
HydrusData.WaitForProcessToFinish( p, 30 )
@ -135,13 +182,20 @@ def AddUPnPMappingCheckResponse( internal_client, internal_port, external_port,
def GetUPnPMappings():
cmd = [ upnpc_path, '-l' ]
cmd = [ UPNPC_PATH, '-l' ]
sbp_kwargs = HydrusData.GetSubprocessKWArgs( text = True )
HydrusData.CheckProgramIsNotShuttingDown()
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
try:
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
except FileNotFoundError:
RaiseMissingUPnPcError( 'get current UPnP port forward mappings' )
HydrusData.WaitForProcessToFinish( p, 30 )
@ -149,7 +203,7 @@ def GetUPnPMappings():
if stderr is not None and len( stderr ) > 0:
raise Exception( 'Problem while trying to fetch UPnP mappings:' + os.linesep * 2 + stderr )
raise Exception( 'Problem while trying to fetch UPnP mappings (if it says No IGD UPnP Device, you are either on a VPN or your router does not seem to support UPnP):' + os.linesep * 2 + stderr )
else:
@ -237,13 +291,20 @@ def GetUPnPMappingsParseResponse( stdout ):
def RemoveUPnPMapping( external_port, protocol ):
cmd = [ upnpc_path, '-d', str( external_port ), protocol ]
cmd = [ UPNPC_PATH, '-d', str( external_port ), protocol ]
sbp_kwargs = HydrusData.GetSubprocessKWArgs( text = True )
HydrusData.CheckProgramIsNotShuttingDown()
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
try:
p = subprocess.Popen( cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, **sbp_kwargs )
except FileNotFoundError:
RaiseMissingUPnPcError( 'remove UPnP port forward' )
HydrusData.WaitForProcessToFinish( p, 30 )
@ -254,7 +315,6 @@ def RemoveUPnPMapping( external_port, protocol ):
raise Exception( 'Problem while trying to remove UPnP mapping:' + os.linesep * 2 + stderr )
class ServicesUPnPManager( object ):
def __init__( self, services ):
@ -266,29 +326,57 @@ class ServicesUPnPManager( object ):
def _RefreshUPnP( self, force_wipe = False ):
running_service_with_upnp = True in ( service.GetPort() is not None and service.GetUPnPPort() is not None for service in self._services )
if not force_wipe:
running_service_with_upnp = True in ( service.GetPort() is not None and service.GetUPnPPort() is not None for service in self._services )
if not running_service_with_upnp:
return
if running_service_with_upnp and UPNPC_IS_MISSING:
return # welp
try:
local_ip = GetLocalIP()
except:
return # can't get local IP, we are wewlad atm, probably some complicated multiple network situation we'll have to deal with later
try:
current_mappings = GetUPnPMappings()
our_mappings = { ( internal_client, internal_port ) : external_port for ( description, internal_client, internal_port, external_port, protocol, enabled ) in current_mappings }
except FileNotFoundError:
if not force_wipe:
global UPNPC_MANAGER_ERROR_PRINTED
if not UPNPC_MANAGER_ERROR_PRINTED:
HydrusData.ShowText( 'Hydrus was set up to manage your services\' port forwards with UPnP, but the miniupnpc executable is not available. Please check install_dir/bin/upnpc_readme.txt for more details.' )
UPNPC_MANAGER_ERROR_PRINTED = True
return # in this case, most likely miniupnpc could not be found, so skip for now
except:
return # This IGD probably doesn't support UPnP, so don't spam the user with errors they can't fix!
our_mappings = { ( internal_client, internal_port ) : external_port for ( description, internal_client, internal_port, external_port, protocol, enabled ) in current_mappings }
for service in self._services:
internal_port = service.GetPort()
@ -343,7 +431,7 @@ class ServicesUPnPManager( object ):
self._services = services
self._RefreshUPnP( force_wipe = True )
self._RefreshUPnP()

View File

@ -177,6 +177,57 @@ class TestClientDB( unittest.TestCase ):
self.assertEqual( result, [] )
#
result = self._read( 'autocomplete_predicates', ClientTags.TAG_DISPLAY_STORAGE, tag_search_context, CC.COMBINED_FILE_SERVICE_KEY, search_text = '*' )
preds = set()
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'car', min_current_count = 1 ) )
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'series:cars', min_current_count = 1 ) )
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'maker:ford', min_current_count = 1 ) )
for p in result: self.assertEqual( p.GetCount( HC.CONTENT_STATUS_CURRENT ), 1 )
self.assertEqual( set( result ), preds )
#
result = self._read( 'autocomplete_predicates', ClientTags.TAG_DISPLAY_STORAGE, tag_search_context, CC.COMBINED_FILE_SERVICE_KEY, search_text = 'series:*' )
preds = set()
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'series:cars', min_current_count = 1 ) )
for p in result: self.assertEqual( p.GetCount( HC.CONTENT_STATUS_CURRENT ), 1 )
self.assertEqual( set( result ), preds )
#
result = self._read( 'autocomplete_predicates', ClientTags.TAG_DISPLAY_STORAGE, tag_search_context, CC.COMBINED_FILE_SERVICE_KEY, search_text = 'c*r*' )
preds = set()
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'car', min_current_count = 1 ) )
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'series:cars', min_current_count = 1 ) )
for p in result: self.assertEqual( p.GetCount( HC.CONTENT_STATUS_CURRENT ), 1 )
self.assertEqual( set( result ), preds )
#
result = self._read( 'autocomplete_predicates', ClientTags.TAG_DISPLAY_STORAGE, tag_search_context, CC.COMBINED_FILE_SERVICE_KEY, search_text = 'ser*', search_namespaces_into_full_tags = True )
preds = set()
preds.add( ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'series:cars', min_current_count = 1 ) )
for p in result: self.assertEqual( p.GetCount( HC.CONTENT_STATUS_CURRENT ), 1 )
self.assertEqual( set( result ), preds )
def test_export_folders( self ):

View File

@ -665,7 +665,7 @@ class TestClientDBTags( unittest.TestCase ):
#
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, ( 'bodysuit', ) )[ 'bodysuit' ], ( {
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', ( 'bodysuit', ) )[ 'bodysuit' ][ self._my_service_key ], ( {
'bodysuit',
'clothing:bodysuit'
}, 'clothing:bodysuit', {
@ -705,7 +705,7 @@ class TestClientDBTags( unittest.TestCase ):
#
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, ( 'samus aran', ) )[ 'samus aran' ], ( {
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', ( 'samus aran', ) )[ 'samus aran' ][ self._my_service_key ], ( {
'character:samus aran',
'samus aran'
}, 'character:samus aran', {
@ -768,7 +768,7 @@ class TestClientDBTags( unittest.TestCase ):
#
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, ( 'pharah', ) )[ 'pharah' ], ( {
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', ( 'pharah', ) )[ 'pharah' ][ self._my_service_key ], ( {
'character:fareeha "pharah" amari',
'character:fareeha "pharah" amari (overwatch)',
'character:pharah',
@ -785,7 +785,7 @@ class TestClientDBTags( unittest.TestCase ):
'studio:blizzard entertainment'
} ) )
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, ( 'warcraft', ) )[ 'warcraft' ], ( {
self.assertEqual( self._read( 'tag_siblings_and_parents_lookup', ( 'warcraft', ) )[ 'warcraft' ][ self._my_service_key ], ( {
'series:warcraft',
'copyright:warcraft',
'warcraft',
@ -899,7 +899,7 @@ class TestClientDBTags( unittest.TestCase ):
'blonde'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for ( tag, expected_descendants, expected_ancestors ) in (
(
@ -929,7 +929,7 @@ class TestClientDBTags( unittest.TestCase ):
)
):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._my_service_key ]
self.assertEqual( sibling_chain_members, { tag } )
@ -956,7 +956,7 @@ class TestClientDBTags( unittest.TestCase ):
'artist'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._public_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for ( tag, expected_descendants, expected_ancestors ) in (
(
@ -1036,7 +1036,7 @@ class TestClientDBTags( unittest.TestCase ):
)
):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._public_service_key ]
self.assertEqual( sibling_chain_members, { tag } )
@ -1075,7 +1075,7 @@ class TestClientDBTags( unittest.TestCase ):
'lara croft'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for ( tag, expected_descendants, expected_ancestors ) in (
(
@ -1085,7 +1085,7 @@ class TestClientDBTags( unittest.TestCase ):
),
):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._my_service_key ]
self.assertEqual( sibling_chain_members, { tag } )
@ -1100,7 +1100,7 @@ class TestClientDBTags( unittest.TestCase ):
'myself'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._public_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for ( tag, expected_descendants, expected_ancestors ) in (
(
@ -1120,7 +1120,7 @@ class TestClientDBTags( unittest.TestCase ):
)
):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._public_service_key ]
self.assertEqual( sibling_chain_members, { tag } )
@ -1164,7 +1164,7 @@ class TestClientDBTags( unittest.TestCase ):
'lara croft'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for ( tag, expected_descendants, expected_ancestors ) in (
(
@ -1199,7 +1199,7 @@ class TestClientDBTags( unittest.TestCase ):
)
):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._my_service_key ]
self.assertEqual( sibling_chain_members, { tag } )
@ -1229,7 +1229,7 @@ class TestClientDBTags( unittest.TestCase ):
'myself'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._public_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for ( tag, expected_descendants, expected_ancestors ) in (
(
@ -1324,7 +1324,7 @@ class TestClientDBTags( unittest.TestCase ):
)
):
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._public_service_key ]
self.assertEqual( sibling_chain_members, { tag } )
@ -1437,7 +1437,7 @@ class TestClientDBTags( unittest.TestCase ):
'character:samus aran'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._my_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for tag in {
'sameus aran',
@ -1446,7 +1446,7 @@ class TestClientDBTags( unittest.TestCase ):
'character:samus aran'
}:
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._my_service_key ]
self.assertEqual( sibling_chain_members, {
'sameus aran',
@ -1475,7 +1475,7 @@ class TestClientDBTags( unittest.TestCase ):
'creator:splashbrush'
}
selected_tag_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', self._public_service_key, all_tags )
selected_tag_to_service_keys_to_siblings_and_parents = self._read( 'tag_siblings_and_parents_lookup', all_tags )
for tag in {
'bodysut',
@ -1484,7 +1484,7 @@ class TestClientDBTags( unittest.TestCase ):
'clothing:bodysuit',
}:
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._public_service_key ]
self.assertEqual( sibling_chain_members, {
'bodysut',
@ -1505,7 +1505,7 @@ class TestClientDBTags( unittest.TestCase ):
'series:metroid',
}:
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._public_service_key ]
self.assertEqual( sibling_chain_members, {
'metrod',
@ -1526,7 +1526,7 @@ class TestClientDBTags( unittest.TestCase ):
'creator:splashbrush'
}:
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_siblings_and_parents[ tag ]
( sibling_chain_members, ideal_tag, descendants, ancestors ) = selected_tag_to_service_keys_to_siblings_and_parents[ tag ][ self._public_service_key ]
self.assertEqual( sibling_chain_members, {
'splashbush',

View File

@ -9,6 +9,121 @@ class TestStringConverter( unittest.TestCase ):
def test_basics( self ):
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_REMOVE_TEXT_FROM_BEGINNING, 1 ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), '123456789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_REMOVE_TEXT_FROM_END, 1 ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), '012345678' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_CLIP_TEXT_FROM_BEGINNING, 7 ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), '0123456' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_CLIP_TEXT_FROM_END, 7 ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), '3456789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_PREPEND_TEXT, 'abc' ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), 'abc0123456789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_APPEND_TEXT, 'xyz' ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), '0123456789xyz' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_ENCODE, 'url percent encoding' ) ] )
self.assertEqual( string_converter.Convert( '01234 56789' ), '01234%2056789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_DECODE, 'url percent encoding' ) ] )
self.assertEqual( string_converter.Convert( '01234%2056789' ), '01234 56789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_ENCODE, 'unicode escape characters' ) ] )
self.assertEqual( string_converter.Convert( '01234\u039456789' ), '01234\\u039456789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_DECODE, 'unicode escape characters' ) ] )
self.assertEqual( string_converter.Convert( '01234\\u039456789' ), '01234\u039456789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_ENCODE, 'html entities' ) ] )
self.assertEqual( string_converter.Convert( '01234&56789' ), '01234&amp;56789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_DECODE, 'html entities' ) ] )
self.assertEqual( string_converter.Convert( '01234&amp;56789' ), '01234&56789' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_ENCODE, 'hex' ) ] )
self.assertEqual( string_converter.Convert( b'\xe5\xafW\xa6\x87\xf0\x89\x89O^\xce\xdeP\x04\x94X' ), 'e5af57a687f089894f5ecede50049458' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_ENCODE, 'base64' ) ] )
self.assertEqual( string_converter.Convert( b'\xe5\xafW\xa6\x87\xf0\x89\x89O^\xce\xdeP\x04\x94X' ), '5a9XpofwiYlPXs7eUASUWA==' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_REVERSE, None ) ] )
self.assertEqual( string_converter.Convert( '0123456789' ), '9876543210' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_REGEX_SUB, ( '\\d', 'd' ) ) ] )
self.assertEqual( string_converter.Convert( 'abc123' ), 'abcddd' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_DATE_DECODE, ( '%Y-%m-%d %H:%M:%S', HC.TIMEZONE_GMT, 0 ) ) ] )
self.assertEqual( string_converter.Convert( '1970-01-02 00:00:00' ), '86400' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_DATE_ENCODE, ( '%Y-%m-%d %H:%M:%S', 0 ) ) ] )
self.assertEqual( string_converter.Convert( '86400' ), '1970-01-02 00:00:00' )
#
string_converter = ClientParsing.StringConverter( conversions = [ ( ClientParsing.STRING_CONVERSION_INTEGER_ADDITION, 5 ) ] )
self.assertEqual( string_converter.Convert( '4' ), '9' )
def test_compound( self ):
conversions = []
conversions.append( ( ClientParsing.STRING_CONVERSION_REMOVE_TEXT_FROM_BEGINNING, 1 ) )
@ -89,30 +204,6 @@ class TestStringConverter( unittest.TestCase ):
self.assertEqual( string_converter.Convert( '0123456789' ), 'z xddddddcba' )
#
conversions = [ ( ClientParsing.STRING_CONVERSION_DATE_DECODE, ( '%Y-%m-%d %H:%M:%S', HC.TIMEZONE_GMT, 0 ) ) ]
string_converter = ClientParsing.StringConverter( conversions = conversions )
self.assertEqual( string_converter.Convert( '1970-01-02 00:00:00' ), '86400' )
#
conversions = [ ( ClientParsing.STRING_CONVERSION_DATE_ENCODE, ( '%Y-%m-%d %H:%M:%S', 0 ) ) ]
string_converter = ClientParsing.StringConverter( conversions = conversions )
self.assertEqual( string_converter.Convert( '86400' ), '1970-01-02 00:00:00' )
#
conversions = [ ( ClientParsing.STRING_CONVERSION_INTEGER_ADDITION, 5 ) ]
string_converter = ClientParsing.StringConverter( conversions = conversions )
self.assertEqual( string_converter.Convert( '4' ), '9' )
class TestStringMatch( unittest.TestCase ):
@ -234,35 +325,3 @@ class TestStringProcessor( unittest.TestCase ):
self.assertEqual( processor.ProcessStrings( [ '1,a,2,3', 'test', '123' ] ), expected_result )
def test_hex_fail( self ):
processor = ClientParsing.StringProcessor()
conversions = [ ( ClientParsing.STRING_CONVERSION_DECODE, 'hex' ) ]
string_converter = ClientParsing.StringConverter( conversions = conversions )
#
processing_steps = []
processing_steps.append( string_converter )
processing_steps.append( ClientParsing.StringMatch( match_type = ClientParsing.STRING_MATCH_FLEXIBLE, match_value = ClientParsing.NUMERIC ) )
processor.SetProcessingSteps( processing_steps )
self.assertEqual( processor.ProcessStrings( [ '0123456789abcdef' ] ), [] )
#
processing_steps = []
processing_steps.append( string_converter )
processing_steps.append( ClientParsing.StringSplitter( separator = ',' ) )
processor.SetProcessingSteps( processing_steps )
self.assertEqual( processor.ProcessStrings( [ '0123456789abcdef' ] ), [] )

View File

@ -855,24 +855,79 @@ class TestTagObjects( unittest.TestCase ):
bool_tests( parsed_autocomplete_text, [ True, True, False, True, True, False, True ] )
def test_predicate_results_cache( self ):
def test_predicate_results_cache_init( self ):
tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
search_namespaces_into_full_tags = False
namespace_bare_fetch_all_allowed = False
namespace_fetch_all_allowed = False
fetch_all_allowed = False
tag_autocomplete_options.SetTuple(
tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed
)
pat_empty = ClientSearch.ParsedAutocompleteText( '', tag_autocomplete_options, True )
pat_samus = ClientSearch.ParsedAutocompleteText( 'samus', tag_autocomplete_options, True )
pat_samus_ar = ClientSearch.ParsedAutocompleteText( 'samus ar', tag_autocomplete_options, True )
pat_samus_br = ClientSearch.ParsedAutocompleteText( 'samus br', tag_autocomplete_options, True )
pat_character_samus = ClientSearch.ParsedAutocompleteText( 'character:samus', tag_autocomplete_options, True )
pat_character_samus_ar = ClientSearch.ParsedAutocompleteText( 'character:samus ar', tag_autocomplete_options, True )
pat_character_samus_br = ClientSearch.ParsedAutocompleteText( 'character:samus br', tag_autocomplete_options, True )
pat_metroid = ClientSearch.ParsedAutocompleteText( 'metroid', tag_autocomplete_options, True )
pat_series_samus = ClientSearch.ParsedAutocompleteText( 'series:samus', tag_autocomplete_options, True )
predicate_results_cache = ClientSearch.PredicateResultsCacheInit()
self.assertEqual( predicate_results_cache.CanServeTagResults( '', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, False ), False )
#
def test_predicate_results_cache_system( self ):
tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
search_namespaces_into_full_tags = False
namespace_bare_fetch_all_allowed = False
namespace_fetch_all_allowed = False
fetch_all_allowed = False
tag_autocomplete_options.SetTuple(
tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed
)
pat_empty = ClientSearch.ParsedAutocompleteText( '', tag_autocomplete_options, True )
pat_samus = ClientSearch.ParsedAutocompleteText( 'samus', tag_autocomplete_options, True )
pat_samus_ar = ClientSearch.ParsedAutocompleteText( 'samus ar', tag_autocomplete_options, True )
pat_samus_br = ClientSearch.ParsedAutocompleteText( 'samus br', tag_autocomplete_options, True )
pat_character_samus = ClientSearch.ParsedAutocompleteText( 'character:samus', tag_autocomplete_options, True )
pat_character_samus_ar = ClientSearch.ParsedAutocompleteText( 'character:samus ar', tag_autocomplete_options, True )
pat_character_samus_br = ClientSearch.ParsedAutocompleteText( 'character:samus br', tag_autocomplete_options, True )
pat_metroid = ClientSearch.ParsedAutocompleteText( 'metroid', tag_autocomplete_options, True )
pat_series_samus = ClientSearch.ParsedAutocompleteText( 'series:samus', tag_autocomplete_options, True )
predicates = [ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_SYSTEM_INBOX ) ]
@ -880,20 +935,48 @@ class TestTagObjects( unittest.TestCase ):
self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, False ), False )
#
def test_predicate_results_cache_subtag_normal( self ):
tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
search_namespaces_into_full_tags = False
namespace_bare_fetch_all_allowed = False
namespace_fetch_all_allowed = False
fetch_all_allowed = False
tag_autocomplete_options.SetTuple(
tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed
)
pat_empty = ClientSearch.ParsedAutocompleteText( '', tag_autocomplete_options, True )
pat_samus = ClientSearch.ParsedAutocompleteText( 'samus', tag_autocomplete_options, True )
pat_samus_ar = ClientSearch.ParsedAutocompleteText( 'samus ar', tag_autocomplete_options, True )
pat_samus_br = ClientSearch.ParsedAutocompleteText( 'samus br', tag_autocomplete_options, True )
pat_character_samus = ClientSearch.ParsedAutocompleteText( 'character:samus', tag_autocomplete_options, True )
pat_character_samus_ar = ClientSearch.ParsedAutocompleteText( 'character:samus ar', tag_autocomplete_options, True )
pat_character_samus_br = ClientSearch.ParsedAutocompleteText( 'character:samus br', tag_autocomplete_options, True )
pat_metroid = ClientSearch.ParsedAutocompleteText( 'metroid', tag_autocomplete_options, True )
pat_series_samus = ClientSearch.ParsedAutocompleteText( 'series:samus', tag_autocomplete_options, True )
samus = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus' )
samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus aran' )
@ -907,20 +990,20 @@ class TestTagObjects( unittest.TestCase ):
self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus br', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus br', False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_br, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_br, False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, False ), False )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus' ) ), { samus, samus_aran, character_samus_aran } )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus*' ) ), { samus, samus_aran, character_samus_aran } )
@ -928,7 +1011,39 @@ class TestTagObjects( unittest.TestCase ):
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus ar*' ) ), { samus_aran, character_samus_aran } )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus aran*' ) ), { samus_aran, character_samus_aran } )
#
def test_predicate_results_cache_subtag_exact( self ):
tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
search_namespaces_into_full_tags = False
namespace_bare_fetch_all_allowed = False
namespace_fetch_all_allowed = False
fetch_all_allowed = False
tag_autocomplete_options.SetTuple(
tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed
)
pat_empty = ClientSearch.ParsedAutocompleteText( '', tag_autocomplete_options, True )
pat_samus = ClientSearch.ParsedAutocompleteText( 'samus', tag_autocomplete_options, True )
pat_samus_ar = ClientSearch.ParsedAutocompleteText( 'samus ar', tag_autocomplete_options, True )
pat_samus_br = ClientSearch.ParsedAutocompleteText( 'samus br', tag_autocomplete_options, True )
pat_character_samus = ClientSearch.ParsedAutocompleteText( 'character:samus', tag_autocomplete_options, True )
pat_character_samus_ar = ClientSearch.ParsedAutocompleteText( 'character:samus ar', tag_autocomplete_options, True )
pat_character_samus_br = ClientSearch.ParsedAutocompleteText( 'character:samus br', tag_autocomplete_options, True )
pat_metroid = ClientSearch.ParsedAutocompleteText( 'metroid', tag_autocomplete_options, True )
pat_series_samus = ClientSearch.ParsedAutocompleteText( 'series:samus', tag_autocomplete_options, True )
samus = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus' )
samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus aran' )
character_samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'character:samus aran' )
predicates = [ samus ]
@ -936,22 +1051,54 @@ class TestTagObjects( unittest.TestCase ):
self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, False ), False )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus' ) ), { samus } )
#
def test_predicate_results_cache_full_normal( self ):
tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
search_namespaces_into_full_tags = False
namespace_bare_fetch_all_allowed = False
namespace_fetch_all_allowed = False
fetch_all_allowed = False
tag_autocomplete_options.SetTuple(
tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed
)
pat_empty = ClientSearch.ParsedAutocompleteText( '', tag_autocomplete_options, True )
pat_samus = ClientSearch.ParsedAutocompleteText( 'samus', tag_autocomplete_options, True )
pat_samus_ar = ClientSearch.ParsedAutocompleteText( 'samus ar', tag_autocomplete_options, True )
pat_samus_br = ClientSearch.ParsedAutocompleteText( 'samus br', tag_autocomplete_options, True )
pat_character_samus = ClientSearch.ParsedAutocompleteText( 'character:samus', tag_autocomplete_options, True )
pat_character_samus_ar = ClientSearch.ParsedAutocompleteText( 'character:samus ar', tag_autocomplete_options, True )
pat_character_samus_br = ClientSearch.ParsedAutocompleteText( 'character:samus br', tag_autocomplete_options, True )
pat_metroid = ClientSearch.ParsedAutocompleteText( 'metroid', tag_autocomplete_options, True )
pat_series_samus = ClientSearch.ParsedAutocompleteText( 'series:samus', tag_autocomplete_options, True )
samus = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus' )
samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus aran' )
character_samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'character:samus aran' )
predicates = [ character_samus_aran ]
@ -959,22 +1106,22 @@ class TestTagObjects( unittest.TestCase ):
self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( '', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'samus ar', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus', False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus ar', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus ar', False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus br', True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'character:samus br', False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'metroid', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( 'series:samus', False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_empty, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_samus_ar, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus, False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus_ar, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus_ar, False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus_br, True ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_character_samus_br, False ), True )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_metroid, False ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, True ), False )
self.assertEqual( predicate_results_cache.CanServeTagResults( pat_series_samus, False ), False )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus' ) ), { character_samus_aran } )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus*' ) ), { character_samus_aran } )
@ -983,6 +1130,440 @@ class TestTagObjects( unittest.TestCase ):
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus aran*' ) ), { character_samus_aran } )
def test_predicate_results_cache_namespace_explicit_fetch_all( self ):
    """A cache filled from an explicit 'character:*' search can serve later
    'character:...' searches only once namespace_fetch_all_allowed is on.
    
    Each section sets the fetch flags, rebuilds the parsed texts (they capture
    the options object's state at construction), and checks CanServeTagResults
    for both values of its second flag (presumably 'exact match' -- confirm
    against ClientSearch).
    """
    
    tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
    
    def set_fetch_options( search_namespaces_into_full_tags, namespace_bare_fetch_all_allowed, namespace_fetch_all_allowed, fetch_all_allowed ):
        
        # only the four fetch flags vary in this test; the domain settings are passed through unchanged
        tag_autocomplete_options.SetTuple(
            tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
            tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
            tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
            search_namespaces_into_full_tags,
            namespace_bare_fetch_all_allowed,
            namespace_fetch_all_allowed,
            fetch_all_allowed
        )
        
    
    def make_pats():
        
        # must be rebuilt after every set_fetch_options call
        return [ ClientSearch.ParsedAutocompleteText( text, tag_autocomplete_options, True ) for text in (
            '',
            'samus',
            'samus ar',
            'character:samus',
            'character:samus ar',
            'character:samus br',
            'metroid',
            'series:samus'
        ) ]
        
    
    def assert_serves( pats_and_expected_results ):
        
        for ( pat, expected_result ) in pats_and_expected_results:
            
            for exact_match in ( True, False ):
                
                self.assertEqual( predicate_results_cache.CanServeTagResults( pat, exact_match ), expected_result )
                
            
        
    
    set_fetch_options( False, False, False, False )
    
    ( pat_empty, pat_samus, pat_samus_ar, pat_character_samus, pat_character_samus_ar, pat_character_samus_br, pat_metroid, pat_series_samus ) = make_pats()
    
    character_samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'character:samus aran' )
    
    predicates = [ character_samus_aran ]
    
    predicate_results_cache = ClientSearch.PredicateResultsCacheTag( predicates, 'character:*', False )
    
    self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
    
    # with every fetch-all option off, the 'character:*' cache cannot serve anything
    assert_serves( [
        ( pat_empty, False ),
        ( pat_samus, False ),
        ( pat_samus_ar, False ),
        ( pat_character_samus, False ),
        ( pat_character_samus_ar, False ),
        ( pat_character_samus_br, False ),
        ( pat_metroid, False ),
        ( pat_series_samus, False )
    ] )
    
    #
    
    set_fetch_options( False, False, True, False )
    
    ( pat_empty, pat_samus, pat_samus_ar, pat_character_samus, pat_character_samus_ar, pat_character_samus_br, pat_metroid, pat_series_samus ) = make_pats()
    
    # namespace fetch-all is now allowed, so 'character:' subqueries are servable; other namespaces and bare tags are not
    assert_serves( [
        ( pat_empty, False ),
        ( pat_samus, False ),
        ( pat_samus_ar, False ),
        ( pat_character_samus, True ),
        ( pat_character_samus_ar, True ),
        ( pat_character_samus_br, True ),
        ( pat_metroid, False ),
        ( pat_series_samus, False )
    ] )
    
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus ar*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus br*' ) ), set() )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus aran*' ) ), { character_samus_aran } )
def test_predicate_results_cache_namespace_bare_fetch_all( self ):
    """A cache filled from a bare 'character:' search can serve later
    'character:...' searches only once namespace_bare_fetch_all_allowed is on.
    
    Each section sets the fetch flags, rebuilds the parsed texts (they capture
    the options object's state at construction), and checks CanServeTagResults
    for both values of its second flag (presumably 'exact match' -- confirm
    against ClientSearch).
    """
    
    tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
    
    def set_fetch_options( search_namespaces_into_full_tags, namespace_bare_fetch_all_allowed, namespace_fetch_all_allowed, fetch_all_allowed ):
        
        # only the four fetch flags vary in this test; the domain settings are passed through unchanged
        tag_autocomplete_options.SetTuple(
            tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
            tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
            tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
            search_namespaces_into_full_tags,
            namespace_bare_fetch_all_allowed,
            namespace_fetch_all_allowed,
            fetch_all_allowed
        )
        
    
    def make_pats():
        
        # must be rebuilt after every set_fetch_options call
        return [ ClientSearch.ParsedAutocompleteText( text, tag_autocomplete_options, True ) for text in (
            '',
            'samus',
            'samus ar',
            'character:samus',
            'character:samus ar',
            'character:samus br',
            'metroid',
            'series:samus'
        ) ]
        
    
    def assert_serves( pats_and_expected_results ):
        
        for ( pat, expected_result ) in pats_and_expected_results:
            
            for exact_match in ( True, False ):
                
                self.assertEqual( predicate_results_cache.CanServeTagResults( pat, exact_match ), expected_result )
                
            
        
    
    set_fetch_options( False, False, False, False )
    
    ( pat_empty, pat_samus, pat_samus_ar, pat_character_samus, pat_character_samus_ar, pat_character_samus_br, pat_metroid, pat_series_samus ) = make_pats()
    
    character_samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'character:samus aran' )
    
    predicates = [ character_samus_aran ]
    
    predicate_results_cache = ClientSearch.PredicateResultsCacheTag( predicates, 'character:', False )
    
    self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
    
    # with every fetch-all option off, the bare 'character:' cache cannot serve anything
    assert_serves( [
        ( pat_empty, False ),
        ( pat_samus, False ),
        ( pat_samus_ar, False ),
        ( pat_character_samus, False ),
        ( pat_character_samus_ar, False ),
        ( pat_character_samus_br, False ),
        ( pat_metroid, False ),
        ( pat_series_samus, False )
    ] )
    
    #
    
    set_fetch_options( False, True, True, False )
    
    ( pat_empty, pat_samus, pat_samus_ar, pat_character_samus, pat_character_samus_ar, pat_character_samus_br, pat_metroid, pat_series_samus ) = make_pats()
    
    # bare namespace fetch-all is now allowed, so 'character:' subqueries are servable; other namespaces and bare tags are not
    assert_serves( [
        ( pat_empty, False ),
        ( pat_samus, False ),
        ( pat_samus_ar, False ),
        ( pat_character_samus, True ),
        ( pat_character_samus_ar, True ),
        ( pat_character_samus_br, True ),
        ( pat_metroid, False ),
        ( pat_series_samus, False )
    ] )
    
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus ar*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus br*' ) ), set() )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus aran*' ) ), { character_samus_aran } )
def test_predicate_results_cache_namespaces_into_full_tags( self ):
    """A cache filled from the partial-namespace search 'char' can serve later
    'character:...' searches only once search_namespaces_into_full_tags is on.
    
    Each section sets the fetch flags, rebuilds the parsed texts (they capture
    the options object's state at construction), and checks CanServeTagResults
    for both values of its second flag (presumably 'exact match' -- confirm
    against ClientSearch).
    """
    
    tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
    
    def set_fetch_options( search_namespaces_into_full_tags, namespace_bare_fetch_all_allowed, namespace_fetch_all_allowed, fetch_all_allowed ):
        
        # only the four fetch flags vary in this test; the domain settings are passed through unchanged
        tag_autocomplete_options.SetTuple(
            tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
            tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
            tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
            search_namespaces_into_full_tags,
            namespace_bare_fetch_all_allowed,
            namespace_fetch_all_allowed,
            fetch_all_allowed
        )
        
    
    def make_pats():
        
        # must be rebuilt after every set_fetch_options call
        return [ ClientSearch.ParsedAutocompleteText( text, tag_autocomplete_options, True ) for text in (
            '',
            'samus',
            'samus ar',
            'character:samus',
            'character:samus ar',
            'character:samus br',
            'metroid',
            'series:samus'
        ) ]
        
    
    def assert_serves( pats_and_expected_results ):
        
        for ( pat, expected_result ) in pats_and_expected_results:
            
            for exact_match in ( True, False ):
                
                self.assertEqual( predicate_results_cache.CanServeTagResults( pat, exact_match ), expected_result )
                
            
        
    
    set_fetch_options( False, False, False, False )
    
    ( pat_empty, pat_samus, pat_samus_ar, pat_character_samus, pat_character_samus_ar, pat_character_samus_br, pat_metroid, pat_series_samus ) = make_pats()
    
    character_samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'character:samus aran' )
    
    predicates = [ character_samus_aran ]
    
    predicate_results_cache = ClientSearch.PredicateResultsCacheTag( predicates, 'char', False )
    
    self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
    
    # with every fetch-all option off, the 'char' cache cannot serve anything
    assert_serves( [
        ( pat_empty, False ),
        ( pat_samus, False ),
        ( pat_samus_ar, False ),
        ( pat_character_samus, False ),
        ( pat_character_samus_ar, False ),
        ( pat_character_samus_br, False ),
        ( pat_metroid, False ),
        ( pat_series_samus, False )
    ] )
    
    #
    
    set_fetch_options( True, True, True, False )
    
    ( pat_empty, pat_samus, pat_samus_ar, pat_character_samus, pat_character_samus_ar, pat_character_samus_br, pat_metroid, pat_series_samus ) = make_pats()
    
    # searching namespaces into full tags is now on, so the 'char' cache covers 'character:' subqueries; other namespaces and bare tags are not servable
    assert_serves( [
        ( pat_empty, False ),
        ( pat_samus, False ),
        ( pat_samus_ar, False ),
        ( pat_character_samus, True ),
        ( pat_character_samus_ar, True ),
        ( pat_character_samus_br, True ),
        ( pat_metroid, False ),
        ( pat_series_samus, False )
    ] )
    
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus ar*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus br*' ) ), set() )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus aran*' ) ), { character_samus_aran } )
def test_predicate_results_cache_fetch_all_madness( self ):
    """A cache filled from a full wildcard '*' search can serve every later
    search, but only once fetch_all_allowed (and friends) are all on.
    
    Each section sets the fetch flags, rebuilds the parsed texts (they capture
    the options object's state at construction), and checks CanServeTagResults
    for both values of its second flag (presumably 'exact match' -- confirm
    against ClientSearch).
    """
    
    tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )
    
    def set_fetch_options( search_namespaces_into_full_tags, namespace_bare_fetch_all_allowed, namespace_fetch_all_allowed, fetch_all_allowed ):
        
        # only the four fetch flags vary in this test; the domain settings are passed through unchanged
        tag_autocomplete_options.SetTuple(
            tag_autocomplete_options.GetWriteAutocompleteTagDomain(),
            tag_autocomplete_options.OverridesWriteAutocompleteFileDomain(),
            tag_autocomplete_options.GetWriteAutocompleteFileDomain(),
            search_namespaces_into_full_tags,
            namespace_bare_fetch_all_allowed,
            namespace_fetch_all_allowed,
            fetch_all_allowed
        )
        
    
    def make_pats():
        
        # must be rebuilt after every set_fetch_options call
        return [ ClientSearch.ParsedAutocompleteText( text, tag_autocomplete_options, True ) for text in (
            '',
            'samus',
            'samus ar',
            'character:samus',
            'character:samus ar',
            'character:samus br',
            'metroid',
            'series:samus'
        ) ]
        
    
    def assert_serves( pats, expected_result ):
        
        # in this test all pats share one expected answer per section
        for pat in pats:
            
            for exact_match in ( True, False ):
                
                self.assertEqual( predicate_results_cache.CanServeTagResults( pat, exact_match ), expected_result )
                
            
        
    
    set_fetch_options( False, False, False, False )
    
    pats = make_pats()
    
    samus = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus' )
    samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'samus aran' )
    character_samus_aran = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'character:samus aran' )
    
    predicates = [ samus, samus_aran, character_samus_aran ]
    
    predicate_results_cache = ClientSearch.PredicateResultsCacheTag( predicates, '*', False )
    
    self.assertEqual( predicate_results_cache.GetPredicates(), predicates )
    
    # with every fetch-all option off, even the full '*' cache cannot serve anything
    assert_serves( pats, False )
    
    #
    
    set_fetch_options( True, True, True, True )
    
    pats = make_pats()
    
    # with everything allowed, the '*' cache can serve every search
    assert_serves( pats, True )
    
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus ar*' ) ), { character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus br*' ) ), set() )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus aran*' ) ), { character_samus_aran } )
    
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus' ) ), { samus, samus_aran, character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus*' ) ), { samus, samus_aran, character_samus_aran } )
    # was 'samas br*' -- a typo; the intended no-match prefix, parallel with the 'character:samus br*' check above, is 'samus br*' (both spellings expect an empty result)
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus br*' ) ), set() )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus ar*' ) ), { samus_aran, character_samus_aran } )
    self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'samus aran*' ) ), { samus_aran, character_samus_aran } )
def test_predicate_strings_and_namespaces( self ):
p = ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'tag' )

View File

@ -5,13 +5,16 @@ from hydrus.core import HydrusGlobals as HG
from hydrus.client import ClientThreading
READ_JOB_DURATION = 0.1
WRITE_JOB_DURATION = 0.2
def do_read_job( rwlock, result_list, name ):
with rwlock.read:
result_list.append( 'begin read {}'.format( name ) )
time.sleep( 0.25 )
time.sleep( READ_JOB_DURATION )
result_list.append( 'end read {}'.format( name ) )
@ -23,7 +26,7 @@ def do_write_job( rwlock, result_list, name ):
result_list.append( 'begin write {}'.format( name ) )
time.sleep( 0.5 )
time.sleep( WRITE_JOB_DURATION )
result_list.append( 'end write {}'.format( name ) )
@ -65,14 +68,9 @@ class TestFileRWLock( unittest.TestCase ):
result_list = []
HG.test_controller.CallLater( 0.0, do_read_job, rwlock, result_list, '1' )
HG.test_controller.CallLater( 0.1, do_read_job, rwlock, result_list, '2' )
HG.test_controller.CallLater( 0.05, do_read_job, rwlock, result_list, '2' )
time.sleep( 0.2 )
with rwlock.write:
pass
time.sleep( READ_JOB_DURATION * 2 + 0.2 )
results = set( result_list )
@ -92,12 +90,7 @@ class TestFileRWLock( unittest.TestCase ):
HG.test_controller.CallLater( 0.0, do_read_job, rwlock, result_list, str( i ) )
time.sleep( 0.2 )
with rwlock.write:
pass
time.sleep( READ_JOB_DURATION * 10 + 0.2 )
expected_results = set()
@ -118,14 +111,9 @@ class TestFileRWLock( unittest.TestCase ):
result_list = []
HG.test_controller.CallLater( 0.0, do_write_job, rwlock, result_list, '1' )
HG.test_controller.CallLater( 0.1, do_write_job, rwlock, result_list, '2' )
HG.test_controller.CallLater( 0.05, do_write_job, rwlock, result_list, '2' )
time.sleep( 0.2 )
with rwlock.read:
pass
time.sleep( WRITE_JOB_DURATION * 2 + 0.2 )
expected_result = []
@ -143,12 +131,7 @@ class TestFileRWLock( unittest.TestCase ):
HG.test_controller.CallLater( 0.0, do_write_job, rwlock, result_list, str( i ) )
time.sleep( 0.2 )
with rwlock.read:
pass
time.sleep( WRITE_JOB_DURATION * 10 + 0.2 )
expected_results = set()
@ -177,29 +160,19 @@ class TestFileRWLock( unittest.TestCase ):
for i in range( 10 ):
HG.test_controller.CallLater( 0.0 * i, do_read_job, rwlock, result_list, str( i ) )
HG.test_controller.CallLater( 0.02 * i, do_read_job, rwlock, result_list, str( i ) )
all_expected_results.update( [ 'begin read {}'.format( i ), 'end read {}'.format( i ) ] )
for i in range( 5 ):
HG.test_controller.CallLater( 0.0 * i, do_write_job, rwlock, result_list, str( i ) )
HG.test_controller.CallLater( 0.05 * i, do_write_job, rwlock, result_list, str( i ) )
all_expected_results.update( [ 'begin write {}'.format( i ), 'end write {}'.format( i ) ] )
time.sleep( 0.2 )
with rwlock.read:
pass
with rwlock.write:
pass
time.sleep( READ_JOB_DURATION * 10 + WRITE_JOB_DURATION * 5 + 0.2 )
self.assertEqual( set( result_list ), all_expected_results )

View File

@ -733,7 +733,11 @@ class Controller( object ):
TestHydrusSerialisable,
TestHydrusSessions
]
module_lookup[ 'tags_fast' ] = [
TestClientTags
]
module_lookup[ 'tags' ] = [
TestClientTags,
TestClientDBTags

View File

@ -41,8 +41,9 @@ try:
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
argparser.add_argument( '--db_synchronous_override', choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )
argparser.add_argument( '--no_daemons', action='store_true', help = 'run without background daemons' )
argparser.add_argument( '--no_wal', action='store_true', help = 'OBSOLETE: run using TRUNCATE db journaling' )
argparser.add_argument( '--db_memory_journaling', action='store_true', help = 'OBSOLETE: run using MEMORY db journaling (DANGEROUS)' )
@ -117,6 +118,8 @@ try:
HG.no_db_temp_files = result.no_db_temp_files
HG.boot_debug = result.boot_debug
if result.temp_dir is not None:
HydrusPaths.SetEnvTempDir( result.temp_dir )