commit 310f3015c062ff5bbe86fdb1fc6c574e2643555d Author: Tenshi Hinanawi Date: Mon Feb 18 16:11:43 2013 -0800 Initial commit from version 57 diff --git a/.directory b/.directory new file mode 100644 index 00000000..6797547d --- /dev/null +++ b/.directory @@ -0,0 +1,3 @@ +[Dolphin] +Timestamp=2013,2,18,16,11,30 +Version=3 diff --git a/Readme.txt b/Readme.txt new file mode 100755 index 00000000..06b7f864 --- /dev/null +++ b/Readme.txt @@ -0,0 +1,5 @@ +If you are reading this, you probably extracted rather than installed, so you were not prompted to read the help. + +The hydrus client can do a lot, so do please go into the help folder and open up index.html so you don't get lost. Thanks! + +I use a number of the Silk Icons by Mark James at famfamfam.com. \ No newline at end of file diff --git a/client.pyw b/client.pyw new file mode 100755 index 00000000..3254a02b --- /dev/null +++ b/client.pyw @@ -0,0 +1,25 @@ +# This program is free software. It comes without any warranty, to +# the extent permitted by applicable law. You can redistribute it +# and/or modify it under the terms of the Do What The Fuck You Want +# To Public License, Version 2, as published by Sam Hocevar. See +# http://sam.zoy.org/wtfpl/COPYING for more details. + +import os +from include import HydrusConstants as HC +from include import ClientController + +try: + + app = ClientController.Controller( True, HC.LOGS_DIR + os.path.sep + 'client.log' ) + + app.MainLoop() + +except: + + import traceback + print( traceback.format_exc() ) + + +HC.shutdown = True + +HC.pubsub.pubimmediate( 'shutdown' ) diff --git a/help/6c0ae65894c7a5ffd686f54cc052326b8ea188a691a1895b2f88b7c60a07f13f.jpg b/help/6c0ae65894c7a5ffd686f54cc052326b8ea188a691a1895b2f88b7c60a07f13f.jpg new file mode 100755 index 00000000..a4e29664 Binary files /dev/null and b/help/6c0ae65894c7a5ffd686f54cc052326b8ea188a691a1895b2f88b7c60a07f13f.jpg differ diff --git a/help/Database Diagrams - Old.svg b/help/Database Diagrams - Old.svg new file mode 100755 index 00000000..3fabf02e --- /dev/null +++ b/help/Database Diagrams - Old.svg @@ -0,0 +1,2308 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + accounts + + + + mappings + + + + bans + + + + petitions + + + + reasons + + + + deleted_mappings + + + + + + + + + tags + + + + + hashes + + + + + + + + + + + + + + + + + + + + + + + + + + + + tags + + + + namespaces + + + + public_mappings + + + + local_files + + + + local_thumbnails + + + + local_thumbnails_resized + + + + reasons + + + + remote_files + + + + remote_thumbnails + + + + remote_thumbnails_resized + + + + file_repositories + + + + file_repository_news + + + + pending_file_petitions + + + + pending_public_mapping_petitions + + + + pending_files + + librarium + file repository + + + accounts + + + + local_files + + + + options + + + + update_cache + + + + news + + + + bans + + + + petitions + + + + thumbnails + + + + reasons + + + + deleted_files + + + + + + + + + + options + + + + deleted_remote_files + + + + + file_downloads + + + + + version + + + + public_tag_repository + + + + public_tag_repository_news + + + + + ip_addresses + + + + deleted_local_files + + + + hashes + + + + + + + + + version + + + + hashes + + + + + hydrus repository + + + + options + + + + update_cache + + + + news + + + + version + + hydrus repository + tag repository + + + files + + + + + + files + + + + + + + + pending_public_mappings + + + + 
deleted_public_mappings + + + + diff --git a/help/Database Diagrams.svg b/help/Database Diagrams.svg new file mode 100755 index 00000000..fbd44cd1 --- /dev/null +++ b/help/Database Diagrams.svg @@ -0,0 +1,2835 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + server + + + + + + + + + + + + + + + + + + + + + + + + + + + + + account_map + + + + account_scores + + + + files_info + + + + mapping_petitions + + + + thumbnails + + + + account_type_map + + + + bans + + + + reasons + + + + tags + + + + version + + + + files + + + + services + + + + hashes + + + + ip_addresses + + + + mappings + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + deleted_mappings + + + + account_types + + + + update_cache + + + + news + + + + accounts + + + + deleted_files + + + + file_map + + + + file_petitions + + + + + message_statuses + + + + messages + + + + + + + tags + + + + namespaces + + + + reasons + + client + + + options + + + + version + + + + hashes + + + + files + + + + + addresses + + + + accounts + + + + repositories + + + + services + + + + news + + + + deleted_mappings + + + + mapping_petitions + + + + active_mappings + + + + deleted_files + + + + files_info + + + + file_transfers + + + + file_inbox + + + + thumbnails + + + + thumbnails_resized + + + + + + + + + + + + pending_mappings + + + + mappings + + + + service_info + + + + tag_service_precedence + + + + + message_depots + + + + message_keys + + + + message_attachments + + + + message_inbox + + + + statuses + + + + contacts + + + + message_downloads + + + + message_drafts + + + + messages + + + + message_statuses_to_apply + + + + + + + + + + + + + + + + + + message_destination_map + + + + + + file_petitions + + + + + message_bodies + + + + conversation_subjects + + + diff --git a/help/access_keys.html b/help/access_keys.html new file mode 100755 index 00000000..9336f13a --- /dev/null +++ b/help/access_keys.html @@ -0,0 +1,23 @@ + + + access keys + + + + +
+

first off

+

I have purposely not pre-baked this info into the client's install. You have to put it in yourself. It won't connect anywhere until you tell it to.

+

access keys

+

If this stuff be-fuzzles you, you can now go help->i don't know what I am doing->just set up some repositories for me, please and you _should_ be all set up automatically.

+

I run a public, objective tag repository that you are welcome to contribute to. I also run a read-only file repository that you can search to get a feel for the interface. The files on my file repo are appropriately tagged in my tag repo.

+

+

+

Here's the info so you can copy it:

+ +
+ + \ No newline at end of file diff --git a/help/advanced.html b/help/advanced.html new file mode 100755 index 00000000..b7690d0e --- /dev/null +++ b/help/advanced.html @@ -0,0 +1,69 @@ + + + advanced + + + + +
+

using flash in fullscreen view

+

Flash files are sometimes interested in inputs (like spacebar or mouse-scrollwheel) that mean something to hydrus's fullscreen view, and the libraries I have to use to show flash don't handle these events like normal windows. I now have it set so if your mouse is inside the flash window, the input will go to the flash, and if it is outside, it goes to the fullscreen window. Unless the flash is set otherwise, your mouse cursor should show up when it moves into the flash window.

+

So, if you want to play a flash game in fullscreen, keep your mouse inside the window. If you want to filter some flash files real quick, keep your cursor at the edge of your screen.

+

exclude deleted files

+

In the client's options is a checkbox to exclude deleted files. It occurs again pretty much anywhere you can import, under 'advanced import options'. If you select this, any file you ever deleted will be excluded from all future remote searches and import operations. This can stop you from importing/downloading and filtering out the same bad files several times over. The default is off. You may wish to have it set one way most of the time, but switch it the other just for one specific import or search.

+

importing and adding tags at the same time

+

Add tags before importing on file->import files lets you give tags to the files you import en masse, and intelligently, using regexes that parse the filename:

+

+

This should be somewhat self-explanatory to anyone familiar with regexes. I hate them, personally, but I recognise they are powerful and exactly the right tool to use in this case. This is a good introduction, if you are not certain about regexes.
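As a rough illustration of the kind of parsing the dialog does, here is a sketch in python; the filename pattern and the namespaces are made-up examples, not anything the client prescribes:

import re

filename = 'gunnerkrigg court v02 c014 p005.png' # hypothetical filename

match = re.search( r'v(\d+) c(\d+) p(\d+)', filename )

if match is not None:
    
    ( volume, chapter, page ) = [ str( int( group ) ) for group in match.groups() ]
    
    tags = [ 'volume:' + volume, 'chapter:' + chapter, 'page:' + page ]
    
    # tags is now [ 'volume:2', 'chapter:14', 'page:5' ]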

+

Once you are done, you'll get something neat like this:

+

+

Which you can more easily manage by collecting:

+

+

Collections have a small icon in the bottom left corner. Selecting them actually selects many files (see the status bar), and performing an action on them (like archiving, uploading) will do so to every file in the collection. Viewing collections fullscreen pages through their contents just like an uncollected search.

+

Here is a particularly zoomed out view, after importing volume 2:

+

+

Importing with tags is great for long-running series with well-formatted filenames, and will save you literally hours of finicky tagging.

+

custom filter

+

Once you are comfortable with the client's tagging and rating, you may be interested in performing a custom filter, which is essentially the fullscreen browser with custom shortcuts. You select it from the regular thumbnail right-click menu. First, it will show you a dialog:

+

+

Which has a sub-dialog to add and edit actions:

+

+

You can reassign the default shortcuts for regular things, like archive/delete and opening tag/ratings dialogs, and also add shortcuts for adding/removing a tag or setting/unsetting a rating. Shortcuts do not yet combine; they overwrite.

+

Once you hit ok on the parent dialog, the fullscreen browser will launch. Navigation and zooming happens as normal with the mouse and keyboard, unless you have overwritten a shortcut! Hitting any of the shortcuts you declared should carry out the action. Tags will pend/rescind pend or petition/rescind petition or add/delete as appropriate to the type of tag service and the tag's status for the particular file.

+

The shortcuts you set will be active only for that session; they will be forgotten as soon as you close the browser. This behaviour may change, if people desire it. (remembering the last settings, or having favourites, maybe? send me your thoughts.)

+

finding duplicates

+

system:similar_to takes two arguments: a hash and an integer representing max hamming distance (0 means exactly the same, 64 means everything. 5 is good for finding dupes). You can quick-select it from a file's right-click menu. It returns all images that are very similar to the hash. For example:

+

Here are a couple of duplicates, found despite their different resolution.

+

+

And some images of similar shape but not colour.

+

+

If you are careful, you can find images that look only somewhat like your hash. You get a lot of false positives with hamming distance of much more than 12, though.
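For the curious, the distance itself is just a bit count over the XOR of the two 64-bit similarity hashes. A minimal sketch (how the hashes are generated is not shown):

def hamming_distance( phash_a, phash_b ):
    
    # phash_a and phash_b are 64-bit similarity hashes, given as ints
    return bin( phash_a ^ phash_b ).count( '1' )

# 0 means identical hashes, 64 means every bit differs; <= 5 works well for dupes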

+

+

PIL errors

+

At some point, you will probably encounter a PIL error when importing a file. PIL is the Python Image Library, the code I use to manipulate image files. Some files are kooky, and just won't load with it. I can't fix these errors, since PIL is not mine. It would take me thousands of hours to write my own image library, and even then it would have its own odd errors. Just gotta deal with it.

+

If the PIL error'ing file is one you particularly care about, I suggest you import it into photoshop or similar and save it again. Photoshop should be clever enough to parse the file's weirdness, and then it'll hopefully save again to a simpler format which PIL, and hence the client, will be able to understand.

+

busted up gifs

+

Animated gifs are a real pain in the neck. There are several loopholes in the standard that permit odd palettes and colourspaces, and PIL has a hard time parsing it all.

+

So, some gifs will have a coloured first frame but grey frames thereafter; or they will have odd washy noise all over; or they will just be black. The file isn't broken, but the library is looking at it wrong. Every ten versions or so, I gather enough enthusiasm to fix a few more.

+

setting a password

+

The client offers a very simple password system, enough to keep out noobs. You can set it at database->set a password. It will thereafter ask for the password every time you start the program, and will not open without it. However, none of the database is encrypted, and someone with enough enthusiasm or a tool and access to your computer can still very easily see what files you have. The password is mainly to stop idle snoops checking your images if you are away from your machine.

+

backing up

+

All of the client's files, mappings, service credentials, options, everything, are stored in the /db folder beneath the main install directory. If you want to back your data up, just turn off the client and copy the directory somewhere. Copy it back to restore.

+

The same is true of a server. Turn it off and copy the db folder, or send a backup command via the server admin menu and copy the resultant .db.backup files and relevant subdirectories.

+

the client's server

+

The client runs a very simple http server. I want to do much more with it in future.

+

When you boot the client, it will try to host a service on port 45865, which will respond to /file and /thumbnail requests just like a file repository, but without needing an access key, and only to localhost (127.0.0.1).

+

For instance, the following image (6c0ae65894c7a5ffd686f54cc052326b8ea188a691a1895b2f88b7c60a07f13f.jpg, in the help dir) is served here from disk:

+

+

And here it will attempt to load from the client:

+

+

For more information, check the image's two urls. It will of course only display in the second case if you import it to the client and have it running when you load this page. You can copy the second image's url and replace the hash with that of any other image or swf in your collection and it should work.
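If you want to script against it, something like this sketch should work, assuming the hash is passed as a query parameter in the same way as the example urls above (copy a real url from this page to check the exact format):

import urllib.request

hash_hex = '6c0ae65894c7a5ffd686f54cc052326b8ea188a691a1895b2f88b7c60a07f13f'

url = 'http://127.0.0.1:45865/file?hash=' + hash_hex # assumed url format

data = urllib.request.urlopen( url ).read()

with open( 'exported_file.jpg', 'wb' ) as f: # hypothetical output path
    
    f.write( data )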

+

Needless to say, this starts to break if you try to run several different copies of the client at once.

+

a note concerning memory usage

+

The client does a lot of caching. It eats plenty of memory, and if you go crazy you can crash the program. A default-sized 4000-strong thumbnail pane is really just a scrollable 250MB-ish bitmap, so opening up a dozen large searches will start pushing your OS's 32-bit upper memory limits. If you find the client crashing, slow down a little or reduce the max cache sizes and thumbnail dimensions in the options.
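The 250MB figure is easy to sanity-check. A rough sketch of the arithmetic, assuming default thumbnail dimensions of about 150x125 pixels and a 24-bit bitmap:

thumbnail_width = 150 # assumed default dimensions
thumbnail_height = 125
bytes_per_pixel = 3 # 24-bit colour

pane_bytes = 4000 * thumbnail_width * thumbnail_height * bytes_per_pixel

print( pane_bytes / ( 1024.0 * 1024.0 ) ) # ~215MB, before any padding or overhead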

+

vacuum

+

After a lot of use, pretty much all of the client's database's pages will be fragged and inefficiently packed. If you think the client is running sluggish, going database->vacuum will rebuild the database entirely, making it as efficient as it can possibly be. It takes a minute or so.

+

If the db is sluggish even after a vacuum, please send me an email with your rough details. I am always interested in speeding up bad code.

+
+ + \ No newline at end of file diff --git a/help/asolutionthatmaximisesutility.gif b/help/asolutionthatmaximisesutility.gif new file mode 100755 index 00000000..948e40b5 Binary files /dev/null and b/help/asolutionthatmaximisesutility.gif differ diff --git a/help/autocomplete_dropdown.png b/help/autocomplete_dropdown.png new file mode 100755 index 00000000..14b4211c Binary files /dev/null and b/help/autocomplete_dropdown.png differ diff --git a/help/autocomplete_dropdown_overlay.png b/help/autocomplete_dropdown_overlay.png new file mode 100755 index 00000000..5d62ff56 Binary files /dev/null and b/help/autocomplete_dropdown_overlay.png differ diff --git a/help/autocomplete_dropdown_overlay.svg b/help/autocomplete_dropdown_overlay.svg new file mode 100755 index 00000000..9189e09a --- /dev/null +++ b/help/autocomplete_dropdown_overlay.svg @@ -0,0 +1,109 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + type here, get existing tags here + + + + + + diff --git a/help/basic_network_diagram.png b/help/basic_network_diagram.png new file mode 100755 index 00000000..ea488b48 Binary files /dev/null and b/help/basic_network_diagram.png differ diff --git a/help/basic_network_diagram.svg b/help/basic_network_diagram.svg new file mode 100755 index 00000000..50154028 --- /dev/null +++ b/help/basic_network_diagram.svg @@ -0,0 +1,637 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + publictagrepository + + + + filerepository + + + + filerepository + + + + filerepository + + + + filerepository + + + + + + + + + + + + + + + + + + librarium + + + + librarium + + + + librarium + + + + librarium + + + + librarium + + + + librarium + + + diff --git a/help/changelog.html b/help/changelog.html new file mode 100755 index 00000000..e07e2af4 --- /dev/null +++ b/help/changelog.html @@ -0,0 +1,835 @@ + + + changelog + + + + +
+

changelog

+ +
+ + \ No newline at end of file diff --git a/help/client_autism.png b/help/client_autism.png new file mode 100755 index 00000000..4918af42 Binary files /dev/null and b/help/client_autism.png differ diff --git a/help/client_autism_small.png b/help/client_autism_small.png new file mode 100755 index 00000000..bcb6d10d Binary files /dev/null and b/help/client_autism_small.png differ diff --git a/help/client_auto.png b/help/client_auto.png new file mode 100755 index 00000000..3f10a834 Binary files /dev/null and b/help/client_auto.png differ diff --git a/help/client_auto_small.png b/help/client_auto_small.png new file mode 100755 index 00000000..64acad91 Binary files /dev/null and b/help/client_auto_small.png differ diff --git a/help/client_database_diagram.png b/help/client_database_diagram.png new file mode 100755 index 00000000..13815cc5 Binary files /dev/null and b/help/client_database_diagram.png differ diff --git a/help/client_empty.png b/help/client_empty.png new file mode 100755 index 00000000..18be360c Binary files /dev/null and b/help/client_empty.png differ diff --git a/help/client_empty_small.png b/help/client_empty_small.png new file mode 100755 index 00000000..ed02624e Binary files /dev/null and b/help/client_empty_small.png differ diff --git a/help/client_fullscreen.png b/help/client_fullscreen.png new file mode 100755 index 00000000..395200a6 Binary files /dev/null and b/help/client_fullscreen.png differ diff --git a/help/client_fullscreen_small.png b/help/client_fullscreen_small.png new file mode 100755 index 00000000..0c83f663 Binary files /dev/null and b/help/client_fullscreen_small.png differ diff --git a/help/contact.html b/help/contact.html new file mode 100755 index 00000000..0fe1f449 --- /dev/null +++ b/help/contact.html @@ -0,0 +1,24 @@ + + + contact + + + + +
+

contact and links

+

Please send bug reports straight to my email or hydrus messaging account. Your ideas and other comments are also welcome.

+

I don't really do chat, and I don't get caught up in the social stuff on twitter/tumblr. If you want to tell me something, please just send me a mail. I like to spend a day or so to think before replying to non-urgent emails, but I do reply to everything.

+

If you have a problem with something on someone's file repository, please, do not come to me, as I can in no way help with your problem. If your ex-gf's nudes have leaked onto the internet, or you find something terribly offensive, or you just plain hate the free flow of information, I cannot help you at all.

+

Anyway:

+ +

I try to make my work good and cheap. As a result, I am not very fast. Whenever I say 'x should be done within a week', please don't believe my optimism.

+
+ + \ No newline at end of file diff --git a/help/contacts.png b/help/contacts.png new file mode 100755 index 00000000..c65403d2 Binary files /dev/null and b/help/contacts.png differ diff --git a/help/content_post_update.jpg b/help/content_post_update.jpg new file mode 100755 index 00000000..fa11c077 Binary files /dev/null and b/help/content_post_update.jpg differ diff --git a/help/content_pre_update.jpg b/help/content_pre_update.jpg new file mode 100755 index 00000000..b960be43 Binary files /dev/null and b/help/content_pre_update.jpg differ diff --git a/help/custom_filter_child.png b/help/custom_filter_child.png new file mode 100755 index 00000000..9a49e851 Binary files /dev/null and b/help/custom_filter_child.png differ diff --git a/help/custom_filter_parent.png b/help/custom_filter_parent.png new file mode 100755 index 00000000..6e306207 Binary files /dev/null and b/help/custom_filter_parent.png differ diff --git a/help/db_diagrams.html b/help/db_diagrams.html new file mode 100755 index 00000000..c06fbeb3 --- /dev/null +++ b/help/db_diagrams.html @@ -0,0 +1,16 @@ + + + database diagrams + + + + +
+

database diagrams

+

I have included SQLite Studio with the releases so you can better understand, if you are so interested, how the programs work. You can drag a .db file straight onto the exe and it will open up for browsing.

+

Here are current diagrams for the databases. They are not strict UML, just a general guide. I refer to them while coding, so I thought I might as well include them here.

+

+

+
+ + \ No newline at end of file diff --git a/help/dc_devil.png b/help/dc_devil.png new file mode 100755 index 00000000..af269a0f Binary files /dev/null and b/help/dc_devil.png differ diff --git a/help/depots.html b/help/depots.html new file mode 100755 index 00000000..350bb8d3 --- /dev/null +++ b/help/depots.html @@ -0,0 +1,23 @@ + + + message depots + + + + +
+

how the hydrus network sends messages

+

Message depots mix hydrus's standard access key authentication with cryptographic principles to store clients' messages privately. They work a little like repositories, except anyone can upload data, and they do so anonymously.

+

All a message depot knows about its users are their public keys and which encrypted messages are for them. It does not know their private keys, and cannot decrypt the messages it stores.

+

I have made the encryption work as best I can, but it is very difficult to get cryptography 100% correct. I use AES-256 and RSA-2048 with a simple mostly-random-byte padding scheme and OAEP respectively, along with python's os.urandom() for the PRNG. I am fairly certain I have made no major errors, but I cannot guarantee that a dedicated and well-financed attacker cannot defeat it. Please feel free to check my source code (HydrusMessageHandling.py) if you are so interested. If you would like to know more about cryptography, go check out wikipedia. Hydrus uses both public-key and symmetric-key cryptography.
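To make the shape of that concrete, here is a minimal sketch of the general hybrid scheme using the third-party 'cryptography' package and ordinary PKCS7 block padding. It is an illustration of the idea only, not the actual HydrusMessageHandling.py code, which uses its own padding scheme:

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import padding as sym_padding
from cryptography.hazmat.primitives.asymmetric import padding as asym_padding
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

recipient_private_key = rsa.generate_private_key( public_exponent = 65537, key_size = 2048 )
recipient_public_key = recipient_private_key.public_key()

message = b'hello, this is a private message'

# encrypt the body with a fresh random AES-256 key
aes_key = os.urandom( 32 )
iv = os.urandom( 16 )

padder = sym_padding.PKCS7( 128 ).padder()
padded_message = padder.update( message ) + padder.finalize()

encryptor = Cipher( algorithms.AES( aes_key ), modes.CBC( iv ) ).encryptor()
ciphertext = encryptor.update( padded_message ) + encryptor.finalize()

# wrap the AES key with the recipient's RSA-2048 public key, OAEP padded
wrapped_key = recipient_public_key.encrypt(
    aes_key,
    asym_padding.OAEP( mgf = asym_padding.MGF1( algorithm = hashes.SHA256() ), algorithm = hashes.SHA256(), label = None ) )

# the depot only ever sees ( wrapped_key, iv, ciphertext ); without the private key, none of it can be read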

+

Contact keys are just sha256( PEM( public_key ) ).
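A sketch of that formula, again leaning on the 'cryptography' package for the key handling (the exact serialisation hydrus uses may differ):

import hashlib

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

public_key = rsa.generate_private_key( public_exponent = 65537, key_size = 2048 ).public_key()

pem = public_key.public_bytes(
    encoding = serialization.Encoding.PEM,
    format = serialization.PublicFormat.SubjectPublicKeyInfo )

contact_key = hashlib.sha256( pem ).hexdigest() # sha256( PEM( public_key ) )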

+

I plan to extend this service to better guarantee anonymity; at the moment, it is trivial for someone to alter their server's source code to record IP addresses, so I will test onion-routing algorithms or similar in future.

+

Here are some diagrams:

+

+

+

+

+

There is a little more to it (applying statuses to a message), but I have only half-implemented it. I shall flesh out the description once it is done.

+
+ + \ No newline at end of file diff --git a/help/downloads.png b/help/downloads.png new file mode 100755 index 00000000..578841e2 Binary files /dev/null and b/help/downloads.png differ diff --git a/help/dumping.png b/help/dumping.png new file mode 100755 index 00000000..99da338c Binary files /dev/null and b/help/dumping.png differ diff --git a/help/edit_repos_file_repo.png b/help/edit_repos_file_repo.png new file mode 100755 index 00000000..76bb4529 Binary files /dev/null and b/help/edit_repos_file_repo.png differ diff --git a/help/edit_repos_public_tag_repo.png b/help/edit_repos_public_tag_repo.png new file mode 100755 index 00000000..379b6dbc Binary files /dev/null and b/help/edit_repos_public_tag_repo.png differ diff --git a/help/example_convo.png b/help/example_convo.png new file mode 100755 index 00000000..55a4dd7b Binary files /dev/null and b/help/example_convo.png differ diff --git a/help/example_lib.png b/help/example_lib.png new file mode 100755 index 00000000..a3d7aeaa Binary files /dev/null and b/help/example_lib.png differ diff --git a/help/faq.html b/help/faq.html new file mode 100755 index 00000000..e7a92f66 --- /dev/null +++ b/help/faq.html @@ -0,0 +1,72 @@ + + + faq + + + + +
+

hold up, what is a repository?

+

A repository is a special kind of server in the hydrus network that stores a certain kind of information (files or tag mappings, for instance) as submitted by users all over the internet. Those users periodically synchronise with the repository so they know what it stores. Hydrus network clients never send queries to repositories; they download and cache all of a repository's searchable metadata and perform queries over that cache, locally, on the client's computer.

+

hold up, what is a tag?

+

wiki

+

A tag is a small bit of text describing a single property of something. They make searching easy. Good examples are "flower" or "nicolas cage" or "the sopranos" or "2003". By combining several tags together ( e.g. [ 'tiger woods', 'sports illustrated', '2008' ] or [ 'cosplay', 'the legend of zelda' ] ), a huge image collection is reduced to a tiny and easy-to-digest sample, usually in less than a second.

+

A good word for the connection of a particular tag to a particular file is mapping.
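A toy illustration of why mappings make search fast: each tag is just a set of file hashes, and a multi-tag query is the intersection of those sets (the hashes here are obviously placeholders):

mappings = {
    'cosplay' : { 'hash_a', 'hash_b', 'hash_c' },
    'the legend of zelda' : { 'hash_b', 'hash_c', 'hash_d' },
}

def search( tags ):
    
    return set.intersection( *( mappings[ tag ] for tag in tags ) )

print( search( [ 'cosplay', 'the legend of zelda' ] ) ) # {'hash_b', 'hash_c'}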

+

In the hydrus network, all tags are automatically converted to lower case. 'Sunset Drive' becomes 'sunset drive'. Why?

+
    +
  1. Although it may at first seem preferable to have proper capitalised titles, like 'The Lord of the Rings' rather than 'the lord of the rings', there are many, many special cases where style guides differ. There is no definitive correct capitalisation schema, so the simplest compromise is to not have any.
  +
  2. Searches become far easier when case is not matched. And when case does not matter, what point is there in recording it?
  +
+

Secondly, leading and trailing whitespace is removed, and multiple whitespace is collapsed to a single character.

'  yellow   dress '
becomes
'yellow dress'

+

Does this unjust censorship frustrate you?

+

why not use filenames and folders?

+

As a retrieval method, filenames and folders become worse and worse as the number of files increases. Why?

+ +

So, the client tracks files by their hash.

+

BTW: when exporting files, the client names them by their hexadecimalised hash, like so: f099b5823f4e36a4bd6562812582f60e49e818cf445902b504b5533c6a5dad94.jpg. This will probably change to a tag-munged filename in future.

+

Please do not tag your files with their exact original 'filename.jpg' on my public tag repo. Shed the concept of filenames as you would chains.

+

hold up, what is a hash?

+

wiki

+

Hashes are a subject one usually has to be a software engineer to find interesting. If you don't care to digest the wiki page, the simple answer is that hashes are guaranteed unique names for things. It can be proven that f099b5823f4e36a4bd6562812582f60e49e818cf445902b504b5533c6a5dad94 refers to one particular file and no other. Hashes make excellent, if ugly, identifiers. In the client's normal operation, you will never encounter a file's hash; if you like a thumbnail, double-click it; the software handles the mathematics.

+

For those who are interested: hydrus uses SHA-256, which spits out 32-byte (256-bit) hashes. The software stores and searches over the hash densely, as 32 bytes, only encoding it to 64 hex characters when the user views it or copies to clipboard. SHA-256 is not perfect, but it is a great compromise candidate; it is secure for now, it is reasonably fast, it is available for most programming languages, and newer CPUs perform it more efficiently all the time. Maybe when NIST decides on the SHA-3 winner we will have a grand switch over.
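In practice that looks something like this sketch (the filename is just an example):

import hashlib

def get_hash( path ):
    
    with open( path, 'rb' ) as f:
        
        return hashlib.sha256( f.read() ).digest() # 32 bytes, as stored and searched

file_hash = get_hash( 'some_image.jpg' ) # hypothetical path

print( file_hash.hex() ) # the 64-character hex string the user sees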

+

hold up, what is an access key?

+

The hydrus network's repositories do not use username/password, but instead a single combined identifier-password, like this: 7ce4dbf18f7af8b420ee942bae42030aab344e91dc0e839260fcd71a4c9879e3

+

These hex numbers give you access to a particular account on a particular repository. They are long enough to be impossible to guess, and also randomly generated, so they reveal nothing personally identifying about you. Many people can use the same access key (and hence the same account) on a repository without consequence, although they will have to share bandwidth limits, and if one person screws around and gets the account banned, they will all lose access.
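An access key is presumably minted in much the same way as any other random token; a sketch:

import os

access_key = os.urandom( 32 ) # 32 random bytes; reveals nothing about who requested it

print( access_key.hex() ) # the 64-character hex string you paste into the client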

+

why shouldn't I use a more mature platform?

+

Some applications like ACDSee try to make finding files easier than browsing explorer, but they are all-too-often:

+

+

+

+

Some websites like flickr and danbooru have crowd-sourced tags and offer fairly effective retrieval, but then they all-too-often have:

+

+

+

+

The hydrus network attempts to combine the privacy and low latency of local searching with the efficiency of crowd-sourcing.

+

why can my friend not see what I just uploaded?

+

The repositories do not work like conventional search engines; it takes a short but predictable while for changes to propagate to other users.

+

Remember that the client's searches only ever happen over its local cache of what is on the repository. Those caches are updated about once a day, so any changes you make will be delayed for others until their next update occurs. At the moment, the update period is 100,000 seconds, which is about 1 day and 4 hours.
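The arithmetic behind that rough figure:

update_period = 100000 # seconds

( days, remainder ) = divmod( update_period, 86400 )

hours = remainder / 3600

print( days, round( hours, 1 ) ) # 1 day and about 3.8 hours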

+
+ + \ No newline at end of file diff --git a/help/file_sync_1.png b/help/file_sync_1.png new file mode 100755 index 00000000..3dc0e527 Binary files /dev/null and b/help/file_sync_1.png differ diff --git a/help/file_sync_2.png b/help/file_sync_2.png new file mode 100755 index 00000000..ec9b3848 Binary files /dev/null and b/help/file_sync_2.png differ diff --git a/help/file_sync_3.png b/help/file_sync_3.png new file mode 100755 index 00000000..bc72098a Binary files /dev/null and b/help/file_sync_3.png differ diff --git a/help/file_sync_4.png b/help/file_sync_4.png new file mode 100755 index 00000000..7eed74ed Binary files /dev/null and b/help/file_sync_4.png differ diff --git a/help/file_sync_5.png b/help/file_sync_5.png new file mode 100755 index 00000000..23abf1ed Binary files /dev/null and b/help/file_sync_5.png differ diff --git a/help/file_sync_6.png b/help/file_sync_6.png new file mode 100755 index 00000000..2ad8f161 Binary files /dev/null and b/help/file_sync_6.png differ diff --git a/help/future.html b/help/future.html new file mode 100755 index 00000000..b14a0f9a --- /dev/null +++ b/help/future.html @@ -0,0 +1,54 @@ + + + ideas for the future + + + + +
+

ideas for the future

+ +

a looooong way into the future

+ +

never going to happen

+ +
+ + \ No newline at end of file diff --git a/help/getting_started_files.html b/help/getting_started_files.html new file mode 100755 index 00000000..236c6d77 --- /dev/null +++ b/help/getting_started_files.html @@ -0,0 +1,130 @@ + + + getting started - files + + + + +
+

<--- Back to the introduction

+

a warning

+

This is the real internet, not babby AOL. The hydrus client gives you the power to screw up your life. If you want to do private sexy slideshows of your shy wife that's fine, but don't upload the pictures anywhere you don't absolutely trust and don't give them public tags that'll identify anyone. It is impossible to contain leaks of private information.

+

the problem

+

If you have ever seen something like this

+

+

then you already know the problem: using a filesystem to manage a lot of images sucks.

+

Finding the right picture within a minute can be difficult. Finding all those by a particular artist or of a particular resolution within any reasonable time limit can be impossible. Adding new files into the whole mess is a further pain, and most operating systems bug out displaying folders with > 10,000 images.

+

so, what does the hydrus client do?

+

Let's first focus on storing and sharing files.

+

On booting the client for the first time, you will be faced with a blank screen and little idea of what to do next. I advise you simply drag-and-drop a folder with a hundred or so images onto the main window. After a little parsing, a dialog will appear affirming what you want to import. Ok that and a new page will open. Thumbnails will stream in as the software processes each file.

+

+

The files are being imported into the client's database. The client discards their filenames.

+

Notice your original folder and its files are untouched. You can move the originals somewhere else, delete them, and the client will still return searches fine. In the same way, you can delete from the client, and the original files will remain unchanged; import is a copy, not a move, operation. The client performs all its operations on its internal database. If you find yourself enjoying using the client and decide to completely switch over, you may delete the original files you import without worry. You can always export them back again later (albeit with different filenames).

+

Now:

+ +

The client currently supports the following mimetypes:

+ +

The client can also download files from several websites, including 4chan, many boorus, and gallery sites like deviant art. The different options are under F9->download.

+

+

Most of them have similar interfaces. Paste the url or type the query you are interested in, and press enter.

+

FAQ: why not use filenames and folders?

+

inbox and archiving

+

The client sends newly imported/downloaded files to an inbox so you may more easily decide what to do with them. Inbox acts like a tag, matched by 'system:inbox'. A small envelope icon is drawn in the top corner of all inbox files.

+

If you are sure you want to keep a file long-term, you should archive it, which will remove it from the inbox. You can archive from your selected thumbnails' right-click menu, or by pressing F7.

+

Anything you do not want to keep should be deleted.

+

A quick way of doing this is

+

filtering

+

Let's say you just downloaded a good thread, or perhaps you just imported an old folder of miscellany. You now have a whole bunch of files in your inbox: some good, some awful. You probably want to quickly go through them, saying yes, yes, yes, no, yes, no, no, yes, where yes means 'keep and archive' and no means 'delete this trash'. Filtering is the solution.

+

Select some thumbnails, and either choose filter from their right-click menu or hit F12. You will see this selection in fullscreen, with the following controls:

+ +

When done, you will be asked whether you want to commit your choices, forget them, or go back to filtering the current file.

+

Filtering saves time.

+

I have plans to make a filtering-like system to speed up certain kinds of tagging. Your thoughts would be appreciated.

+

exporting and uploading

+

There are many ways to export files from the client:

+ +

sharing files

+

The hydrus network has a service that lets clients share files anonymously, called a file repository.

+

It simply stores files in a big pool. Anyone who has an access key to the repository can see the pool's thumbnails and download anything they like. They may have permission to upload to it as well. Admins can delete. I run a download-only file repository, which you are welcome to connect to in order to get a feel for the interface. Go services->add, remove or edit services.

+

+ +

Then go services->review services to see your client synchronise with the repository's file list.

+

+

Hit F9, and you'll see a new "files->" page. It works exactly like a local search, it just uses a different file list. Files you do not have will be drawn with a dark background, those you do will be drawn as normal:

+

+

To download a file, double- or middle-click it, or select from the right click menu.

+

If you have permission to upload files to a particular repository, that option will appear in the right-click menu for any local files. Selecting this will pend them for batch uploading; just select from the new pending menu to effect the upload when you are ready.

+

lastly

+

The hydrus client is not an image-editing program, nor is it particularly intended for half-finished images. Think of it as a giant archive, a library, for everything excellent you have decided to store away.

+

Now let's learn about tags! ---->

+
+ + \ No newline at end of file diff --git a/help/getting_started_messages.html b/help/getting_started_messages.html new file mode 100755 index 00000000..86308b85 --- /dev/null +++ b/help/getting_started_messages.html @@ -0,0 +1,56 @@ + + + getting started - messages + + + + +
+

<--- Back to ratings

+

messaging, you say?

+

This is somewhat prototype. I am actively working on improving it all right now.

+

Secondly: I use encryption to protect your privacy. Although I am confident my code is mostly good, cryptography is really difficult to get 100% right. I use the right random number generator and key lengths and everything, but don't work under the assumption that a dedicated and well-funded attacker will never be able to break what I have done. I can guarantee that your guildmaster will not be able to read your messages, but not that the NSA cannot. Don't Do Drugs™.

+

ok, let's messaging

+

With the aid of a service called a message depot, the clients can send messages to one another.

+

A message depot is a bit like an IMAP email server. It has a number of contacts registered with it, and any client who knows those contacts can upload messages to it. A client can have many contacts (you can be several different people on the same computer), and a contact many clients (you can be the same person on several different computers). Clients check their appropriate message depots regularly, and download any new messages.

+

how is it different from email?

+

All hydrus network messages are stored on the message depots in an encrypted format, and only the recipient's client(s) have the key to decrypt them. If someone hacks/steals/whatever a message depot, they cannot read the messages, nor tell who they are from.

+

Messages are verifiable, meaning the client knows for sure if they came from who they say they did.

+

A client can send messages anonymously. These messages cannot (right now) be replied to.

+

adding contacts

+

First, let's discern a couple of differences:

+ +

The dialog under services->manage contacts and identities lets you add new contacts.

+

+

add manually lets you enter the information in each field, while add by contact address grabs the public key from the server for you after you put in the contact_key@host:port. If you do add manually, make sure you copy the public key very carefully, and check the resultant contact key is correct; if your OS converts newlines incorrectly, it'll all go wrong!

+

Your identities are listed here (and you can rename them), but you do not create them here.

+

how do i create a new identity?

+

If you want to send messages as anything other than Anonymous, you need an access key at a message depot. The access key is not the same as your eventual contact key; the first gives you access, the second is how people's clients will identify you. They are both random numbers.

+

Creating or adding a message depot works exactly the same as for any other type of service. When you add it, the client will do some heavy math in the background, which should freeze the interface for a few seconds. This is the client generating the contact's private key, which is the secret that lets it decrypt messages.

+

+

If you want to use the same identity with several clients, don't try to add the service in the usual way on your extra clients, or they will generate their own private keys, overwriting each other! Instead, export the message depot from services->add, remove or edit services on the first client, and import (drag and drop the .yaml file onto the dialog) into the second, third, whatever client. This will copy the original private key across without any errors.

+

When you are done, the 'messages' F9 menu will show your new identity.

+

If you want to share your identity, you can either send people your contact_key@host:port, or you can just message them, which will add you to their contacts automatically.

+

composing messages

+

Composing messages is easy. Just hit the button on your messages page. The 'recipients can see each other' checkbox does email's cc vs bcc.

+

I will add file attachments in future.

+

finding messages

+

The interface is just like searching for files. Put in a normal word to search message bodies/subjects, or use the system predicates to perform more complicated queries. Conversations that match will appear on the top right, and any selected convo will appear below.

+

+

You can hit F7 or delete or just right click on a conversation above to archive or delete it. Beside each message, you can see its current status with each recipient. If it says failed, you can click on it to retry, and if your identity is the recipient, you can switch between read and unread.

+

I will add:

+ +

in future.

+

Go back to the index --->

+
+ + \ No newline at end of file diff --git a/help/getting_started_ratings.html b/help/getting_started_ratings.html new file mode 100755 index 00000000..435243a9 --- /dev/null +++ b/help/getting_started_ratings.html @@ -0,0 +1,51 @@ + + + getting started - ratings + + + + +
+

<--- Back to tags

+

This is all prototype! I'm still working on this document! The network will soon support remote, collaborative ratings!

+

what is the difference between tags and ratings?

+

To rate a file qualitatively, to say that it is funny or sexy or a wallpaper, tags are the tool to use. To rate a file quantitatively, to say that it is 3/5 stars or critically excellent or terrible, we use ratings.

+

The hydrus client supports two kinds of ratings: like/dislike and a ℤ out of ℤ + n numerical rating. Let's start with the simpler one:

+

like/dislike

+

For now, since the client only supports local ratings, this is not terribly useful! You can define it in the services->add, remove or edit services dialog like so:

+

+

You can set the words for like or dislike as you like!

+

ℤ out of ℤ + n

+

This is just a clever way of saying something like 3 out of 5 stars or 8/10. This rating system takes numerical, integer-only input (whole numbers only, no 3.5/5). You can set the range however you like:

+

+

You can change these limits at a later date, and the database will adjust existing ratings appropriately (3/5 will go to 6/10, 9/10 will go to 4.5/5). +
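The adjustment is presumably just a linear rescale, something like:

def rescale( rating, old_max, new_max ):
    
    return rating * new_max / old_max

print( rescale( 3, 5, 10 ) ) # 6.0
print( rescale( 9, 10, 5 ) ) # 4.5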

now what?

+

Once you are sorted with your services, you can edit ratings for one or more files with F4:

+

+

If you hit F4 on one file, the dialog will show that file's ratings. If you hit F4 on several files, it'll try its best to show you a summary of all the files' ratings. When you set new ratings with this dialog, the ratings will be applied to all files.

+

ratings filter

+

If you would like to rate many files quickly, the client now lets you 'filter' them, like with the inbox/archive filter. You select it from some thumbnails' right-click menu as usual.

+

+

This will launch a new fullscreen window that will show already rated files beside the images you wish to rate:

+

+

Once it has exhausted currently rated files, it will try to compare the unrated files with each other. The current controls are:

+ +

Every small decision you make will give the client another clue about how good a file is; e.g. if you say a file is better than a 7/10 file, it must be either 8/10, 9/10 or 10/10. The client will later show you a file in this range to narrow it down further.
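A toy version of that narrowing logic, assuming a 1-10 scale (not the client's actual algorithm):

def narrow( possible_range, comparison_rating, is_better ):
    
    ( low, high ) = possible_range
    
    if is_better: low = max( low, comparison_rating + 1 )
    else: high = min( high, comparison_rating - 1 )
    
    return ( low, high )

possible_range = ( 1, 10 )

possible_range = narrow( possible_range, 7, True ) # better than a 7/10 -> (8, 10)
possible_range = narrow( possible_range, 10, False ) # worse than a 10/10 -> (8, 9)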

+

so, what now?

+

Ratings will show in the preview screen and fullscreen views, like so:

+

+

You can search with system:rating, and sort in the normal manner:

+

+

And that's it! Remote ratings will make this a _little_ more complicated, but not much.

+

Read about messaging --->

+

Go back to the index --->

+
+ + \ No newline at end of file diff --git a/help/getting_started_tags.html b/help/getting_started_tags.html new file mode 100755 index 00000000..b10d04ce --- /dev/null +++ b/help/getting_started_tags.html @@ -0,0 +1,49 @@ + + + getting started - tags + + + + +
+

<--- Back to files

+

how do we find files?

+

So, we have stored our images and .swfs in a large database. Everything is hashed and cached. We can sort by inbox and resolution and size and a bunch of other quantitative metadata, but if we want to differentiate qualitatively, we shall have to use tags.

+

FAQ: hold up, what is a tag?

+

The client starts with a 'local tags' service, which stores tags only on your client's database, where only you can see them. This is useful, and you should keep it in mind for specific jobs as you read the rest of this page, but please do not try to tag everything you own by yourself; unless you have a tiny collection, it will take you hundreds of hours, and your time is better spent helping build tag collections with other people.

+

In order to share tags with others, you must connect to at least one tag repository. You can create your own, if you like, and share access with whoever you like. I run a public tag repository, which you are very welcome to access and contribute to:

+

+ +

Tags are rich, cpu-intensive metadata, and it will take a few minutes for the repository to synchronise completely. The client will lag a little as it processes each update. You can watch its progress in the services->review services dialog as usual.

+

now let's find some files

+

Once you are all set up, you start to type in a tag, and the autocomplete dropdown will offer suggestions for you:

+

+

Press up/down and enter to select, or double click with your mouse. Play with the buttons to see how they change your queries.

+

+

More tags will narrow a search:

+

+

Or, when prefixed with a hyphen, exclude:

+

+

When you are more familiar with namespaces, sorting and collections, you can easily find and display whole books with just a few keystrokes:

+

+

creating tags

+

Hit F3 on a selection of files to open up the tagging dialog.

+

+

The dialog shows the intersection of the current selection's tags, and will add tags to/remove tags from the entire selection. Type a new tag and press enter to add it, double click an existing tag to remove it. You may be prompted to give a reason for removing a tag, which an administrator will review. Hit Apply and the changes will be pended for upload, just like with files. Use the pending menu when you are ready.

+

Please do not upload tags to my public tag repo until you get a rough feel for the tag schema, or just lurk until you get the idea. I am only interested in objective tags. If you don't like my guidelines, start your own tag repo!

+

You can be connected to more than one tag repository. Press the up or down arrow keys on an empty input to quickly jump between your repositories. Each repo's beliefs about which tags go with which files will be applied according to a certain precedence that you can edit in services->manage tag service precedence.

+

FAQ: why can my friend not see what I just uploaded?

+

typical usage

+

I find the following import-cycle works well for me:

+ +

Read about ratings --->

+

Go back to the index --->

+
+ + \ No newline at end of file diff --git a/help/glossary.html b/help/glossary.html new file mode 100755 index 00000000..c7104511 --- /dev/null +++ b/help/glossary.html @@ -0,0 +1,26 @@ + + + glossary + + + + +
+

access key A 32-byte identifier-password that gives you certain permissions with a repository. Usually represented as a 64-character hex string like so: 7ce4dbf18f7af8b420ee942bae42030aab344e91dc0e839260fcd71a4c9879e3

+

address The pairing of a server's host (be that an IP or a domain) with its port number, like so: 74.125.225.18:80, or google.com:80

+

archive The store of files you have chosen to keep.

+

file repository A service in the hydrus network that hosts files.

+

filtering A method of quickly deleting and archiving files within the client.

+

hash A file's unique identifier. The hydrus network uses SHA-256.

+

hydrus client An application that manages media and connects to services on the hydrus network.

+

hydrus network A loose collection of servers that attempt to make media management and distribution easier.

+

inbox A special tag the client gives to newly imported and downloaded files to make them easier to find and review.

+

mapping The pairing of a particular file with a particular tag.

+

message depot A service in the hydrus network that stores messages.

+

metadata Information about a file, but not stored within the file. Filename, size, hash, modified dates, tags and location are all good examples.

+

petition A request from an uploader for particular content to be removed from a repository.

+

tag A short string of text describing a file.

+

tag repository The service in the hydrus network that hosts mappings.

+
+ + \ No newline at end of file diff --git a/help/gunnerkrigg_chapter.png b/help/gunnerkrigg_chapter.png new file mode 100755 index 00000000..72e78c4f Binary files /dev/null and b/help/gunnerkrigg_chapter.png differ diff --git a/help/gunnerkrigg_import.png b/help/gunnerkrigg_import.png new file mode 100755 index 00000000..e177bb5f Binary files /dev/null and b/help/gunnerkrigg_import.png differ diff --git a/help/gunnerkrigg_page.png b/help/gunnerkrigg_page.png new file mode 100755 index 00000000..aec267fb Binary files /dev/null and b/help/gunnerkrigg_page.png differ diff --git a/help/gunnerkrigg_volume.png b/help/gunnerkrigg_volume.png new file mode 100755 index 00000000..5a5d7a89 Binary files /dev/null and b/help/gunnerkrigg_volume.png differ diff --git a/help/hidamari.png b/help/hidamari.png new file mode 100755 index 00000000..ae9c95eb Binary files /dev/null and b/help/hidamari.png differ diff --git a/help/hidamari_ac.png b/help/hidamari_ac.png new file mode 100755 index 00000000..76648d84 Binary files /dev/null and b/help/hidamari_ac.png differ diff --git a/help/hidamari_exclude.png b/help/hidamari_exclude.png new file mode 100755 index 00000000..4576827a Binary files /dev/null and b/help/hidamari_exclude.png differ diff --git a/help/hidamari_miyako.png b/help/hidamari_miyako.png new file mode 100755 index 00000000..74f459a2 Binary files /dev/null and b/help/hidamari_miyako.png differ diff --git a/help/hydrus.ico b/help/hydrus.ico new file mode 100755 index 00000000..6df08155 Binary files /dev/null and b/help/hydrus.ico differ diff --git a/help/import.png b/help/import.png new file mode 100755 index 00000000..25d7a08c Binary files /dev/null and b/help/import.png differ diff --git a/help/index.html b/help/index.html new file mode 100755 index 00000000..5e36fb97 --- /dev/null +++ b/help/index.html @@ -0,0 +1,39 @@ + + + hydrus help + + + + + + + + + + + +

hydrus help

+

Although the hydrus software's interface attempts to be simple, its underlying concepts are not. Please read the introduction and skim the getting started guide at the least, and you'll probably want to check out the access keys section to get started with my server.

+ + + \ No newline at end of file diff --git a/help/introduction.html b/help/introduction.html new file mode 100755 index 00000000..789c7ac8 --- /dev/null +++ b/help/introduction.html @@ -0,0 +1,42 @@ + + + introduction and statement of principles + + + + +
+

on being anonymous

+

I am convinced that anonymous speech is incredibly valuable to the modern development of free culture and society.

+

When people have no fear of personal repercussion, they reveal corruptions and admit truths they otherwise never would. Their words are insightful and stupid, convincing and hurtful, hilarious and ridiculous. Anons can discuss problems and collaborate on solutions without having to conform to laborious social norms. When they lie, it is usually less for vanity and more for fun.

+

Nearly all forums and social networking platforms use the same username/password archetype, and nearly all of them have the same problems with egotistical mods, sockpuppets, and drama. Everyone has a name to make and defend.

+

This is not to say that I believe in mandating anonymity; I think people should always have the option to be anonymous, and people should always have the choice to not view anonymously submitted material.

+

There are several online platforms that support anonymity, usually through a web browser, but most have terribly inefficient code, and their actual anonymity is often impotent window dressing, an afterthought. Collaboration is awkward and ephemeral.

+

I think we can do better.

+

the hydrus network

+

So! I'm developing a platform that helps people work together anonymously. My concern is in enabling you to do what you want. I don't want to record metrics on users, nor serve ads, nor charge for my software.

+

There are a number of new concepts, and it can get as complicated as you like. If you are totally new to it, I advise you start slow, go through the getting started guides, and experiment doing different things.

+

There is a server executable, which can run any of a number of different services I have designed, and a client application, which can plug into as many as you want (including none). I run a tag server that you are welcome to access and contribute to.

+

A server will have an address, such as 98.214.1.156 or hostname.net. A service hosted by that server will have a port, such as 45871. Access keys are non-identifying random numbers that grant a client certain permissions on a service. They look like this: 4a285629721ca442541ef2c15ea17d1f7f7578b0c3f4f5f2a05f8f0ab297786f.

+

Combining these three values like so:

+ +

gives your client sufficient credentials to access and use the service. All the clients connecting to a particular service can collaborate on a particular problem, such as "Post funny pictures of dogs." or "Let's collect our cosplay pictures and tag them."

+

I call what I have made the hydrus network. Right now, the platform lets you collect and manage your images, then share those files, and any tags you give them, with other users anonymously. Clients can also communicate with each other, although this feature is prototype. I have much more planned.

+

Here is a shot of the client, with a very general search:

+

+

statement of principles

+

Skip past this if you don't want to read some sanctimonious bullshit.

+

Anyone following internet news knows our rights are under constant attack. Some seriously bad dudes are very content to wreck others' lives for marginally larger slices of extremely inefficient political and economic markets. They are desperate to replace our net-liberty with old, ruinous broken systems. I personally don't think it is anyone else's business whether you search for queer pornography, religious iconography, or daisies. I have designed the hydrus network as such. I strongly believe that permitting anonymity makes for strong society.

+

I care about empowering you to do whatever you want with whatever you own, and I don't want anyone (including myself!) peeking in on it. Whenever someone wants your personal data and they don't absolutely need it to get their job done, you are about to get screwed. Your oddities will be measured and broadcast. Your vulnerability will be encouraged and leveraged. I think we are on the verge of some pretty cool stuff, and I reckon Amazon, Facebook et al. could well be some insidious runaway badness.

+

I think the internet works best where we have specialised servers run by different people (especially yourself!) doing specific jobs. One-stop-shops like your typical social network and the old Yahoo! and AOL portals are tepid at their best and promote biased ignorance at their worst. I very much admire the way irc has grown, for even though there are giant irc servers hosting countless channels, the standard is open and anyone can start their own server or write their own client with just a bit of brainpower. You can be anonymous and talk about whatever you like, or identified and talk about one particular subject politely, or any of the infinite combinations between. The people have the power. MediaWiki is similar: Don't like wikipedia? Make ED. Don't like OhI? Make ED.ch. Contrast this with the inflexibility of modern social networks. Or to how unreceptive large ISPs (who, these days, are nearly always also content providers) are to non-standard traffic.

+

My dream internet would have most users running their own servers. All their content and social media would be stored on and served from computers under their control. Their business would be no one else's.

+

Having said that, I use gmail and youtube just like pretty much everyone. But I would rather be using different systems, especially in ten years. No one seemed to be making what I wanted, so I decided to do it myself, and here we are.

+

Hand in hand with privacy goes free expression; I will never remove a mapping from my public tag repository because someone finds it not to their taste; I'll only grant petitions if they point to an objective error in tagging. Banning speech only makes a few ignorant more satisfied with their own fears. Please note that the guys running the file repositories might count as ISPs in their jurisdictions, so may have to bow to DMCA and whatever else is supposed to halt the tide. I wish I could have written the file repository to mandate IP-anonymity, but I can't, so I've given the individual administrators the option. They will likely make their own rules (e.g. no jb/cp, or 'copyrighted content') which you would be very wise to follow, or simple content guidelines ('This is a repository dedicated to slash of Hokuto no Ken. Only for fan-made MM images relating to Hokuto no Ken.') which you should not break if you want to keep your access key.

+

Another concern of mine is simple speed. If you want bells and whistles, go somewhere else. There will never be a skinnable client, nor one that posts your recent 'achievements' to some social network. I want rich queries that return thousands of results in moments. Easy navigation and sharing. No ads. That said, I realise I have zero aesthetic range, so if you are an artist/designer and the mood hits you, please do not shy from emailing me thoughts on usability or my diagrams or new icons or whatever.

+

I'd like to eventually set up a paypal/kickstarter-style way for people to gibe moni plos, but it'll be totally voluntary.

+

license

+

These programs are free software. They come without any warranty, to the extent permitted by applicable law. You can redistribute them and/or modify them under the terms of the Do What The Fuck You Want To Public License, Version 2, as published by Sam Hocevar. See http://sam.zoy.org/wtfpl/COPYING for more details. Do what the fuck you want to, and if shit breaks, DEAL WITH IT.

+

Happy? Go on to the getting started guide ---->

+
+ + \ No newline at end of file diff --git a/help/lib_gc.png b/help/lib_gc.png new file mode 100755 index 00000000..59b52768 Binary files /dev/null and b/help/lib_gc.png differ diff --git a/help/lib_gc_small.png b/help/lib_gc_small.png new file mode 100755 index 00000000..705ef159 Binary files /dev/null and b/help/lib_gc_small.png differ diff --git a/help/lib_party_hard.png b/help/lib_party_hard.png new file mode 100755 index 00000000..bf19c510 Binary files /dev/null and b/help/lib_party_hard.png differ diff --git a/help/lib_party_hard_small.png b/help/lib_party_hard_small.png new file mode 100755 index 00000000..e4a07c0c Binary files /dev/null and b/help/lib_party_hard_small.png differ diff --git a/help/lib_rec.png b/help/lib_rec.png new file mode 100755 index 00000000..1e945f37 Binary files /dev/null and b/help/lib_rec.png differ diff --git a/help/lib_rec_small.png b/help/lib_rec_small.png new file mode 100755 index 00000000..eec94066 Binary files /dev/null and b/help/lib_rec_small.png differ diff --git a/help/librarium_database_diagram_old.png b/help/librarium_database_diagram_old.png new file mode 100755 index 00000000..e441e97a Binary files /dev/null and b/help/librarium_database_diagram_old.png differ diff --git a/help/manage_services_edit_message_depot.png b/help/manage_services_edit_message_depot.png new file mode 100755 index 00000000..63c4db37 Binary files /dev/null and b/help/manage_services_edit_message_depot.png differ diff --git a/help/manage_tags.png b/help/manage_tags.png new file mode 100755 index 00000000..3e49b707 Binary files /dev/null and b/help/manage_tags.png differ diff --git a/help/message_sync_1.png b/help/message_sync_1.png new file mode 100755 index 00000000..1543020a Binary files /dev/null and b/help/message_sync_1.png differ diff --git a/help/message_sync_2.png b/help/message_sync_2.png new file mode 100755 index 00000000..8e2addb1 Binary files /dev/null and b/help/message_sync_2.png differ diff --git a/help/message_sync_3.png b/help/message_sync_3.png new file mode 100755 index 00000000..cc5d9167 Binary files /dev/null and b/help/message_sync_3.png differ diff --git a/help/message_sync_4.png b/help/message_sync_4.png new file mode 100755 index 00000000..606712e0 Binary files /dev/null and b/help/message_sync_4.png differ diff --git a/help/messaging diagrams.svg b/help/messaging diagrams.svg new file mode 100755 index 00000000..96ac6b6a --- /dev/null +++ b/help/messaging diagrams.svg @@ -0,0 +1,1067 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + + + + Alice's message depot + Alice's client + Bob's client + Alice wants to create an identity at her message depot. She fills in her access key and host:port in theadd, remove and edit services dialog.Her client generates a 2048bit RSA private key, and from that, derives a public key and hydrus contact key.Using the access key for her message depot, alice registers the public key.The service does not remember Alice's IP, and knows nothing more about her than her hydrus access key(hashed) and public key. It derives the hydrus contact key from the public key. + + + Alice's private keyAlice's public keyAlice's contact key + Associate Alice's access keywith Alice's public key + + + + + Alice's message depot + Alice's client + Bob's client + Alice gives her contact information (contact_key@host:port) to Bob.He puts this into his client, which contacts her message depot to fetch her public key. + + + + + 2. Alice's contact key + 3. 
Alice's public key + Alice's access key (hashed), Alice's public key, Alice's contact key + 1. Alice's contact key@host:port + Alice's public key, Alice's contact key +
Alice's message depot + Alice's client + Bob writes a message to Alice. His client signs and encrypts this, then uploads it to her message depot. Her message depot forgets his IP. +
Alice's access key (hashed), Alice's public key, Alice's contact key + Alice's public key, Alice's contact key + message = Bob->Alice: Hello Alice + message and signature (+) Alice's public key = [ciphertext, drawn in the diagram as unreadable glyphs] +
message (+) bob's private key = signature + [ciphertext] + For Alice: + 1. + 2. +
Alice's message depot + Alice's client + Alice checks to see if there are any new messages. Her client decrypts and verifies the message, then stores it in its database. +
Alice's access key (hashed), Alice's public key, Alice's contact key + [ciphertext] + For Alice: + Alice's access key (hashed) +
1. Any new messages for Alice's access key? + [ciphertext] + 2. +
Bob's client + (+) Alice's private key = message and signature + Store "Bob->Alice: Hello Alice" in database + [ciphertext] + signature (+) Bob's public key = verification that Bob wrote it + 3. + inside Bob's client + inside Alice's client + + diff --git a/help/network diagrams.svg b/help/network diagrams.svg new file mode 100755 index 00000000..5c571f97 --- /dev/null +++ b/help/network diagrams.svg @@ -0,0 +1,10975 @@ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + + file repository + files + thumbnails + update cache + + + + + + + + + + file repository + files + thumbnails + update cache + + + + + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + + + + user a + user b + user c + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + let's say user a uploads a few files to repo a + + + + file repository + files + thumbnails + update cache + + + + + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + + + + repo a generates thumbnailsfor the new files and addstheir info (hash, size, mime...)to its update cache + + since the upload was successful,user a's librarium generates apartial update about the files it justuploaded so that its repo asearches appear synchronised + + + + file repository + files + thumbnails + update cache + + + + + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + + + + + + + + + + + + + + + about once a day, all users with access keysfor repo a synchronise their updatecaches. 
user a's uploaded files will now beconsidered during repo a searches + + + + + + + file repository + files + thumbnails + update cache + + + + + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + + + + + + + + + + + + the other users download the thumbnailsfor user a's files so they will preview nicelyif they happen to turn up in a search + + + a file repository, let's call it repo a,has a number of files.each copy of librarium that has an accesskey to repo a knows about all of these files,and also has their thumbnails cached + + THIS STEP ALL HAPPENS AUTOMATICALLY + THIS STEP ALL HAPPENS AUTOMATICALLY + THIS STEP ALL HAPPENS AUTOMATICALLY + + + + file repository + files + thumbnails + update cache + + + + + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + + + + + + + + + + + any of the synchronised userscan now download any of user a'suploaded files + + + repo a + + + + + + + + + + + repo a + repo a + repo a + repo a + repo a + + + tag repository + update cache + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + + + + -or- + the tag repository knows which tagsgo with which files, but does not store thefiles themselves.librarium keeps a synchronised copy of thisinformation, and cross-references it with its localand remote file listings to perform itslocal and remote searches + + + tag repository + update cache + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + let's say user a tells the tagrepository about some new mappingse.g.[ file xxxx should have the tag 'dog',files yyyy, zzzz should have the tag 'cat',file tttt should have the tag 'delicious trap' ] + + the tag repository adds thisinformation to its update cache + + + tag repository + update cache + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + about once a day, every copy of librariumwith a valid access key to the tagrepository will synchronise their localcachesthey download everything, regardless ofwhich files they actually have + + + + + + + + + + + + tag repository + update cache + + + librarium + files + thumbnails + file update caches + + + + + + + tag update cache + + + + librarium + files + thumbnails + file update caches + + + + + + tag update cache + + + librarium + files + thumbnails + file update caches + + + + + tag update cache + + user a + user b + user c + + + + + + + + + + now if user c has 
file tttt andperforms a local search for'delicious trap', file tttt will beincluded in the resultsclicking on the thumbnail for filetttt will show that it has the tag'delicious trap' + plus: + THIS STEP ALL HAPPENS AUTOMATICALLY + just like with content uploads + + when those users performa search over repo a,librarium uses its copy ofthe tag cache andrepo a's cache to createthe results + + diff --git a/help/pictures.png b/help/pictures.png new file mode 100755 index 00000000..518dec35 Binary files /dev/null and b/help/pictures.png differ diff --git a/help/privacy.html b/help/privacy.html new file mode 100755 index 00000000..ef901a3f --- /dev/null +++ b/help/privacy.html @@ -0,0 +1,61 @@ + + + privacy + + + + +
+

privacy

+

Repositories do not ever know what you are searching for. The client synchronises (copies) the repository's entire file or mapping list to its internal database, and does its own searches over those internal caches, all on your hard drive. It never sends search queries outside your own computer, nor does it log what you do look for. Your searches are your business, and no-one else's.
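To make that concrete, here is a tiny sketch, not the client's actual database code, of what searching a locally synchronised tag->file mapping looks like; the query is resolved entirely on your own hard drive:

# a minimal sketch: the client holds a local copy of the repository's mappings
# and intersects them locally, so the search terms never go over the network
local_mappings = {
    'blue eyes' : { 'hash_a', 'hash_b' },
    'series:futurama' : { 'hash_b', 'hash_c' }
    }

def local_search( tags ):
    
    result_sets = [ local_mappings.get( tag, set() ) for tag in tags ]
    
    return set.intersection( *result_sets ) if result_sets else set()

print( local_search( [ 'blue eyes', 'series:futurama' ] ) ) # the files that have both tags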

+

Repositories know nothing more about your client than they can infer, and the software usually commands them to forget as much as possible as soon as possible. Specifically:

+ + + + + + + + + + + + + + + + + + + + + + + + + +
                               tag repository                          file repository
                               upload mappings    download mappings    upload file    download file
Account is linked to action    Yes                No                   Yes            No
IP address is remembered       No                 No                   Maybe          No
+

i.e:

+

+

+

+

Furthermore:

+

+

+

+

There are of course some clever exceptions. If you tag a file three years before it surfaces on the internet, someone with enough knowledge will be able to infer it was most likely you who created it. If you set up a file repository for just a friend and yourself, it becomes trivial by elimination to guess who uploaded the NarutoXSonichu shota diaper fanon. If you sign up for a file repository that hosts only evil stuff and rack up a huge bandwidth record for the current month, anyone who knows that and also knows the account is yours alone will know you were up to no good.

+

Note also that the file repository code is freely available and entirely mutable. If someone wants to put the time in, they can create a file repository that looks from the outside like any other but nonetheless logs the IP and nature of every request. Just make sure you trust the person running the repository. (And make sure they suck at programming python!)

+

Even anonymised records can reveal personally identifying information. Don't trust anyone who plans to release maps of accounts -> files or accounts -> mappings, even for some benevolent academic purpose.

+
+ + \ No newline at end of file diff --git a/help/ratings_dialog.png b/help/ratings_dialog.png new file mode 100755 index 00000000..b06d20ec Binary files /dev/null and b/help/ratings_dialog.png differ diff --git a/help/ratings_filter.png b/help/ratings_filter.png new file mode 100755 index 00000000..aa231faa Binary files /dev/null and b/help/ratings_filter.png differ diff --git a/help/ratings_like.png b/help/ratings_like.png new file mode 100755 index 00000000..b6976f6c Binary files /dev/null and b/help/ratings_like.png differ diff --git a/help/ratings_menu.png b/help/ratings_menu.png new file mode 100755 index 00000000..201930f1 Binary files /dev/null and b/help/ratings_menu.png differ diff --git a/help/ratings_numerical.png b/help/ratings_numerical.png new file mode 100755 index 00000000..22050256 Binary files /dev/null and b/help/ratings_numerical.png differ diff --git a/help/ratings_preview.png b/help/ratings_preview.png new file mode 100755 index 00000000..feb52c36 Binary files /dev/null and b/help/ratings_preview.png differ diff --git a/help/ratings_sort.png b/help/ratings_sort.png new file mode 100755 index 00000000..0231fd29 Binary files /dev/null and b/help/ratings_sort.png differ diff --git a/help/review_repos.png b/help/review_repos.png new file mode 100755 index 00000000..c5687463 Binary files /dev/null and b/help/review_repos.png differ diff --git a/help/review_services.png b/help/review_services.png new file mode 100755 index 00000000..2d30f05c Binary files /dev/null and b/help/review_services.png differ diff --git a/help/running_from_source.html b/help/running_from_source.html new file mode 100755 index 00000000..11aad20f --- /dev/null +++ b/help/running_from_source.html @@ -0,0 +1,28 @@ + + + running from source + + + + +
+

running from source

+

The hydrus network is written entirely in python, which can run straight from source. This is not recommended for someone who just wants to get the program working, but more for those who have a general interest or wish to modify their programs.

+

what you will need

+

You will need to install python 2.x and a few modules: +

+

Once you have everything set up, just run client.pyw or server.pyw from your install directory. They will look for and run off the exact same database files as the executables would, and they'll hopefully run exactly the same too.
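Before launching, you could sanity-check your environment with something like the snippet below. The module names in it are placeholders for illustration, not the definitive dependency list; use whatever the list above actually names:

# an illustrative pre-flight check; REQUIRED_MODULES holds placeholder names only
import importlib

REQUIRED_MODULES = [ 'wx', 'sqlite3', 'yaml' ] # placeholders - substitute the real list

for name in REQUIRED_MODULES:
    
    try:
        
        importlib.import_module( name )
        
        print( name + ': ok' )
        
    except ImportError:
        
        print( name + ': missing - install it before running client.pyw' )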

+

I develop all this on 64-bit Win 7. I don't know how it runs on osx or linux, and don't personally have the means to find out. If you are good with python and want to make it work on non-windows, even compiling a release, go ahead! I'd be interested in how you get along.

+

my coding

+

I seem to work very much on a conceptual, boxes-and-arrows level. Although I can code in any language if I have to, I find the pedantic intricacies of C and its low-level friends a pain. But Python works for me right out of the box and is far more semantically rich; one line of intelligent and word-heavy python can do the work of 20 robotic t+=ord(j_temp)%2;s in C, and with much less fear of memory leaks or injection attacks or buffer overflows. I like long variable names and copious whitespace. I don't do much commenting because I am working alone and it usually makes quick sense to me, even when I haven't seen something for ages. If my code looks kooky to you, I'm sorry; my nerdism is INFJ, not INTP/J.

+

I honestly don't know enough about the open source community to release my code under a specific licence, so everything is just public domain. In our internet age, copyright and copyleft seem to me equally impotent.

+
+ + \ No newline at end of file diff --git a/help/server.html b/help/server.html new file mode 100755 index 00000000..8da53e04 --- /dev/null +++ b/help/server.html @@ -0,0 +1,81 @@ + + + running your own server + + + + +
+

setting up a server

+

Would you

+ +

If so, you may wish to try running your own server. But first:

+ +

still keen?

+

If this stuff be-fuzzles you, and you just want a simple server set up on the same computer you run the client, you can now go help->i don't know what I am doing->just set up the server on this computer, please and you _should_ be all set up with an admin service and tag/file repos automatically.

+

In discussing the hydrus network's server, I shall use two terms, server and service, to mean two distinct concepts:

+ +

Setting up a hydrus server is easy compared to, say, Apache. There are no .conf files to mess about with, and everything is controlled through the client. When started, the server does nothing more visible than placing an icon in your system tray. Right click that, and you only get an option to exit.

+

The basic process for setting up a server is thus:

+ +

Let us look at these steps in more detail:

+

start the server

+

Since the server and client have so much common code, I have packaged them together. If you have the client, you have the server. To start it, you can hit the shortcut in your start menu or just go straight for server.exe/.pyw in the install directory. It will first try to take port 45870 for its administration interface, so make sure that is free. Open your firewall as appropriate.

+

set up the client

+

In the add, remove or edit services dialog, go to the servers admin tab, give your server admin interface a nickname and set the credentials to whatever-hostname:45870. Don't enter any access key. Ok those changes, and go to review services.

+

On the tab+page for your new server, hit the initialise button. If you have everything set right, the server should generate its first administrator account and return the access key, which the client will set for you. It will also try to save the key inside a text file.

+

YOU'LL WANT TO SAVE THAT FILE IN A SAFE PLACE

+

If you lose your admin access key, there is no way to get it back, and if you are not sqlite-proficient, you'll have to restart from the beginning by deleting your server's database files.

+

If the client can't connect to the server, it is either not running or you have a firewall/port-mapping problem. Google will help you with NAT. If you want a quick way to test the server's visibility, just put its host:port into your browser; if working, it should return some simple html identifying itself.
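If you would rather script that quick visibility test than open a browser, a sketch along these lines should print the simple identifying html when the server is reachable. The host below is a placeholder, and 45870 is just the default administration port mentioned earlier; on python 3 you would use urllib.request instead of urllib2:

# a hedged sketch of the browser test described above, done from python 2.x
import urllib2

host = '127.0.0.1' # placeholder - use your server's hostname or IP
port = 45870

try:
    
    response = urllib2.urlopen( 'http://%s:%d/' % ( host, port ), timeout = 10 )
    
    print( response.read()[ : 200 ] ) # should be some simple html identifying the server
    
except Exception as e:
    
    print( 'could not reach the server: ' + str( e ) )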

+

set up the server

+

You should notice a new menu, 'admin', in the client gui. This is where you control most server-wide stuff. admin->your server->options does exactly what you think.

+

admin->your server->manage services lets you add, edit, and delete the services your server runs. Every time you add one, you should also be added as that service's first administrator, with the same access key as for the admin interface. +

Once you add a new service, the admin menu should give you another entry. Everything is fairly self-explanatory.

+

making access keys

+

Go admin->your service->create new accounts. You can then create x access keys of account type y and try to save them to a text file.

+

Go admin->manage account types to add, remove, or edit account types. Make sure everyone has at least downloader (get_data) permissions so they can stay synchronised.

+

You can create as many accounts of whatever kind you like, and distribute them the same. Depending on your usage scenario, you may want to have all uploaders, one uploader and many downloaders, or just a single administrator. There are many combinations.

+

It is your server, so do what you like.

+

On that note: python is simple enough that changes to the source are easy. If you want to auto-ban anyone who does not fit a certain quota, it is not that hard to find where the magic happens and alter it. Just do not test your changes on a live repository!

+

???

+

The most important part is to have fun! There are no losers on the INFORMATION SUPERHIGHWAY.

+

profit

+

I honestly hope you can get some benefit out of my code, whether just as a backup or as part of a far more complex system. Please mail me your comments as I am keen to make improvements.

+

btw, how to backup a repo's db

+

All of a server's files and options are stored in its accompanying .db file and respective subdirectories, which are created on first startup (just like with the client). You can backup and restore these files just by copying them about, but you have to be careful how you do it with a server; when it is running, it has a live connection to its database, and all sorts of things could be written or read at any time. If you just try to copy the .db somewhere and someone uploads a file, something might break. Instead, you have two options:

+ +
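As a concrete illustration of the safer route, here is a sketch that assumes you have already shut the server down, so nothing is writing to the database while you copy it. The paths are placeholders, and the exact file layout is whatever your install actually contains:

# a hedged sketch: with the server STOPPED, copy server.db (and any subdirectories
# the server created on first startup) somewhere safe; the paths are placeholders
import os
import shutil

install_dir = r'C:\hydrus' # placeholder - wherever server.exe/.pyw lives
backup_dir = r'D:\hydrus_backup' # placeholder

if not os.path.isdir( backup_dir ): os.makedirs( backup_dir )

shutil.copy2( os.path.join( install_dir, 'server.db' ), backup_dir )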

OMG EVERYTHING WENT WRONG

+

If you get to a point where you can no longer boot the repository, try running SQLite Studio and opening server.db. If the issue is simple, like manually changing the port number, you may be in luck. Send me an email if it is tricky.

+

If you somehow damage your computer and want to blame me, try taking some responsibility for your mistakes. Everything is breaking all the time. Make backups.

+
+ + \ No newline at end of file diff --git a/help/server_database_diagram.png b/help/server_database_diagram.png new file mode 100755 index 00000000..6424eff4 Binary files /dev/null and b/help/server_database_diagram.png differ diff --git a/help/similar_gununu.png b/help/similar_gununu.png new file mode 100755 index 00000000..551a62f9 Binary files /dev/null and b/help/similar_gununu.png differ diff --git a/help/similar_gununus.png b/help/similar_gununus.png new file mode 100755 index 00000000..795668de Binary files /dev/null and b/help/similar_gununus.png differ diff --git a/help/similar_icons.png b/help/similar_icons.png new file mode 100755 index 00000000..fb3196c5 Binary files /dev/null and b/help/similar_icons.png differ diff --git a/help/style.css b/help/style.css new file mode 100755 index 00000000..0161e4de --- /dev/null +++ b/help/style.css @@ -0,0 +1,20 @@ +a { color: #222; text-decoration: none; font-weight: bold; } +a:hover { color: #555 } +body { font-family: "Calibri", Arial, sans-serif; color: #555; line-height: 1.5; } +h3 { color: #222; } +ul li { list-style: none; margin: 1.0em 0em; } +ul.bulletpoint li { list-style: disc; margin: 1.0em 0em; } +th { text-align: center; } +tr { text-align: center; } + +.dealwithit { color: #ff0000; text-shadow: +1px 1px 0 #ff7f00, +2px 2px 0 #ffff00, +3px 3px 0 #00ff00, +4px 4px 0 #0000ff, +5px 5px 0 #6600ff, +6px 6px 0 #8b00ff; } +.warning { color: #d00 } +.lololol { text-decoration: line-through; } +.right { text-align: right; } +.screenshot { float: right; clear: both; margin: 10px; } \ No newline at end of file diff --git a/help/tag_example_ainsley.jpg b/help/tag_example_ainsley.jpg new file mode 100755 index 00000000..796899e9 Binary files /dev/null and b/help/tag_example_ainsley.jpg differ diff --git a/help/tag_example_azura.jpg b/help/tag_example_azura.jpg new file mode 100755 index 00000000..c0d30ebb Binary files /dev/null and b/help/tag_example_azura.jpg differ diff --git a/help/tag_example_feel.gif b/help/tag_example_feel.gif new file mode 100755 index 00000000..4ec5c5d5 Binary files /dev/null and b/help/tag_example_feel.gif differ diff --git a/help/tag_example_five_star.png b/help/tag_example_five_star.png new file mode 100755 index 00000000..e85d8ea9 Binary files /dev/null and b/help/tag_example_five_star.png differ diff --git a/help/tag_example_five_star_small.png b/help/tag_example_five_star_small.png new file mode 100755 index 00000000..a5e2595f Binary files /dev/null and b/help/tag_example_five_star_small.png differ diff --git a/help/tag_example_mouse.jpg b/help/tag_example_mouse.jpg new file mode 100755 index 00000000..da7b31a9 Binary files /dev/null and b/help/tag_example_mouse.jpg differ diff --git a/help/tag_sync_1.png b/help/tag_sync_1.png new file mode 100755 index 00000000..505a70d0 Binary files /dev/null and b/help/tag_sync_1.png differ diff --git a/help/tag_sync_2.png b/help/tag_sync_2.png new file mode 100755 index 00000000..5ec3cda3 Binary files /dev/null and b/help/tag_sync_2.png differ diff --git a/help/tag_sync_3.png b/help/tag_sync_3.png new file mode 100755 index 00000000..3f0cd59d Binary files /dev/null and b/help/tag_sync_3.png differ diff --git a/help/tag_sync_4.png b/help/tag_sync_4.png new file mode 100755 index 00000000..6065941c Binary files /dev/null and b/help/tag_sync_4.png differ diff --git a/help/tag_trap_post.jpg b/help/tag_trap_post.jpg new file mode 100755 index 00000000..91cdac2f Binary files /dev/null and b/help/tag_trap_post.jpg differ diff --git a/help/tag_trap_pre.jpg 
b/help/tag_trap_pre.jpg new file mode 100755 index 00000000..fabcfac2 Binary files /dev/null and b/help/tag_trap_pre.jpg differ diff --git a/help/tagging_schema.html b/help/tagging_schema.html new file mode 100755 index 00000000..b3392ecd --- /dev/null +++ b/help/tagging_schema.html @@ -0,0 +1,144 @@ + + + tagging schema + + + + +
+

This is for the public tag repository only! You can run your own tag repositories and do your own thing additionally or instead!

+

seriousness of schema

+

Whenever many people contribute to a large whole, a rough schema is useful. But it is not the most important thing in the world for it to be upheld absolutely; it will just make searches easier if most of us can mostly agree to some guidelines.

+

The most important thing is: if your tag idea is opinion, don't add it to my repo. 'beautiful' is not a great tag since no one can agree on what it means. 'giant breasts' is an ok tag since many reasonable people can agree (within certain loose parameters) on what it means. 'f-cup' is a great tag (assuming it is accurate, and not a wild guess) since you can verify it; it is either true about a picture or not. If you think f-cup breasts are beautiful and you want to see something beautiful, you can add 'f-cup' to your query. When a particular search term becomes popular, I can alter the client to understand more intelligent queries, like '>c-cup' or '<=b-cup'.

+

As with all rulesets, it is very possible to go too far. We will never be able to easily and perfectly categorise every single image to everyone's satisfaction, so there is no point defining every possible rule for every possible situation. This is especially true for namespaces. So lower any autismal expectations you might have and just start tagging. Fixing mistakes is not difficult.

+

Just like JIT production flows, editing wikipedia articles, and the holy doctrine of self-flagellation, this is a process, not a destination.

+

you can add pretty much whatever the hell you want, but don't screw around

+

You can't prefix a tag with a '-' or 'system:' for obvious reasons, but tag repositories will accept anything else that is utf-8. You can start your own namespaces, categorisation systems, whatever. Just be aware that everyone else with access to a tag repo will see what you do.

+

If you don't know the difference between objective and subjective, here's a refresher:

+ +

numbers

+

Numbers should be written '22', '1457 ce', and 'page:3', unless as part of an official title like 'ocean's eleven'. When the client parses numbers, it does so intelligently, so just use '1' where you might before have done '01' or '001'. I know it looks ugly sometimes to have '2 girls' or '1 cup', but the rules for writing numbers out in full are hazy for special cases.

+

(Numbers written as '123' are also readable by speakers of many different languages, while 'tano', 'deux' and 'seven' are not.)
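As a rough illustration of what parsing numbers intelligently can mean, the sketch below sorts tags so that embedded numbers compare as numbers rather than as text, which is why 'page:3' does not need to be written 'page:03'. This is just an illustration, not the client's actual parser:

# a hedged sketch of natural sorting, not hydrus's real code
import re

def natural_key( tag ):
    
    # split the tag into text and digit chunks; digit chunks compare as integers
    return [ int( chunk ) if chunk.isdigit() else chunk for chunk in re.split( r'(\d+)', tag ) ]

tags = [ 'page:10', 'page:2', 'page:1' ]

print( sorted( tags ) ) # plain sort: [ 'page:1', 'page:10', 'page:2' ]
print( sorted( tags, key = natural_key ) ) # natural sort: [ 'page:1', 'page:2', 'page:10' ]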

+

plurals

+

Nouns should generally be singular, not plural. 'chair' instead of 'chairs', 'female' instead of 'females', 'cat' instead of 'cats', even if there are several of the thing in the image. If there really are many of the thing in the image, add a separate 'multiple', 'comparison' or 'group' tag as appropriate.

+

An exception is when the thing is normally said in its plural (usually paired) form. Say 'blue eyes', not 'blue eye'; 'breasts', not 'breast'; 'pants' instead of 'pant'.

+

acronyms and synonyms and shortenings are a pain in the bum to keep track of

+

At a later date, it is planned for the client to support general synonyms, so inputting 'lotr' could return 'series:the lord of the rings', 'marimite' might return 'series:maria-sama ga miteru', and 'brangelina' would of course return 'person:brad pitt'+'person:angelina jolie'. Until then, please enter whatever you feel most comfortable with and don't go crazy with the petitions. If we end up with many surplus tags, we will sort it out when synonyms come in.

+

namespaces

+

A namespace is when you prefix a context: 'creator:range murata', 'series:futurama', 'title:what kind of day has it been', and so on.

+

The tags 'democratic', 'beck', and 'st. vincent' all mean different things depending on context, so searches for these simple terms will often return unwanted results. Adding context also helps people who do not know a work's author or title, especially when the tags come from a different language or culture. Anyone who has attempted to market (or just find!) a badly-named video game or movie through google has encountered these problems. Namespaces remove that ambiguity.

+

Prefixing a namespaced context where appropriate makes searching more powerful. I can also update the client to parse certain namespaces in different ways, to create clever search predicates, sort orders, collections, presentation rules, whatever. Our 'f-cup' example from above might be improved if it were 'bust:f-cup'.

+

BTW: All searches without a namespace will return all instances, namespaced or not, of that tag; a search for 'levar burton' will return everything with 'levar burton', 'creator:levar burton', 'person:levar burton', 'series:levar burton', 'offensive:levar burton', whatever. If you then want a specific version of 'levar burton' to narrow it down, use a namespace.

+

BTW Part 2, Electric Boogaloo: Nested namespaces do not work and will kill your dreams. Don't go with something crazy like 'militaries:united states:navy:marine corps:sgt. joe bloggs' as the client will parse that as (militaries):(united states:navy:marine corps:sgt. joe bloggs). Instead use a combination of 'nation:united states', 'usmc', 'person:joe bloggs', 'rank:sergeant' and so on.
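To make the two points above concrete, here is a quick python sketch. It is only an illustration of the rules as described, not the client's actual matching code: a tag splits on its first colon only, a bare search matches the tag text under any namespace, and a namespaced search wants that exact namespace.

def split_tag( tag ):
    
    # only the first colon counts; everything after it is the tag text
    
    if ':' in tag: return tuple( tag.split( ':', 1 ) )
    else: return ( '', tag )
    

print( split_tag( 'militaries:united states:navy:marine corps:sgt. joe bloggs' ) )
# ( 'militaries', 'united states:navy:marine corps:sgt. joe bloggs' )

def search_matches_tag( search, tag ):
    
    ( namespace, subtag ) = split_tag( tag )
    
    if ':' in search: return search == tag # a namespaced search wants that exact namespace
    else: return search == subtag # a bare search matches the tag text under any namespace
    

print( search_matches_tag( 'levar burton', 'person:levar burton' ) ) # True
print( search_matches_tag( 'person:levar burton', 'series:levar burton' ) ) # False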

+

Some basic namespaces:

+ +
+ + \ No newline at end of file diff --git a/help/unlimited_gununu_works.jpg b/help/unlimited_gununu_works.jpg new file mode 100755 index 00000000..e3077115 Binary files /dev/null and b/help/unlimited_gununu_works.jpg differ diff --git a/help/updates.html b/help/updates.html new file mode 100755 index 00000000..27da4991 --- /dev/null +++ b/help/updates.html @@ -0,0 +1,47 @@ + + + updates + + + + +
+

how the hydrus network synchronises

+

The hydrus network does not work like a regular client-server architecture.

+

The most important difference is its decentralisation of processing; rather than make an expensive http request every time it wants something, the client makes an all-inclusive synchronisation request about once a day and performs all searches on its local cache.

+

so, how does the client make sure it has what it needs to do its searches?

+

When the client contacts a repository, it downloads every single change that has occurred since the last time it checked. It keeps all this data, and searches over whatever is appropriate to its own circumstances. If its local circumstances change (e.g. you import a thousand new files), it doesn't need to download anything more. A repository does not know anything about any particular client's circumstances.
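In rough sketch form, a single check looks something like this. Every name here is invented for illustration; the real logic is spread through ClientConstants.py and the client database code.

# a sketch of one synchronisation check; LocalCache and its methods are stand-ins, not real hydrus classes

def synchronise_once( repository, local_cache ):
    
    begin = local_cache.GetNextBegin() # 0 on the very first check, so the whole history comes down
    
    update = repository.Get( 'update', begin = begin ) # the one all-inclusive request
    
    local_cache.ApplyUpdate( update ) # keep every change locally
    
    local_cache.SetNextBegin( update.GetNextBegin() ) # remember where to pick up next time
    

# every search afterwards runs against the local cache; the repository is not asked again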

+

tell me more! use diagrams!

+

These diagrams are a little old! 'librarium' is the old name for the client, and now there are multiple tag update caches, which are combined into a new table called 'active mappings'. I'll update them sooooometime.

+

tags:

+

+

+

+

+

files:

+

+

+

+

+

+

+

the update request

+

The main request looks like this:

+ +

Which is a standard http query. 'begin' is a timestamp telling the repository "please give me the update which starts with this timestamp" (begin=0 initialises). The repository answers in YAML, which you can review in include/HydrusConstants.py.
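If you want to poke at it by hand, a hand-rolled version of that request looks something like the following python 2 (run it from the hydrus install directory so HydrusConstants imports). The host and port are placeholders, the path is reconstructed from what ConnectionToService builds in ClientConstants.py, and the headers are the ones described in the headers section below.

import httplib
import yaml

from include import HydrusConstants as HC

host = 'your.repository.address' # placeholder
port = 45871 # placeholder; whatever port the repository listens on

headers = { 'User-Agent' : 'hydrus/' + str( HC.NETWORK_VERSION ) } # repository requests generally also want an Authorization header; see below

connection = httplib.HTTPConnection( host, port )

# begin=0 initialises; on later checks, send the next_begin value the previous update gave you

connection.request( 'GET', '/update?begin=0', headers = headers )

response = connection.getresponse()

update = yaml.safe_load( response.read() ) # the repository answers in YAML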

+

The update duration is currently 100,000 seconds, which is a little under 28 hours.

+

headers

+

All requests (other than '/' and '/favicon') should have something like the following:

+ +

The user-agent doesn't have to be 'hydrus', but the network version afterwards has to match up, or you'll get an error.
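You can see exactly where these come from in ClientConstants.py: AdvancedHTTPConnection.request sets the user-agent, and ConnectionToService._GetHeaders adds the access key when there is one. Pulled out into a tiny self-contained snippet (the access key here is a placeholder):

from include import HydrusConstants as HC

access_key = 'your access key bytes' # placeholder

headers = {}

headers[ 'User-Agent' ] = 'hydrus/' + str( HC.NETWORK_VERSION ) # the network version part must match the server
headers[ 'Authorization' ] = 'hydrus_network ' + access_key.encode( 'hex' ) # only for services that gave you an access key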

+

what about the other requests?

+

I suggest you review the code for information on the other requests. HydrusServer.py does the parsing, and ProcessRequest in the databases does most of the actual magic. ConnectionToService in ClientConstants.py does the client-side request-bundling and response parsing. If you have detailed questions, you can always email me!

+

YAML is very important in the hydrus network. I love it. Just do some googling if you want to learn more, and play around with yaml.safe_dump and yaml.safe_load in the python console to get some hands-on experience.
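For example, a quick round trip with a simple structure of dicts, lists, strings and numbers:

import yaml

original = { 'begin' : 0, 'tags' : [ 'series:futurama', 'creator:range murata' ] }

dumped = yaml.safe_dump( original ) # a plain text string you could send over the wire

print( dumped )

loaded = yaml.safe_load( dumped ) # and straight back to python objects

assert loaded == original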

+
+ + \ No newline at end of file diff --git a/help/upload.png b/help/upload.png new file mode 100755 index 00000000..9e146cb9 Binary files /dev/null and b/help/upload.png differ diff --git a/include/ClientConstants.py b/include/ClientConstants.py new file mode 100755 index 00000000..3c58f795 --- /dev/null +++ b/include/ClientConstants.py @@ -0,0 +1,2890 @@ +import collections +import dircache +import gc +import hashlib +import httplib +import ClientParsers +import HydrusConstants as HC +import HydrusImageHandling +import HydrusMessageHandling +import itertools +import multipart +import os +import random +import sqlite3 +import threading +import time +import threading +import traceback +import urlparse +import yaml +import wx +import zlib + +ID_NULL = wx.NewId() + +# Hue is generally 200, Sat and Lum changes based on need +COLOUR_SELECTED = wx.Colour( 217, 242, 255 ) +COLOUR_SELECTED_DARK = wx.Colour( 1, 17, 26 ) +COLOUR_UNSELECTED = wx.Colour( 223, 227, 230 ) + +COLOUR_MESSAGE = wx.Colour( 230, 246, 255 ) + +LOCAL_FILE_SERVICE_IDENTIFIER = HC.ClientServiceIdentifier( 'local files', HC.LOCAL_FILE, 'local files' ) +LOCAL_TAG_SERVICE_IDENTIFIER = HC.ClientServiceIdentifier( 'local tags', HC.LOCAL_TAG, 'local tags' ) +NULL_SERVICE_IDENTIFIER = HC.ClientServiceIdentifier( '', HC.NULL_SERVICE, 'no service' ) + +SHORTCUT_HELP = '''You can set up many custom shortcuts in file->options->shortcuts. Please check that to see your current mapping. + +Some shortcuts remain hardcoded, however: + +- While Browsing - +Ctrl + A - Select all +Escape - Deselect all +Ctrl + C - Copy selected files to clipboard + +- In Fullscreen - +Shift-LeftClick-Drag - Drag (in Filter) +Ctrl + MouseWheel - Zoom +Z - Zoom Full/Fit''' + +CLIENT_DESCRIPTION = '''This client is the media management application of the hydrus software suite.''' + +COLLECT_BY_S = 0 +COLLECT_BY_SV = 1 +COLLECT_BY_SVC = 2 +NO_COLLECTIONS = 3 + +COLLECTION_CHOICES = [ 'collect by series', 'collect by series-volume', 'collect by series-volume-chapter', 'no collections' ] + +collection_enum_lookup = {} + +collection_enum_lookup[ 'collect by series' ] = COLLECT_BY_S +collection_enum_lookup[ 'collect by series-volume' ] = COLLECT_BY_SV +collection_enum_lookup[ 'collect by series-volume-chapter' ] = COLLECT_BY_SVC +collection_enum_lookup[ 'no collections' ] = NO_COLLECTIONS + +collection_string_lookup = {} + +collection_string_lookup[ COLLECT_BY_S ] = 'collect by series' +collection_string_lookup[ COLLECT_BY_SV ] = 'collect by series-volume' +collection_string_lookup[ COLLECT_BY_SVC ] = 'collect by series-volume-chapter' +collection_string_lookup[ NO_COLLECTIONS ] = 'no collections' + +CONTENT_UPDATE_ADD = 0 +CONTENT_UPDATE_DELETE = 1 +CONTENT_UPDATE_PENDING = 2 +CONTENT_UPDATE_RESCIND_PENDING = 3 +CONTENT_UPDATE_PETITION = 4 +CONTENT_UPDATE_RESCIND_PETITION = 5 +CONTENT_UPDATE_EDIT_LOG = 6 +CONTENT_UPDATE_ARCHIVE = 7 +CONTENT_UPDATE_INBOX = 8 +CONTENT_UPDATE_RATING = 9 +CONTENT_UPDATE_RATINGS_FILTER = 10 + +DISCRIMINANT_INBOX = 0 +DISCRIMINANT_LOCAL = 1 +DISCRIMINANT_NOT_LOCAL = 2 + +DUMPER_NOT_DUMPED = 0 +DUMPER_DUMPED_OK = 1 +DUMPER_RECOVERABLE_ERROR = 2 +DUMPER_UNRECOVERABLE_ERROR = 3 + +FIELD_VERIFICATION_RECAPTCHA = 0 +FIELD_COMMENT = 1 +FIELD_TEXT = 2 +FIELD_CHECKBOX = 3 +FIELD_FILE = 4 +FIELD_THREAD_ID = 5 +FIELD_PASSWORD = 6 + +FIELDS = [ FIELD_VERIFICATION_RECAPTCHA, FIELD_COMMENT, FIELD_TEXT, FIELD_CHECKBOX, FIELD_FILE, FIELD_THREAD_ID, FIELD_PASSWORD ] + +field_enum_lookup = {} + +field_enum_lookup[ 'recaptcha' ] = FIELD_VERIFICATION_RECAPTCHA 
+field_enum_lookup[ 'comment' ] = FIELD_COMMENT +field_enum_lookup[ 'text' ] = FIELD_TEXT +field_enum_lookup[ 'checkbox' ] = FIELD_CHECKBOX +field_enum_lookup[ 'file' ] = FIELD_FILE +field_enum_lookup[ 'thread id' ] = FIELD_THREAD_ID +field_enum_lookup[ 'password' ] = FIELD_PASSWORD + +field_string_lookup = {} + +field_string_lookup[ FIELD_VERIFICATION_RECAPTCHA ] = 'recaptcha' +field_string_lookup[ FIELD_COMMENT ] = 'comment' +field_string_lookup[ FIELD_TEXT ] = 'text' +field_string_lookup[ FIELD_CHECKBOX ] = 'checkbox' +field_string_lookup[ FIELD_FILE ] = 'file' +field_string_lookup[ FIELD_THREAD_ID ] = 'thread id' +field_string_lookup[ FIELD_PASSWORD ] = 'password' + +LOG_ERROR = 0 +LOG_MESSAGE = 1 + +log_string_lookup = {} + +log_string_lookup[ LOG_ERROR ] = 'error' +log_string_lookup[ LOG_MESSAGE ] = 'message' + +RESTRICTION_MIN_RESOLUTION = 0 +RESTRICTION_MAX_RESOLUTION = 1 +RESTRICTION_MAX_FILE_SIZE = 2 +RESTRICTION_ALLOWED_MIMES = 3 + +SERVICE_UPDATE_ACCOUNT = 0 +SERVICE_UPDATE_DELETE_PENDING = 1 +SERVICE_UPDATE_ERROR = 2 +SERVICE_UPDATE_NEXT_BEGIN = 3 +SERVICE_UPDATE_RESET = 4 +SERVICE_UPDATE_REQUEST_MADE = 5 +SERVICE_UPDATE_LAST_CHECK = 6 + +SHUTDOWN_TIMESTAMP_VACUUM = 0 +SHUTDOWN_TIMESTAMP_FATTEN_AC_CACHE = 1 +SHUTDOWN_TIMESTAMP_DELETE_ORPHANS = 2 + +SORT_BY_SMALLEST = 0 +SORT_BY_LARGEST = 1 +SORT_BY_SHORTEST = 2 +SORT_BY_LONGEST = 3 +SORT_BY_NEWEST = 4 +SORT_BY_OLDEST = 5 +SORT_BY_MIME = 6 +SORT_BY_RANDOM = 7 +SORT_BY_LEXICOGRAPHIC_ASC = 8 +SORT_BY_LEXICOGRAPHIC_DESC = 9 +SORT_BY_INCIDENCE_ASC = 10 +SORT_BY_INCIDENCE_DESC = 11 + +SORT_CHOICES = [] + +SORT_CHOICES.append( ( 'system', SORT_BY_SMALLEST ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_LARGEST ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_SHORTEST ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_LONGEST ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_NEWEST ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_OLDEST ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_MIME ) ) +SORT_CHOICES.append( ( 'system', SORT_BY_RANDOM ) ) + +sort_enum_lookup = {} + +sort_enum_lookup[ 'smallest first' ] = SORT_BY_SMALLEST +sort_enum_lookup[ 'largest first' ] = SORT_BY_LARGEST +sort_enum_lookup[ 'shortest first' ] = SORT_BY_SHORTEST +sort_enum_lookup[ 'longest first' ] = SORT_BY_LONGEST +sort_enum_lookup[ 'newest first' ] = SORT_BY_NEWEST +sort_enum_lookup[ 'oldest first' ] = SORT_BY_OLDEST +sort_enum_lookup[ 'order by mime' ] = SORT_BY_MIME +sort_enum_lookup[ 'random order' ] = SORT_BY_RANDOM + +sort_string_lookup = {} + +sort_string_lookup[ SORT_BY_SMALLEST ] = 'smallest first' +sort_string_lookup[ SORT_BY_LARGEST ] = 'largest first' +sort_string_lookup[ SORT_BY_SHORTEST ] = 'shortest first' +sort_string_lookup[ SORT_BY_LONGEST ] = 'longest first' +sort_string_lookup[ SORT_BY_NEWEST ] = 'newest first' +sort_string_lookup[ SORT_BY_OLDEST ] = 'oldest first' +sort_string_lookup[ SORT_BY_MIME ] = 'mime' +sort_string_lookup[ SORT_BY_RANDOM ] = 'random order' + +THUMBNAIL_MARGIN = 2 +THUMBNAIL_BORDER = 1 + +UNKNOWN_ACCOUNT_TYPE = HC.AccountType( 'unknown account', [], ( None, None ) ) + +def AddPaddingToDimensions( dimensions, padding ): + + ( x, y ) = dimensions + + return ( x + padding, y + padding ) + +def GenerateCollectByChoices( sort_by_choices ): + + already_added = set() + + collect_choices = [] + + for ( sort_by_type, namespaces ) in sort_by_choices: + + for i in range( 1, len( namespaces ) ): + + combinations = itertools.combinations( namespaces, i ) + + for combination in combinations: + + combination_set = frozenset( combination ) + + if 
combination_set not in already_added: + + already_added.add( combination_set ) + + collect_choices.append( ( 'collect by ' + '-'.join( combination ), combination ) ) + + + + ''' + namespaces_set = frozenset( namespaces ) + + if namespaces_set not in already_added: + + collect_choices.append( ( 'collect by ' + '-'.join( namespaces ), namespaces ) ) + + already_added.add( namespaces_set ) + + + for i in range( 1, len( namespaces ) ): + + sub_namespaces = namespaces[:-i] + + sub_namespaces_set = frozenset( sub_namespaces ) + + if sub_namespaces_set not in already_added: + + collect_choices.append( ( 'collect by ' + '-'.join( sub_namespaces ), sub_namespaces ) ) + + already_added.add( sub_namespaces_set ) + + + ''' + + collect_choices.sort() + + collect_choices.insert( 0, ( 'no collections', None ) ) + + return collect_choices + +def GenerateDumpMultipartFormDataCTAndBody( fields ): + + m = multipart.Multipart() + + for ( name, type, value ) in fields: + + if type in ( FIELD_TEXT, FIELD_COMMENT, FIELD_PASSWORD, FIELD_VERIFICATION_RECAPTCHA, FIELD_THREAD_ID ): m.field( name, str( value ) ) + elif type == FIELD_CHECKBOX: + + if value: + + ( name, value ) = value.split( '/', 1 ) + + m.field( name, value ) + + + elif type == FIELD_FILE: + + ( hash, mime, file ) = value + + m.file( name, hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ], file, { 'Content-Type' : HC.mime_string_lookup[ mime ] } ) + + + + return m.get() + +def GenerateMultipartFormDataCTAndBodyFromDict( fields ): + + m = multipart.Multipart() + + for ( name, value ) in fields.items(): m.field( name, str( value ) ) + + return m.get() + +def GetMediasTagCount( pool, tag_service_identifier = NULL_SERVICE_IDENTIFIER ): + + all_tags = [] + + for media in pool: + + if media.IsCollection(): all_tags.extend( media.GetSingletonsTags() ) + else: all_tags.append( media.GetTags() ) + + + current_tags_to_count = collections.Counter() + deleted_tags_to_count = collections.Counter() + pending_tags_to_count = collections.Counter() + petitioned_tags_to_count = collections.Counter() + + for tags in all_tags: + + if tag_service_identifier == NULL_SERVICE_IDENTIFIER: ( current, deleted, pending, petitioned ) = tags.GetUnionCDPP() + else: ( current, deleted, pending, petitioned ) = tags.GetCDPP( tag_service_identifier ) + + current_tags_to_count.update( current ) + deleted_tags_to_count.update( pending ) + pending_tags_to_count.update( pending ) + petitioned_tags_to_count.update( petitioned ) + + + return ( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) + + +def GetUnknownAccount(): return HC.Account( 0, UNKNOWN_ACCOUNT_TYPE, 0, None, ( 0, 0 ) ) + +def MediaIntersectCDPPTagServiceIdentifiers( media, service_identifier ): + + all_tag_cdpps = [ m.GetTags().GetCDPP( service_identifier ) for m in media ] + + current = list( HC.IntelligentMassIntersect( ( cdpp[0] for cdpp in all_tag_cdpps ) ) ) + deleted = list( HC.IntelligentMassIntersect( ( cdpp[1] for cdpp in all_tag_cdpps ) ) ) + pending = list( HC.IntelligentMassIntersect( ( cdpp[2] for cdpp in all_tag_cdpps ) ) ) + petitioned = list( HC.IntelligentMassIntersect( ( cdpp[3] for cdpp in all_tag_cdpps ) ) ) + + return ( current, deleted, pending, petitioned ) + +def ParseImportablePaths( raw_paths, include_subdirs = True ): + + file_paths = [] + + if include_subdirs: title = 'Parsing files and subdirectories' + else: title = 'Parsing files' + + progress = wx.ProgressDialog( title, u'Preparing', 1000, None, style=wx.PD_APP_MODAL | wx.PD_AUTO_HIDE | 
wx.PD_CAN_ABORT | wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | wx.PD_REMAINING_TIME ) + + try: + + paths_to_process = raw_paths + + total_paths_to_process = len( paths_to_process ) + + num_processed = 0 + + while len( paths_to_process ) > 0: + + next_paths_to_process = [] + + for path in paths_to_process: + + # would rather use progress.SetRange( total_paths_to_process ) here, but for some reason wx python doesn't support it! + + permill = int( 1000 * ( float( num_processed ) / float( total_paths_to_process ) ) ) + + ( should_continue, skip ) = progress.Update( permill, 'Done ' + str( num_processed ) + '/' + str( total_paths_to_process ) ) + + if not should_continue: + + progress.Destroy() + + return [] + + + if os.path.isdir( path ): + + if include_subdirs: + + subpaths = [ path + os.path.sep + filename for filename in dircache.listdir( path ) ] + + total_paths_to_process += len( subpaths ) + + next_paths_to_process.extend( subpaths ) + + + else: file_paths.append( path ) + + num_processed += 1 + + + paths_to_process = next_paths_to_process + + + except: wx.MessageBox( traceback.format_exc() ) + + progress.Destroy() + + good_paths = [] + odd_paths = [] + + num_file_paths = len( file_paths ) + + progress = wx.ProgressDialog( 'Checking files\' mimetypes', u'Preparing', num_file_paths, None, style=wx.PD_APP_MODAL | wx.PD_AUTO_HIDE | wx.PD_CAN_ABORT | wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | wx.PD_REMAINING_TIME ) + + for ( i, path ) in enumerate( file_paths ): + + ( should_continue, skip ) = progress.Update( i, 'Done ' + str( i ) + '/' + str( num_file_paths ) ) + + if not should_continue: break + + mime = HC.GetMimeFromPath( path ) + + if mime in HC.ALLOWED_MIMES: + + info = os.lstat( path ) + + size = info[6] + + if size > 0: good_paths.append( path ) + + else: odd_paths.append( path ) + + + progress.Destroy() + + if len( odd_paths ) > 0: + + print( 'Because of mime, the client could not import the following files:' ) + for odd_path in odd_paths: print( odd_path ) + + wx.MessageBox( 'The ' + str( len( odd_paths ) ) + ' files that were not jpegs, pngs, bmps or gifs will not be added. If you are interested, their paths have been written to the log.' ) + + + return good_paths + +class AdvancedHTTPConnection(): + + def __init__( self, url = '', scheme = 'http', host = '', port = None, service_identifier = None, is_redirect = False, accept_cookies = False ): + + if len( url ) > 0: + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + + self._is_redirect = is_redirect + + self._scheme = scheme + self._host = host + self._port = port + self._service_identifier = service_identifier + self._accept_cookies = accept_cookies + + self._cookies = {} + + if self._scheme == 'http': self._connection = httplib.HTTPConnection( self._host, self._port ) + else: self._connection = httplib.HTTPSConnection( self._host, self._port ) + + + def close( self ): self._connection.close() + + def connect( self ): self._connection.connect() + + def GetCookies( self ): return self._cookies + + def geturl( self, url, headers = {} ): + + parse_result = urlparse.urlparse( url ) + + request = parse_result.path + + query = parse_result.query + + if query != '': request += '?' 
+ query + + return self.request( 'GET', request, headers = headers ) + + + def request( self, request_type, request, headers = {}, body = None ): + + headers[ 'User-Agent' ] = 'hydrus/' + str( HC.NETWORK_VERSION ) + + if len( self._cookies ) > 0: headers[ 'Cookie' ] = '; '.join( [ k + '=' + v for ( k, v ) in self._cookies.items() ] ) + + try: + + self._connection.request( request_type, request, headers = headers, body = body ) + + response = self._connection.getresponse() + + raw_response = response.read() + + except ( httplib.CannotSendRequest, httplib.BadStatusLine ): + + # for some reason, we can't send a request on the current connection, so let's make a new one! + + try: + + if self._scheme == 'http': self._connection = httplib.HTTPConnection( self._host, self._port ) + else: self._connection = httplib.HTTPSConnection( self._host, self._port ) + + self._connection.request( request_type, request, headers = headers, body = body ) + + response = self._connection.getresponse() + + raw_response = response.read() + + except: + print( traceback.format_exc() ) + raise Exception( 'Could not connect to server' ) + + except: + print( traceback.format_exc() ) + raise Exception( 'Could not connect to server' ) + + if len( raw_response ) > 0: + + if self._accept_cookies: + + for cookie in response.msg.getallmatchingheaders( 'Set-Cookie' ): # msg is a mimetools.Message + + try: + + cookie = cookie.replace( 'Set-Cookie: ', '' ) + + if ';' in cookie: ( cookie, expiry_gumpf ) = cookie.split( ';', 1 ) + + ( k, v ) = cookie.split( '=' ) + + self._cookies[ k ] = v + + except: pass + + + + content_type = response.getheader( 'Content-Type' ) + + if content_type is not None: + + # additional info can be a filename or charset=utf-8 or whatever + + if content_type == 'text/html': + + mime_string = content_type + + try: raw_response = raw_response.decode( 'utf-8' ) + except: pass + + elif '; ' in content_type: + + ( mime_string, additional_info ) = content_type.split( '; ' ) + + if 'charset=' in additional_info: + + # this does utf-8, ISO-8859-4, whatever + + ( gumpf, charset ) = additional_info.split( '=' ) + + try: raw_response = raw_response.decode( charset ) + except: pass + + + else: mime_string = content_type + + if mime_string in HC.mime_enum_lookup and HC.mime_enum_lookup[ mime_string ] == HC.APPLICATION_YAML: + + try: parsed_response = yaml.safe_load( raw_response ) + except Exception as e: raise HC.NetworkVersionException( 'Failed to parse a response object!' + os.linesep + unicode( e ) ) + + else: parsed_response = raw_response + + else: parsed_response = raw_response + + else: parsed_response = raw_response + + if self._service_identifier is not None: + + service_type = self._service_identifier.GetType() + + server_header = response.getheader( 'Server' ) + + service_string = HC.service_string_lookup[ service_type ] + + if server_header is None or service_string not in server_header: + + HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, GetUnknownAccount() ) ) + + raise HC.WrongServiceTypeException( 'Target was not a ' + service_string + '!' ) + + + if '?' in request: request_command = request.split( '?' 
)[0] + else: request_command = request + + if '/' in request_command: request_command = request_command.split( '/' )[1] + + if request_type == 'GET': + + if ( service_type, HC.GET, request_command ) in HC.BANDWIDTH_CONSUMING_REQUESTS: HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_REQUEST_MADE, self._service_identifier, len( raw_response ) ) ) + + elif ( service_type, HC.POST, request_command ) in HC.BANDWIDTH_CONSUMING_REQUESTS: HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_REQUEST_MADE, self._service_identifier, len( body ) ) ) + + + if response.status == 200: return parsed_response + elif response.status == 205: return + elif response.status in ( 301, 302, 303, 307 ): + + location = response.getheader( 'Location' ) + + if location is None: raise Exception( data ) + else: + + if self._is_redirect: raise Exception( 'Too many redirects!' ) + + url = location + + parse_result = urlparse.urlparse( url ) + + redirected_request = parse_result.path + + redirected_query = parse_result.query + + if redirected_query != '': redirected_request += '?' + redirected_query + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme is None or scheme == self._scheme ) and ( request == redirected_request or request in redirected_request or redirected_request in request ): raise Exception( 'Redirection problem' ) + else: + + if host is None or ( host == self._host and port == self._port ): connection = self + else: connection = AdvancedHTTPConnection( url, is_redirect = True ) + + return connection.request( request_type, redirected_request, headers = headers, body = body ) + + + + elif response.status == 304: raise HC.NotModifiedException() + else: + + if self._service_identifier is not None: + + HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ERROR, self._service_identifier, parsed_response ) ) + + if response.status in ( 401, 426 ): HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, GetUnknownAccount() ) ) + + + if response.status == 401: raise HC.PermissionsException( parsed_response ) + elif response.status == 403: raise HC.ForbiddenException( parsed_response ) + elif response.status == 404: raise HC.NotFoundException( parsed_response ) + elif response.status == 426: raise HC.NetworkVersionException( parsed_response ) + elif response.status in ( 500, 501, 502, 503 ): + + try: print( parsed_response ) + except: pass + + raise Exception( parsed_response ) + + else: raise Exception( parsed_response ) + + + +class AutocompleteMatches(): + + def __init__( self, matches ): + + self._matches = matches + + self._matches.sort() + + + def GetMatches( self, search ): return [ match for match in self._matches if HC.SearchEntryMatchesTag( search, match ) ] + +class AutocompleteMatchesCounted(): + + def __init__( self, matches_to_count ): + + self._matches_to_count = matches_to_count + self._matches = self._matches_to_count.keys() + + def cmp_func( x, y ): return cmp( matches_to_count[ x ], matches_to_count[ y ] ) + + self._matches.sort( cmp = cmp_func, reverse = True ) + + + def GetMatches( self, search ): return [ ( match, self._matches_to_count[ match ] ) for match in self._matches if HC.SearchEntryMatchesTag( search, match ) ] + +class Booru( HC.HydrusYAMLBase ): + + yaml_tag = u'!Booru' + + def __init__( self, name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ): + + self._name = 
name + self._search_url = search_url + self._search_separator = search_separator + self._gallery_advance_num = gallery_advance_num + self._thumb_classname = thumb_classname + self._image_id = image_id + self._image_data = image_data + self._tag_classnames_to_namespaces = tag_classnames_to_namespaces + + + def GetData( self ): return ( self._search_url, self._search_separator, self._gallery_advance_num, self._thumb_classname, self._image_id, self._image_data, self._tag_classnames_to_namespaces ) + + def GetGalleryParsingInfo( self ): return ( self._search_url, self._gallery_advance_num, self._search_separator, self._thumb_classname ) + + def GetName( self ): return self._name + + def GetNamespaces( self ): return self._tag_classnames_to_namespaces.values() + +sqlite3.register_adapter( Booru, yaml.safe_dump ) + +DEFAULT_BOORUS = [] + +name = 'gelbooru' +search_url = 'http://gelbooru.com/index.php?page=post&s=list&tags=%tags%&pid=%index%' +search_separator = '+' +gallery_advance_num = 28 +thumb_classname = 'thumb' +image_id = None +image_data = 'Original image' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'safebooru' +search_url = 'http://safebooru.org/index.php?page=post&s=list&tags=%tags%&pid=%index%' +search_separator = '+' +gallery_advance_num = 25 +thumb_classname = 'thumb' +image_id = None +image_data = 'Original image' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'e621' +search_url = 'http://e621.net/post/index?page=%index%&tags=%tags%' +search_separator = '%20' +gallery_advance_num = 1 +thumb_classname = 'thumb blacklisted' +image_id = None +image_data = 'Download' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'rule34@paheal' +search_url = 'http://rule34.paheal.net/post/list/%tags%/%index%' +search_separator = '%20' +gallery_advance_num = 1 +thumb_classname = 'thumb' +image_id = 'main_image' +image_data = None +tag_classnames_to_namespaces = { 'tag_name' : '' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'danbooru' +search_url = 'http://danbooru.donmai.us/post/index?tags=%tags%&commit=Search&page=%index%' +search_separator = '%20' +gallery_advance_num = 1 +thumb_classname = 'thumb blacklisted' +image_id = 'image' +image_data = None +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'mishimmie' +search_url = 
'http://shimmie.katawa-shoujo.com/post/list/%tags%/%index%' +search_separator = '%20' +gallery_advance_num = 1 +thumb_classname = 'thumb' +image_id = 'main_image' +image_data = None +tag_classnames_to_namespaces = { 'tag_name' : '' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'rule34@booru.org' +search_url = 'http://rule34.xxx/index.php?page=post&s=list&tags=%tags%&pid=%index%' +search_separator = '%20' +gallery_advance_num = 25 +thumb_classname = 'thumb' +image_id = None +image_data = 'Original image' +tag_classnames_to_namespaces = { 'tag-type-general' : '' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'furry@booru.org' +search_url = 'http://furry.booru.org/index.php?page=post&s=list&tags=%tags%&pid=%index%' +search_separator = '+' +gallery_advance_num = 25 +thumb_classname = 'thumb' +image_id = None +image_data = 'Original image' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'xbooru' +search_url = 'http://xbooru.com/index.php?page=post&s=list&tags=%tags%&pid=%index%' +search_separator = '+' +gallery_advance_num = 25 +thumb_classname = 'thumb' +image_id = None +image_data = 'Original image' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'konachan' +search_url = 'http://konachan.com/post?page=%index%&tags=%tags%' +search_separator = '+' +gallery_advance_num = 1 +thumb_classname = 'thumb' +image_id = None +image_data = 'View larger version' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +name = 'tbib' +search_url = 'http://tbib.org/index.php?page=post&s=list&tags=%tags%&pid=%index%' +search_separator = '+' +gallery_advance_num = 25 +thumb_classname = 'thumb' +image_id = None +image_data = 'Original image' +tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + +DEFAULT_BOORUS.append( Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) ) + +class CDPPFileServiceIdentifiers(): + + def __init__( self, current, deleted, pending, petitioned ): + + self._current = current + self._deleted = deleted + self._pending = pending + self._petitioned = petitioned + + + def DeletePending( self, service_identifier ): + + self._pending.discard( service_identifier ) + self._petitioned.discard( service_identifier ) + + + def GetCDPP( self ): return ( self._current, self._deleted, self._pending, self._petitioned ) + + def GetCurrent( self ): 
return self._current + def GetCurrentRemote( self ): return self._current - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) ) + + def GetDeleted( self ): return self._deleted + def GetDeletedRemote( self ): return self._deleted - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) ) + + def GetPending( self ): return self._pending + def GetPendingRemote( self ): return self._pending - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) ) + + def GetPetitioned( self ): return self._petitioned + def GetPetitionedRemote( self ): return self._petitioned - set( ( LOCAL_FILE_SERVICE_IDENTIFIER, ) ) + + def HasDownloading( self ): return LOCAL_FILE_SERVICE_IDENTIFIER in self._pending + + def HasLocal( self ): return LOCAL_FILE_SERVICE_IDENTIFIER in self._current + + def ProcessContentUpdate( self, content_update ): + + action = content_update.GetAction() + + service_identifier = content_update.GetServiceIdentifier() + + if action == CONTENT_UPDATE_ADD: + + self._current.add( service_identifier ) + + self._deleted.discard( service_identifier ) + self._pending.discard( service_identifier ) + + elif action == CONTENT_UPDATE_DELETE: + + self._deleted.add( service_identifier ) + + self._current.discard( service_identifier ) + self._petitioned.discard( service_identifier ) + + elif action == CONTENT_UPDATE_PENDING: + + if service_identifier not in self._current: self._pending.add( service_identifier ) + + elif action == CONTENT_UPDATE_PETITION: + + if service_identifier not in self._deleted: self._petitioned.add( service_identifier ) + + elif action == CONTENT_UPDATE_RESCIND_PENDING: self._pending.discard( service_identifier ) + elif action == CONTENT_UPDATE_RESCIND_PETITION: self._petitioned.discard( service_identifier ) + + + def ResetService( self, service_identifier ): + + self._current.discard( service_identifier ) + self._deleted.discard( service_identifier ) + self._petitioned.discard( service_identifier ) + + +class CDPPTagServiceIdentifiers(): + + def __init__( self, tag_service_precedence, service_identifiers_to_cdpp ): + + self._tag_service_precedence = tag_service_precedence + + self._service_identifiers_to_cdpp = service_identifiers_to_cdpp + + self._Recalc() + + + def _Recalc( self ): + + self._current = set() + self._deleted = set() + self._pending = set() + self._petitioned = set() + + t_s_p = list( self._tag_service_precedence ) + + t_s_p.reverse() + + for service_identifier in t_s_p: + + if service_identifier in self._service_identifiers_to_cdpp: + + ( current, deleted, pending, petitioned ) = self._service_identifiers_to_cdpp[ service_identifier ] + + # the difference_update stuff is making active_mappings from tag_service_precedence + + self._current.update( current ) + self._current.difference_update( deleted ) + + self._deleted.update( deleted ) + self._deleted.difference_update( current ) + + self._pending.update( pending ) + self._pending.difference_update( deleted ) + + self._petitioned.update( petitioned ) + self._petitioned.difference_update( current ) + + + + self._creators = set() + self._series = set() + self._titles = set() + self._volumes = set() + self._chapters = set() + self._pages = set() + + for tag in self._current | self._pending: + + if tag.startswith( 'creator:' ): self._creators.add( tag.split( 'creator:', 1 )[1] ) + elif tag.startswith( 'series:' ): self._series.add( tag.split( 'series:', 1 )[1] ) + elif tag.startswith( 'title:' ): self._titles.add( tag.split( 'title:', 1 )[1] ) + elif tag.startswith( 'volume:' ): self._volumes.add( int( tag.split( 'volume:', 1 )[1] ) ) + elif 
tag.startswith( 'chapter:' ): self._chapters.add( int( tag.split( 'chapter:', 1 )[1] ) ) + elif tag.startswith( 'page:' ): self._pages.add( int( tag.split( 'page:', 1 )[1] ) ) + + + + def GetCSTVCP( self ): return ( self._creators, self._series, self._titles, self._volumes, self._chapters, self._pages ) + + def DeletePending( self, service_identifier ): + + if service_identifier in self._service_identifiers_to_cdpp: + + ( current, deleted, pending, petitioned ) = self._service_identifiers_to_cdpp[ service_identifier ] + + if len( pending ) > 0 or len( petitioned ) > 0: + + self._service_identifiers_to_cdpp[ service_identifier ] = ( current, deleted, set(), set() ) + + self._Recalc() + + + + + def GetCDPP( self, service_identifier ): + + if service_identifier in self._service_identifiers_to_cdpp: return self._service_identifiers_to_cdpp[ service_identifier ] + else: return ( set(), set(), set(), set() ) + + + def GetNamespaceSlice( self, namespaces ): return frozenset( [ tag for tag in list( self._current ) + list( self._pending ) if True in ( tag.startswith( namespace + ':' ) for namespace in namespaces ) ] ) + + def GetNumTags( self, tag_service_identifier, include_current_tags = True, include_pending_tags = False ): + + num_tags = 0 + + if tag_service_identifier == NULL_SERVICE_IDENTIFIER: + + if include_current_tags: num_tags += len( self._current ) + if include_pending_tags: num_tags += len( self._pending ) + + else: + + ( current, deleted, pending, petitioned ) = self.GetCDPP( tag_service_identifier ) + + if include_current_tags: num_tags += len( current ) + if include_pending_tags: num_tags += len( pending ) + + + return num_tags + + + def GetServiceIdentifiersToCDPP( self ): return self._service_identifiers_to_cdpp + + def GetUnionCDPP( self ): return ( self._current, self._deleted, self._pending, self._petitioned ) + + def ProcessContentUpdate( self, content_update ): + + service_identifier = content_update.GetServiceIdentifier() + + if service_identifier in self._service_identifiers_to_cdpp: ( current, deleted, pending, petitioned ) = self._service_identifiers_to_cdpp[ service_identifier ] + else: + + ( current, deleted, pending, petitioned ) = ( set(), set(), set(), set() ) + + self._service_identifiers_to_cdpp[ service_identifier ] = ( current, deleted, pending, petitioned ) + + + action = content_update.GetAction() + + if action == CONTENT_UPDATE_ADD: + + tag = content_update.GetInfo() + + current.add( tag ) + + deleted.discard( tag ) + pending.discard( tag ) + + elif action == CONTENT_UPDATE_DELETE: + + tag = content_update.GetInfo() + + deleted.add( tag ) + + current.discard( tag ) + petitioned.discard( tag ) + + elif action == CONTENT_UPDATE_EDIT_LOG: + + edit_log = content_update.GetInfo() + + for ( action, info ) in edit_log: + + if action == CONTENT_UPDATE_ADD: + + tag = info + + current.add( tag ) + + deleted.discard( tag ) + pending.discard( tag ) + + elif action == CONTENT_UPDATE_DELETE: + + tag = info + + deleted.add( tag ) + + current.discard( tag ) + petitioned.discard( tag ) + + elif action == CONTENT_UPDATE_PENDING: + + tag = info + + if tag not in current: pending.add( tag ) + + elif action == CONTENT_UPDATE_RESCIND_PENDING: + + tag = info + + pending.discard( tag ) + + elif action == CONTENT_UPDATE_PETITION: + + ( tag, reason ) = info + + if tag not in deleted: petitioned.add( tag ) + + elif action == CONTENT_UPDATE_RESCIND_PETITION: + + tag = info + + petitioned.discard( tag ) + + + + + self._Recalc() + + + def ResetService( self, service_identifier ): + + if 
service_identifier in self._service_identifiers_to_cdpp: + + ( current, deleted, pending, petitioned ) = self._service_identifiers_to_cdpp[ service_identifier ] + + self._service_identifiers_to_cdpp[ service_identifier ] = ( set(), set(), pending, set() ) + + self._Recalc() + + + +class LocalRatings(): + + # c for current; feel free to rename this stupid thing + + def __init__( self, service_identifiers_to_ratings ): + + self._service_identifiers_to_ratings = service_identifiers_to_ratings + + + def GetRating( self, service_identifier ): + + if service_identifier in self._service_identifiers_to_ratings: return self._service_identifiers_to_ratings[ service_identifier ] + else: return None + + + def GetServiceIdentifiersToRatings( self ): return self._service_identifiers_to_ratings + + def ProcessContentUpdate( self, content_update ): + + service_identifier = content_update.GetServiceIdentifier() + + action = content_update.GetAction() + + if action == CONTENT_UPDATE_RATING: + + rating = content_update.GetInfo() + + if rating is None and service_identifier in self._service_identifiers_to_ratings: del self._service_identifiers_to_ratings[ service_identifier ] + else: self._service_identifiers_to_ratings[ service_identifier ] = rating + + + + def ResetService( self, service_identifier ): + + if service_identifier in self._service_identifiers_to_ratings: del self._service_identifiers_to_ratings[ service_identifier ] + + +class ConnectionToService(): + + def __init__( self, service_identifier, credentials ): + + self._service_identifier = service_identifier + self._credentials = credentials + + try: + + ( host, port ) = self._credentials.GetAddress() + + self._connection = AdvancedHTTPConnection( host = host, port = port, service_identifier = self._service_identifier ) + + self._connection.connect() + + except: + + error_message = 'Could not connect.' + + if self._service_identifier is not None: HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ERROR, self._service_identifier, error_message ) ) + + raise Exception( error_message ) + + + + def _GetHeaders( self ): + + headers = {} + + if self._credentials.HasAccessKey(): + + access_key = self._credentials.GetAccessKey() + + if access_key != '': headers[ 'Authorization' ] = 'hydrus_network ' + access_key.encode( 'hex' ) + + + return headers + + + def _SendRequest( self, request_type, request, request_args = {} ): + + # prepare + + if request_type == HC.GET: + + request_type_string = 'GET' + + request_string = '/' + request + + if 'subject_identifier' in request_args: + + subject_identifier = request_args[ 'subject_identifier' ] + + del request_args[ 'subject_identifier' ] + + if subject_identifier.HasAccessKey(): + + subject_access_key = subject_identifier.GetAccessKey() + + request_args[ 'subject_access_key' ] = subject_access_key.encode( 'hex' ) + + elif subject_identifier.HasAccountId(): + + subject_account_id = subject_identifier.GetAccountId() + + request_args[ 'subject_account_id' ] = subject_account_id + + elif subject_identifier.HasHash(): + + subject_hash = subject_identifier.GetHash() + + request_args[ 'subject_hash' ] = subject_hash.encode( 'hex' ) + + if subject_identifier.HasMapping(): + + ( subject_tag, subject_hash ) = subject_identifier.GetMapping() + + request_args[ 'subject_tag' ] = subject_tag.encode( 'hex' ) + + + + + if 'title' in request_args: + + request_args[ 'title' ] = request_args[ 'title' ].encode( 'hex' ) + + + if len( request_args ) > 0: request_string += '?' 
+ '&'.join( [ key + '=' + str( value ) for ( key, value ) in request_args.items() ] ) + + body = None + + elif request_type == HC.POST: + + request_type_string = 'POST' + + request_string = '/' + request + + if request == 'file': body = request_args[ 'file' ] + else: body = yaml.safe_dump( request_args ) + + + headers = self._GetHeaders() + + # send + + response = self._connection.request( request_type_string, request_string, headers = headers, body = body ) + + return response + + + def Close( self ): + + try: self._connection.close() + except: pass + + + def Get( self, request, **kwargs ): + + response = self._SendRequest( HC.GET, request, kwargs ) + + if request in ( 'accesskeys', 'init' ): + + if request == 'accesskeys': access_keys = response + elif request == 'init': + + access_key = response + + access_keys = ( access_key, ) + + ( host, port ) = self._credentials.GetAddress() + + self._credentials = Credentials( host, port, access_key ) + + edit_log = [ ( 'edit', ( self._service_identifier, ( self._service_identifier, self._credentials ) ) ) ] + + wx.GetApp().Write( 'update_services', edit_log ) + + + with wx.FileDialog( None, style=wx.FD_SAVE, defaultFile = 'access keys.txt' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( os.linesep.join( [ access_key.encode( 'hex' ) for access_key in access_keys ] ) ) + + + + elif request == 'account': + + account = response + + account.MakeFresh() + + HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_ACCOUNT, self._service_identifier, account ) ) + + elif request == 'update': + + update = response + + HC.pubsub.pub( 'service_update_db', ServiceUpdate( SERVICE_UPDATE_NEXT_BEGIN, self._service_identifier, update.GetNextBegin() ) ) + + + return response + + + def Post( self, request, **kwargs ): response = self._SendRequest( HC.POST, request, kwargs ) + +class ContentUpdate(): + + def __init__( self, action, service_identifier, hashes, info = None ): + + self._action = action + self._service_identifier = service_identifier + self._hashes = set( hashes ) + self._info = info + + + def GetAction( self ): return self._action + + def GetHashes( self ): return self._hashes + + def GetInfo( self ): return self._info + + def GetServiceIdentifier( self ): return self._service_identifier + +class CPRemoteRatingsServiceIdentifiers(): + + def __init__( self, service_identifiers_to_cp ): + + self._service_identifiers_to_cp = service_identifiers_to_cp + + + def GetCP( self, service_identifier ): + + if service_identifier in self._service_identifiers_to_cp: return self._service_identifiers_to_cp[ service_identifier ] + else: return ( None, None ) + + + def GetServiceIdentifiersToCP( self ): return self._service_identifiers_to_cp + + def ProcessContentUpdate( self, content_update ): + + service_identifier = content_update.GetServiceIdentifier() + + if service_identifier in self._service_identifiers_to_cp: ( current, pending ) = self._service_identifiers_to_cp[ service_identifier ] + else: + + ( current, pending ) = ( None, None ) + + self._service_identifiers_to_cp[ service_identifier ] = ( current, pending ) + + + action = content_update.GetAction() + + # this may well need work; need to figure out how to set the pending back to None after an upload. 
rescind seems ugly + + if action == CONTENT_UPDATE_ADD: + + rating = content_update.GetInfo() + + current = rating + + elif action == CONTENT_UPDATE_DELETE: + + current = None + + elif action == CONTENT_UPDATE_RESCIND_PENDING: + + pending = None + + elif action == CONTENT_UPDATE_DELETE: + + rating = content_update.GetInfo() + + pending = rating + + + + def ResetService( self, service_identifier ): + + if service_identifier in self._service_identifiers_to_cp: + + ( current, pending ) = self._service_identifiers_to_cp[ service_identifier ] + + self._service_identifiers_to_cp[ service_identifier ] = ( None, pending ) + + + +class Credentials( HC.HydrusYAMLBase ): + + yaml_tag = u'!Credentials' + + def __init__( self, host, port, access_key = None ): + + HC.HydrusYAMLBase.__init__( self ) + + if host == 'localhost': host = '127.0.0.1' + + self._host = host + self._port = port + self._access_key = access_key + + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __hash__( self ): return ( self._host, self._port, self._access_key ).__hash__() + + def __ne__( self, other ): return self.__hash__() != other.__hash__() + + def GetAccessKey( self ): return self._access_key + + def GetAddress( self ): return ( self._host, self._port ) + + def GetConnectionString( self ): + + connection_string = '' + + if self._access_key is not None: connection_string += self._access_key.encode( 'hex' ) + '@' + + connection_string += self._host + ':' + str( self._port ) + + return connection_string + + + def HasAccessKey( self ): return self._access_key is not None and self._access_key is not '' + + def SetAccessKey( self, access_key ): self._access_key = access_key + +class DataCache(): + + def __init__( self, options, cache_size_key ): + + self._options = options + self._cache_size_key = cache_size_key + + self._keys_to_data = {} + self._keys_fifo = [] + + self._total_estimated_memory_footprint = 0 + + + def Clear( self ): + + self._keys_to_data = {} + self._keys_fifo = [] + + self._total_estimated_memory_footprint = 0 + + + def AddData( self, key, data ): + + self._keys_to_data[ key ] = data + + self._keys_fifo.append( key ) + + self._total_estimated_memory_footprint += data.GetEstimatedMemoryFootprint() + + while self._total_estimated_memory_footprint > self._options[ self._cache_size_key ]: + + deletee_key = self._keys_fifo.pop( 0 ) + + deletee_data = self._keys_to_data[ deletee_key ] + + self._total_estimated_memory_footprint -= deletee_data.GetEstimatedMemoryFootprint() + + del self._keys_to_data[ deletee_key ] + + + + def GetData( self, key ): + + self._keys_fifo.remove( key ) + + self._keys_fifo.append( key ) + + return self._keys_to_data[ key ] + + + def HasData( self, key ): return key in self._keys_to_data + +class FileQueryResult(): + + def __init__( self, file_service_identifier, predicates, media_results ): + + self._file_service_identifier = file_service_identifier + self._predicates = predicates + self._hashes_to_media_results = { media_result.GetHash() : media_result for media_result in media_results } + self._hashes = { hash for hash in self._hashes_to_media_results.keys() } + + HC.pubsub.sub( self, 'ProcessContentUpdates', 'content_updates_data' ) + HC.pubsub.sub( self, 'ProcessServiceUpdate', 'service_update_data' ) + + + def __iter__( self ): + + for media_result in self._hashes_to_media_results.values(): yield media_result + + + def __len__( self ): return len( self._hashes ) + + def _Remove( self, hashes ): + + for hash in hashes: + + if hash in 
self._hashes_to_media_results: + + del self._hashes_to_media_results[ hash ] + + + + self._hashes.difference_update( hashes ) + + + def AddMediaResult( self, media_result ): + + hash = media_result.GetHash() + + if hash in self._hashes: return # this is actually important, as sometimes we don't want the media result overwritten + + self._hashes_to_media_results[ hash ] = media_result + + self._hashes.add( hash ) + + + def GetHashes( self ): return self._hashes + + def GetMediaResult( self, hash ): return self._hashes_to_media_results[ hash ] + + def GetMediaResults( self ): return self._hashes_to_media_results.values() + + def ProcessContentUpdates( self, content_updates ): + + for content_update in content_updates: + + action = content_update.GetAction() + + service_identifier = content_update.GetServiceIdentifier() + + service_type = service_identifier.GetType() + + hashes = content_update.GetHashes() + + if action == CONTENT_UPDATE_ARCHIVE: + + if 'system:inbox' in self._predicates: self._Remove( hashes ) + + elif action == CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier: self._Remove( hashes ) + + for hash in self._hashes.intersection( hashes ): + + media_result = self._hashes_to_media_results[ hash ] + + media_result.ProcessContentUpdate( content_update ) + + + + + def ProcessServiceUpdate( self, update ): + + action = update.GetAction() + + service_identifier = update.GetServiceIdentifier() + + if action == SERVICE_UPDATE_DELETE_PENDING: + + for media_result in self._hashes_to_media_results.values(): media_result.DeletePending( service_identifier ) + + elif action == SERVICE_UPDATE_RESET: + + for media_result in self._hashes_to_media_results.values(): media_result.ResetService( service_identifier ) + + + +class FileSearchContext(): + + def __init__( self, file_service_identifier = LOCAL_FILE_SERVICE_IDENTIFIER, tag_service_identifier = NULL_SERVICE_IDENTIFIER, include_current_tags = True, include_pending_tags = True, raw_predicates = [] ): + + self._file_service_identifier = file_service_identifier + self._tag_service_identifier = tag_service_identifier + + self._include_current_tags = include_current_tags + self._include_pending_tags = include_pending_tags + + self._raw_predicates = raw_predicates + + raw_system_predicates = [ predicate for predicate in raw_predicates if predicate.startswith( 'system:' ) ] + + self._system_predicates = FileSystemPredicates( raw_system_predicates ) + + raw_tags = [ predicate for predicate in raw_predicates if not predicate.startswith( 'system:' ) ] + + self._tags_to_include = [ tag for tag in raw_tags if not tag.startswith( '-' ) ] + self._tags_to_exclude = [ tag[1:] for tag in raw_tags if tag.startswith( '-' ) ] + + + def GetFileServiceIdentifier( self ): return self._file_service_identifier + def GetRawPredicates( self ): return self._raw_predicates + def GetSystemPredicates( self ): return self._system_predicates + def GetTagServiceIdentifier( self ): return self._tag_service_identifier + def GetTagsToExclude( self ): return self._tags_to_exclude + def GetTagsToInclude( self ): return self._tags_to_include + def IncludeCurrentTags( self ): return self._include_current_tags + def IncludePendingTags( self ): return self._include_pending_tags + +class FileSystemPredicates(): + + INBOX = 0 + LOCAL = 1 + HASH = 2 + TIMESTAMP = 3 + DURATION = 4 + SIZE = 5 + NUM_TAGS = 6 + WIDTH = 7 + HEIGHT = 8 + RATIO = 9 + REPOSITORIES = 10 + MIME = 11 + + def __init__( self, system_predicates ): + + self._predicates = {} + + 
self._predicates[ self.INBOX ] = [] + self._predicates[ self.LOCAL ] = [] # not using this! + self._predicates[ self.HASH ] = [] + self._predicates[ self.MIME ] = [] + self._predicates[ self.TIMESTAMP ] = [] + self._predicates[ self.DURATION ] = [] + self._predicates[ self.SIZE ] = [] + self._predicates[ self.NUM_TAGS ] = [] + self._predicates[ self.WIDTH ] = [] + self._predicates[ self.HEIGHT ] = [] + self._predicates[ self.RATIO ] = [] + self._predicates[ self.REPOSITORIES ] = [] + + self._inbox = 'system:inbox' in system_predicates + + self._archive = 'system:archive' in system_predicates + + self._local = 'system:local' in system_predicates + + self._not_local = 'system:not local' in system_predicates + + self._num_tags_zero = False + self._num_tags_nonzero = False + + self._hash = None + self._min_size = None + self._size = None + self._max_size = None + self._mimes = None + self._min_timestamp = None + self._max_timestamp = None + self._min_width = None + self._width = None + self._max_width = None + self._min_height = None + self._height = None + self._max_height = None + self._min_duration = None + self._duration = None + self._max_duration = None + + self._limit = None + self._similar_to = None + + self._file_repositories_to_exclude = [] + + self._ratings_predicates = [] + + isin = lambda a, b: a in b + startswith = lambda a, b: a.startswith( b ) + lessthan = lambda a, b: a < b + greaterthan = lambda a, b: a > b + equals = lambda a, b: a == b + about_equals = lambda a, b: a < b * 1.15 and a > b * 0.85 + + for predicate in system_predicates: + + if predicate.startswith( 'system:hash=' ): + + try: + + hash = predicate[12:].decode( 'hex' ) + + self._hash = hash + + except: raise Exception( 'I could not parse the hash predicate.' ) + + + if predicate.startswith( 'system:age' ): + + try: + + condition = predicate[10] + + if condition not in ( '<', '>', u'\u2248' ): raise Exception() + + age = predicate[11:] + + years = 0 + months = 0 + days = 0 + + if 'y' in age: + + ( years, age ) = age.split( 'y' ) + + years = int( years ) + + + if 'm' in age: + + ( months, age ) = age.split( 'm' ) + + months = int( months ) + + + if 'd' in age: + + ( days, age ) = age.split( 'd' ) + + days = int( days ) + + + timestamp = int( time.time() ) - ( ( ( ( ( years * 12 ) + months ) * 30 ) + days ) * 86400 ) + + # this is backwards because we are talking about age, not timestamp + + if condition == '<': self._min_timestamp = timestamp + elif condition == '>': self._max_timestamp = timestamp + elif condition == u'\u2248': + self._min_timestamp = int( timestamp * 0.85 ) + self._max_timestamp = int( timestamp * 1.15 ) + + except: raise Exception( 'I could not parse the age predicate.' ) + + + if predicate.startswith( 'system:mime' ): + + try: + + mime = predicate[12:] + + if mime == 'image': self._mimes = HC.IMAGES + else: self._mimes = ( HC.mime_enum_lookup[ mime ], ) + + except: raise Exception( 'I could not parse the mime predicate.' ) + + + if predicate.startswith( 'system:duration' ): + + try: + + condition = predicate[15] + + if condition not in ( '>', '<', '=', u'\u2248' ): raise Exception() + + duration = int( predicate[16:] ) + + if duration >= 0: + + if condition == '<': self._max_duration = duration + elif condition == '>': self._min_duration = duration + elif condition == '=': self._duration = duration + elif condition == u'\u2248': + self._min_duration = int( duration * 0.85 ) + self._max_duration = int( duration * 1.15 ) + + + except: raise Exception( 'I could not parse the duration predicate.' 
) + + + if predicate.startswith( 'system:rating' ): + + try: + + # system:rating:[service_name][operator][value] + + stuff_i_care_about = predicate[14:] + + operators = [ '<', u'\u2248', '=', '>' ] + + for operator in operators: + + if operator in stuff_i_care_about: + + ( service_name, value ) = stuff_i_care_about.split( operator ) + + self._ratings_predicates.append( ( service_name, operator, value ) ) + + break + + + + except: raise Exception( 'I could not parse the ratio predicate.' ) + + + if predicate.startswith( 'system:ratio' ): + + try: + + condition = predicate[12] + + if condition not in ( '=', u'\u2248' ): raise Exception() + + ratio = predicate[13:] + + ( width, height ) = ratio.split( ':', 1 ) + + width = float( width ) + height = float( height ) + + if width > 0 and height > 0: + + if condition == '=': self._predicates[ self.RATIO ].append( ( equals, width / height ) ) + elif condition == u'\u2248': self._predicates[ self.RATIO ].append( ( about_equals, width / height ) ) + + + except: raise Exception( 'I could not parse the ratio predicate.' ) + + + if predicate.startswith( 'system:size' ): + + try: + + condition = predicate[11] + + if condition not in ( '>', '<', '=', u'\u2248' ): raise Exception() + + size = int( predicate[12:-2] ) + + multiplier = predicate[-2] + + if multiplier == 'k': multiplier = 1000 + elif multiplier == 'K': multiplier = 1024 + elif multiplier == 'm': multiplier = 1000000 + elif multiplier == 'M': multiplier = 1048576 + elif multiplier == 'g': multiplier = 1000000000 + elif multiplier == 'G': multiplier = 1073741824 + else: + + multiplier = 1 + + size = int( predicate[12:-1] ) + + + size = size * multiplier + + bB = predicate[-1] + + if bB not in ( 'b', 'B' ): raise Exception() + + if bB == 'b': size = size / 8 + + if condition == '<': self._max_size = size + elif condition == '>': self._min_size = size + elif condition == '=': self._size = size + elif condition == u'\u2248': + self._min_size = int( size * 0.85 ) + self._max_size = int( size * 1.15 ) + + except: raise Exception( 'I could not parse the size predicate.' ) + + + if predicate.startswith( 'system:numtags' ): + + try: + + condition = predicate[14] + + if condition not in ( '>', '<', '=' ): raise Exception() + + num_tags = int( predicate[15:] ) + + if num_tags >= 0: + + if condition == '<': self._predicates[ self.NUM_TAGS ].append( ( lessthan, num_tags ) ) + elif condition == '>': + + self._predicates[ self.NUM_TAGS ].append( ( greaterthan, num_tags ) ) + + if num_tags == 0: self._num_tags_nonzero = True + + elif condition == '=': + + self._predicates[ self.NUM_TAGS ].append( ( equals, num_tags ) ) + + if num_tags == 0: self._num_tags_zero = True + + + + except: raise Exception( 'I could not parse the numtags predicate.' ) + + + if predicate.startswith( 'system:width' ): + + try: + + condition = predicate[12] + + if condition not in ( '>', '<', '=', u'\u2248' ): raise Exception() + + width = int( predicate[13:] ) + + if width >= 0: + + if condition == '<': self._max_width = width + elif condition == '>': self._min_width = width + elif condition == '=': self._width = width + elif condition == u'\u2248': + self._min_width = int( width * 0.85 ) + self._max_width = int( width * 1.15 ) + + + except: raise Exception( 'I could not parse the width predicate.' 
) + + + if predicate.startswith( 'system:height' ): + + try: + + condition = predicate[13] + + if condition not in ( '>', '<', '=', u'\u2248' ): raise Exception() + + height = int( predicate[14:] ) + + if height >= 0: + + if condition == '<': self._max_height = height + elif condition == '>': self._min_height = height + elif condition == '=': self._height = height + elif condition == u'\u2248': + self._min_height = int( height * 0.85 ) + self._max_height = int( height * 1.15 ) + + + except: raise Exception( 'I could not parse the height predicate.' ) + + + if predicate.startswith( 'system:limit=' ): + + try: self._limit = int( predicate[13:] ) + except: raise Exception( 'I could not parse the limit predicate.' ) + + + if predicate.startswith( 'system:not_uploaded_to:' ): self._file_repositories_to_exclude.append( predicate[23:] ) + + if predicate.startswith( 'system:similar_to=' ): + + try: + + ( hash, max_hamming ) = predicate[18:].split( u'\u2248', 1 ) + + self._similar_to = ( hash.decode( 'hex' ), int( max_hamming ) ) + + except: raise Exception( 'I could not parse the similar to predicate.' ) + + + + + def CanPreFirstRoundLimit( self ): + + if self._limit is None: return False + + if len( self._predicates[ self.RATIO ] ) > 0: return False + + return self.CanPreSecondRoundLimit() + + + def CanPreSecondRoundLimit( self ): + + if self._limit is None: return False + + if len( self._predicates[ self.NUM_TAGS ] ) > 0: return False + + return True + + + def GetFileRepositoryNamesToExclude( self ): return self._file_repositories_to_exclude + + def GetInfo( self ): return ( self._hash, self._min_size, self._size, self._max_size, self._mimes, self._min_timestamp, self._max_timestamp, self._min_width, self._width, self._max_width, self._min_height, self._height, self._max_height, self._min_duration, self._duration, self._max_duration ) + + def GetLimit( self ): return self._limit + + def GetNumTagsInfo( self ): return ( self._num_tags_zero, self._num_tags_nonzero ) + + def GetRatingsPredicates( self ): return self._ratings_predicates + + def GetSimilarTo( self ): return self._similar_to + + def HasSimilarTo( self ): return self._similar_to is not None + + def MustBeArchive( self ): return self._archive + + def MustBeInbox( self ): return self._inbox + + def MustBeLocal( self ): return self._local + + def MustNotBeLocal( self ): return self._not_local + + def OkFirstRound( self, width, height ): + + if False in ( function( float( width ) / float( height ), arg ) for ( function, arg ) in self._predicates[ self.RATIO ] ): return False + + return True + + + def OkSecondRound( self, num_tags ): + + if False in ( function( num_tags, arg ) for ( function, arg ) in self._predicates[ self.NUM_TAGS ] ): return False + + return True + + +class GlobalBMPs(): + + @staticmethod + def STATICInitialise(): + + # these have to be created after the wxApp is instantiated, for silly GDI reasons + + GlobalBMPs.bold_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_bold.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.italic_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_italic.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.underline_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_underline.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + GlobalBMPs.align_left_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_align_left.png', type=wx.BITMAP_TYPE_PNG 
).ConvertToBitmap() + GlobalBMPs.align_center_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_align_center.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.align_right_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_align_right.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.align_justify_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_align_justify.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + GlobalBMPs.indent_less_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_indent_remove.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.indent_more_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'text_indent.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + GlobalBMPs.font_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'font.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.colour_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'color_swatch.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + GlobalBMPs.link_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'link.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.link_break_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'silk icons' + os.path.sep + 'link_break.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + GlobalBMPs.transparent_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'transparent.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.downloading_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'downloading.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.file_repository_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'file_repository_small.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.file_repository_pending_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'file_repository_pending_small.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.file_repository_petitioned_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'file_repository_petitioned_small.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.inbox_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'inbox.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.collection_bmp = wx.Image( HC.STATIC_DIR + os.path.sep + 'collection.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.dump_ok = wx.Image( HC.STATIC_DIR + os.path.sep + 'dump_ok.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.dump_recoverable = wx.Image( HC.STATIC_DIR + os.path.sep + 'dump_recoverable.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + GlobalBMPs.dump_fail = wx.Image( HC.STATIC_DIR + os.path.sep + 'dump_fail.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + +class Imageboard( HC.HydrusYAMLBase ): + + yaml_tag = u'!Imageboard' + + def __init__( self, name, post_url, flood_time, form_fields, restrictions ): + + self._name = name + self._post_url = post_url + self._flood_time = flood_time + self._form_fields = form_fields + self._restrictions = restrictions + + + def IsOkToPost( self, media_result ): + + ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags, file_service_identifiers, local_ratings, remote_ratings ) = media_result.GetInfo() + + if RESTRICTION_MIN_RESOLUTION in self._restrictions: + + ( min_width, min_height ) = self._restrictions[ RESTRICTION_MIN_RESOLUTION ] + + 
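+ # the file must meet the board's minimum resolution to be postable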
if width < min_width or height < min_height: return False + + + if RESTRICTION_MAX_RESOLUTION in self._restrictions: + + ( max_width, max_height ) = self._restrictions[ RESTRICTION_MAX_RESOLUTION ] + + if width > max_width or height > max_height: return False + + + if RESTRICTION_MAX_FILE_SIZE in self._restrictions and size > self._restrictions[ RESTRICTION_MAX_FILE_SIZE ]: return False + + if RESTRICTION_ALLOWED_MIMES in self._restrictions and mime not in self._restrictions[ RESTRICTION_ALLOWED_MIMES ]: return False + + return True + + + def GetBoardInfo( self ): return ( self._post_url, self._flood_time, self._form_fields, self._restrictions ) + + def GetName( self ): return self._name + +sqlite3.register_adapter( Imageboard, yaml.safe_dump ) + +DEFAULT_IMAGEBOARDS = [] + +fourchan_common_form_fields = [] + +fourchan_common_form_fields.append( ( 'resto', FIELD_THREAD_ID, 'thread_id', True ) ) +fourchan_common_form_fields.append( ( 'email', FIELD_TEXT, '', True ) ) +fourchan_common_form_fields.append( ( 'pwd', FIELD_PASSWORD, '', True ) ) +fourchan_common_form_fields.append( ( 'recaptcha_response_field', FIELD_VERIFICATION_RECAPTCHA, '6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc', True ) ) +fourchan_common_form_fields.append( ( 'com', FIELD_COMMENT, '', True ) ) +fourchan_common_form_fields.append( ( 'upfile', FIELD_FILE, '', True ) ) +fourchan_common_form_fields.append( ( 'mode', FIELD_TEXT, 'regist', False ) ) + +fourchan_typical_form_fields = list( fourchan_common_form_fields ) + +fourchan_typical_form_fields.insert( 1, ( 'name', FIELD_TEXT, '', True ) ) +fourchan_typical_form_fields.insert( 3, ( 'sub', FIELD_TEXT, '', True ) ) + +fourchan_anon_form_fields = list( fourchan_common_form_fields ) + +fourchan_anon_form_fields.insert( 1, ( 'name', FIELD_TEXT, '', False ) ) +fourchan_anon_form_fields.insert( 3, ( 'sub', FIELD_TEXT, '', False ) ) + +fourchan_spoiler_form_fields = list( fourchan_typical_form_fields ) + +fourchan_spoiler_form_fields.append( ( 'spoiler/on', FIELD_CHECKBOX, 'False', True ) ) + +GJP = [ HC.IMAGE_GIF, HC.IMAGE_PNG, HC.IMAGE_JPEG ] + +fourchan_typical_restrictions = { RESTRICTION_MAX_FILE_SIZE : 3145728, RESTRICTION_ALLOWED_MIMES : GJP } + +fourchan_imageboards = [] + +fourchan_imageboards.append( Imageboard( '/3/', 'https://sys.4chan.org/3/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/a/', 'https://sys.4chan.org/a/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/adv/', 'https://sys.4chan.org/adv/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/an/', 'https://sys.4chan.org/an/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/b/', 'https://sys.4chan.org/b/post', 75, fourchan_anon_form_fields, { RESTRICTION_MAX_FILE_SIZE : 2097152, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/c/', 'https://sys.4chan.org/c/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/cgl/', 'https://sys.4chan.org/cgl/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/ck/', 'https://sys.4chan.org/ck/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/cm/', 'https://sys.4chan.org/cm/post', 
75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/co/', 'https://sys.4chan.org/co/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/d/', 'https://sys.4chan.org/d/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/diy/', 'https://sys.4chan.org/diy/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/e/', 'https://sys.4chan.org/e/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/fa/', 'https://sys.4chan.org/fa/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/fit/', 'https://sys.4chan.org/fit/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/g/', 'https://sys.4chan.org/g/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/gif/', 'https://sys.4chan.org/gif/post', 75, fourchan_typical_form_fields, { RESTRICTION_MAX_FILE_SIZE : 4194304, RESTRICTION_ALLOWED_MIMES : [ HC.IMAGE_GIF ] } ) ) +fourchan_imageboards.append( Imageboard( '/h/', 'https://sys.4chan.org/h/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/hc/', 'https://sys.4chan.org/hc/post', 75, fourchan_typical_form_fields, { RESTRICTION_MAX_FILE_SIZE : 8388608, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/hm/', 'https://sys.4chan.org/hm/post', 75, fourchan_typical_form_fields, { RESTRICTION_MAX_FILE_SIZE : 8388608, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/hr/', 'https://sys.4chan.org/hr/post', 75, fourchan_typical_form_fields, { RESTRICTION_MAX_FILE_SIZE : 8388608, RESTRICTION_ALLOWED_MIMES : GJP, RESTRICTION_MIN_RESOLUTION : ( 700, 700 ), RESTRICTION_MAX_RESOLUTION : ( 10000, 10000 ) } ) ) +fourchan_imageboards.append( Imageboard( '/int/', 'https://sys.4chan.org/int/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/jp/', 'https://sys.4chan.org/jp/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/k/', 'https://sys.4chan.org/k/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/lit/', 'https://sys.4chan.org/lit/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/m/', 'https://sys.4chan.org/m/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/mlp/', 'https://sys.4chan.org/mlp/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/mu/', 'https://sys.4chan.org/mu/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/n/', 'https://sys.4chan.org/n/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/o/', 'https://sys.4chan.org/o/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( 
'/p/', 'https://sys.4chan.org/p/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/po/', 'https://sys.4chan.org/po/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/pol/', 'https://sys.4chan.org/pol/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/r9k/', 'https://sys.4chan.org/r9k/post', 75, fourchan_spoiler_form_fields, { RESTRICTION_MAX_FILE_SIZE : 2097152, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/s/', 'https://sys.4chan.org/s/post', 75, fourchan_typical_form_fields, { RESTRICTION_MAX_FILE_SIZE : 8388608, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/sci/', 'https://sys.4chan.org/sci/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/soc/', 'https://sys.4chan.org/soc/post', 75, fourchan_anon_form_fields, { RESTRICTION_MAX_FILE_SIZE : 2097152, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/sp/', 'https://sys.4chan.org/sp/post', 75, fourchan_typical_form_fields, { RESTRICTION_MAX_FILE_SIZE : 4194304, RESTRICTION_ALLOWED_MIMES : GJP } ) ) +fourchan_imageboards.append( Imageboard( '/tg/', 'https://sys.4chan.org/tg/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/toy/', 'https://sys.4chan.org/toy/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/trv/', 'https://sys.4chan.org/trv/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/tv/', 'https://sys.4chan.org/tv/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/u/', 'https://sys.4chan.org/u/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/v/', 'https://sys.4chan.org/v/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/vg/', 'https://sys.4chan.org/vg/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/w/', 'https://sys.4chan.org/w/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/wg/', 'https://sys.4chan.org/wg/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/x/', 'https://sys.4chan.org/x/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/y/', 'https://sys.4chan.org/y/post', 75, fourchan_typical_form_fields, fourchan_typical_restrictions ) ) +fourchan_imageboards.append( Imageboard( '/vp/', 'https://sys.4chan.org/vp/post', 75, fourchan_spoiler_form_fields, fourchan_typical_restrictions ) ) + +DEFAULT_IMAGEBOARDS.append( ( '4chan', fourchan_imageboards ) ) + +class Job( threading.Thread ): + + def __init__( self, job_key, name ): + + threading.Thread.__init__( self, name = name ) + + self._job_key = job_key + + + def _NotifyAllDone( self ): pass + + def _NotifyPartDone( self, i ): pass + + def _NotifyStart( self ): pass + + def run( self ): + + pass # think about this more + + +class Log(): + 
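+ # a simple in-memory log: it subscribes to the 'log_message' and 'log_error' pubsubs and stores ( type, source, message, timestamp ) entries
+ # usage sketch, assuming the pubsub pattern used elsewhere in this file: HC.pubsub.pub( 'log_message', 'db', 'hello' ) appends a LOG_MESSAGE entry once the queue is processed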
+ def __init__( self ): + + self._entries = [] + + HC.pubsub.sub( self, 'AddMessage', 'log_message' ) + HC.pubsub.sub( self, 'AddError', 'log_error' ) + + + def __iter__( self ): return self._entries.__iter__() + + def AddError( self, source, message ): self._entries.append( ( LOG_ERROR, source, message, time.time() ) ) + + def AddMessage( self, source, message ): self._entries.append( ( LOG_MESSAGE, source, message, time.time() ) ) + +class MediaResult(): + + def __init__( self, tuple ): + + # hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tag_service_identifiers_cdpp, file_service_identifiers_cdpp, local_ratings, remote_ratings + + self._tuple = tuple + + + def DeletePending( self, service_identifier ): + + service_type = service_identifier.GetType() + + ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tag_service_identifiers_cdpp, file_service_identifiers_cdpp, local_ratings, remote_ratings ) = self._tuple + + if service_type == HC.TAG_REPOSITORY: tag_service_identifiers_cdpp.DeletePending( service_identifier ) + elif service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ): file_service_identifiers_cdpp.DeletePending( service_identifier ) + + + def GetHash( self ): return self._tuple[0] + + def GetDuration( self ): return self._tuple[7] + + def GetInbox( self ): return self._tuple[1] + + def GetFileServiceIdentifiersCDPP( self ): return self._tuple[11] + + def GetMime( self ): return self._tuple[3] + + def GetNumFrames( self ): return self._tuple[8] + + def GetNumWords( self ): return self._tuple[9] + + def GetRatings( self ): return ( self._tuple[12], self._tuple[13] ) + + def GetResolution( self ): return ( self._tuple[5], self._tuple[6] ) + + def GetSize( self ): return self._tuple[2] + + def GetTags( self ): return self._tuple[10] + + def GetTimestamp( self ): return self._tuple[4] + + def GetInfo( self ): return self._tuple + + def ProcessContentUpdate( self, content_update ): + + ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tag_service_identifiers_cdpp, file_service_identifiers_cdpp, local_ratings, remote_ratings ) = self._tuple + + service_identifier = content_update.GetServiceIdentifier() + + service_type = service_identifier.GetType() + + if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): tag_service_identifiers_cdpp.ProcessContentUpdate( content_update ) + elif service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ): + + if service_type == HC.LOCAL_FILE: + + action = content_update.GetAction() + + if action == CONTENT_UPDATE_ADD and not file_service_identifiers_cdpp.HasLocal(): inbox = True + elif action == CONTENT_UPDATE_ARCHIVE: inbox = False + elif action == CONTENT_UPDATE_DELETE: inbox = False + + self._tuple = ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tag_service_identifiers_cdpp, file_service_identifiers_cdpp, local_ratings, remote_ratings ) + + + file_service_identifiers_cdpp.ProcessContentUpdate( content_update ) + + elif service_type in HC.RATINGS_SERVICES: + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): local_ratings.ProcessContentUpdate( content_update ) + else: remote_ratings.ProcessContentUpdate( content_update ) + + + + def ResetService( self, service_identifier ): + + service_type = service_identifier.GetType() + + ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tag_service_identifiers_cdpp, file_service_identifiers_cdpp, local_ratings, 
remote_ratings ) = self._tuple + + if service_type == HC.TAG_REPOSITORY: tag_service_identifiers_cdpp.ResetService( service_identifier ) + elif service_type == HC.FILE_REPOSITORY: file_service_identifiers_cdpp.ResetService( service_identifier ) + + +class MenuEventIdToActionCache(): + + def __init__( self ): + + self._ids_to_actions = {} + self._actions_to_ids = {} + + + def GetAction( self, id ): + + if id in self._ids_to_actions: return self._ids_to_actions[ id ] + else: return None + + + def GetId( self, command, data = None ): + + action = ( command, data ) + + if action not in self._actions_to_ids: + + id = wx.NewId() + + self._ids_to_actions[ id ] = action + self._actions_to_ids[ action ] = id + + + return self._actions_to_ids[ action ] + + +MENU_EVENT_ID_TO_ACTION_CACHE = MenuEventIdToActionCache() + +class RenderedImageCache(): + + def __init__( self, db, options, type ): + + self._options = options + self._type = type + + if self._type == 'fullscreen': self._data_cache = DataCache( options, 'fullscreen_cache_size' ) + elif self._type == 'preview': self._data_cache = DataCache( options, 'preview_cache_size' ) + + self._total_estimated_memory_footprint = 0 + + self._keys_being_rendered = {} + + HC.pubsub.sub( self, 'FinishedRendering', 'finished_rendering' ) + + + def Clear( self ): self._data_cache.Clear() + + def GetImage( self, hash, resolution ): + + try: + + key = ( hash, resolution ) + + if self._data_cache.HasData( key ): return self._data_cache.GetData( key ) + elif key in self._keys_being_rendered: return self._keys_being_rendered[ key ] + else: + + file = wx.GetApp().Read( 'file', hash ) + + image_container = HydrusImageHandling.RenderImageFromFile( file, hash, target_resolution = resolution, synchronous = False ) + + self._keys_being_rendered[ key ] = image_container + + return image_container + + + except: + + print( traceback.format_exc() ) + + raise + + + + def HasImage( self, hash, resolution ): + + key = ( hash, resolution ) + + return self._data_cache.HasData( key ) or key in self._keys_being_rendered + + + def FinishedRendering( self, key ): + + if key in self._keys_being_rendered: + + image_container = self._keys_being_rendered[ key ] + + del self._keys_being_rendered[ key ] + + self._data_cache.AddData( key, image_container ) + + + +class Service( HC.HydrusYAMLBase ): + + yaml_tag = u'!Service' + + def __init__( self, service_identifier ): + + HC.HydrusYAMLBase.__init__( self ) + + self._service_identifier = service_identifier + + + def GetExtraInfo( self ): return None + + def GetServiceIdentifier( self ): return self._service_identifier + +class ServiceLocalRatingLike( Service ): + + yaml_tag = u'!ServiceLocalRatingLike' + + def __init__( self, service_identifier, like, dislike ): + + Service.__init__( self, service_identifier ) + + self._like = like + self._dislike = dislike + + + def GetExtraInfo( self ): return ( self._like, self._dislike ) + +class ServiceLocalRatingNumerical( Service ): + + yaml_tag = u'!ServiceLocalRatingNumerical' + + def __init__( self, service_identifier, lower, upper ): + + Service.__init__( self, service_identifier ) + + self._lower = lower + self._upper = upper + + + def GetExtraInfo( self ): return ( self._lower, self._upper ) + +class ServiceRemote( Service ): + + yaml_tag = u'!ServiceRemote' + + def __init__( self, service_identifier, credentials, last_error ): + + Service.__init__( self, service_identifier ) + + self._credentials = credentials + self._last_error = last_error + + HC.pubsub.sub( self, 'ProcessServiceUpdate', 
'service_update_data' ) + + + def GetConnection( self ): return ConnectionToService( self._service_identifier, self._credentials ) + + def GetCredentials( self ): return self._credentials + + def GetRecentErrorPending( self ): return HC.ConvertTimestampToPrettyPending( self._last_error + 600 ) + + def HasRecentError( self ): return self._last_error + 600 > int( time.time() ) + + def SetCredentials( self, credentials ): self._credentials = credentials + + def ProcessServiceUpdate( self, update ): + + if update.GetServiceIdentifier() == self._service_identifier: + + action = update.GetAction() + + if action == SERVICE_UPDATE_ERROR: self._last_error = int( time.time() ) + elif action == SERVICE_UPDATE_RESET: + + self._service_identifier = update.GetInfo() + + self._last_error = 0 + + + + +class ServiceRemoteRestricted( ServiceRemote ): + + yaml_tag = u'!ServiceRemoteRestricted' + + def __init__( self, service_identifier, credentials, last_error, account ): + + ServiceRemote.__init__( self, service_identifier, credentials, last_error ) + + self._account = account + + + def CanDownload( self ): return self._account.HasPermission( HC.GET_DATA ) and not self.HasRecentError() + + def CanUpload( self ): return self._account.HasPermission( HC.POST_DATA ) and not self.HasRecentError() + + def GetAccount( self ): return self._account + + def GetRecentErrorPending( self ): + + if self._account.HasPermission( HC.GENERAL_ADMIN ): return HC.ConvertTimestampToPrettyPending( self._last_error + 600 ) + else: return HC.ConvertTimestampToPrettyPending( self._last_error + 3600 * 4 ) + + + def HasRecentError( self ): + + if self._account.HasPermission( HC.GENERAL_ADMIN ): return self._last_error + 600 > int( time.time() ) + else: return self._last_error + 3600 * 4 > int( time.time() ) + + + def IsInitialised( self ): + + service_type = self._service_identifier.GetType() + + if service_type == HC.SERVER_ADMIN: return self._credentials.HasAccessKey() + else: return True + + + def ProcessServiceUpdate( self, update ): + + ServiceRemote.ProcessServiceUpdate( self, update ) + + if update.GetServiceIdentifier() == self.GetServiceIdentifier(): + + action = update.GetAction() + + if action == SERVICE_UPDATE_ACCOUNT: + + account = update.GetInfo() + + self._account = account + self._last_error = 0 + + elif action == SERVICE_UPDATE_REQUEST_MADE: + + num_bytes = update.GetInfo() + + self._account.RequestMade( num_bytes ) + + + + +class ServiceRemoteRestrictedRepository( ServiceRemoteRestricted ): + + yaml_tag = u'!ServiceRemoteRestrictedRepository' + + def __init__( self, service_identifier, credentials, last_error, account, first_begin, next_begin ): + + ServiceRemoteRestricted.__init__( self, service_identifier, credentials, last_error, account ) + + self._first_begin = first_begin + self._next_begin = next_begin + + + def CanUpdate( self ): return self._account.HasPermission( HC.GET_DATA ) and not self.HasRecentError() and self.HasUpdateDue() + + def GetFirstBegin( self ): return self._first_begin + + def GetNextBegin( self ): return self._next_begin + + def GetUpdateStatus( self ): + + if not self._account.HasPermission( HC.GET_DATA ): return 'updates on hold' + else: + + if self.CanUpdate(): return HC.ConvertTimestampToPrettySync( self._next_begin ) + else: + + if self.HasRecentError(): return 'due to a previous error, update is delayed - next check ' + self.GetRecentErrorPending() + else: return 'fully synchronised - next update ' + HC.ConvertTimestampToPrettyPending( self._next_begin + HC.UPDATE_DURATION + 1800 ) + 
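+ # an update counts as due once self._next_begin + HC.UPDATE_DURATION plus an 1800 second grace period has passed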
+ + + + def HasUpdateDue( self ): return self._next_begin + HC.UPDATE_DURATION + 1800 < int( time.time() ) + + def SetNextBegin( self, next_begin ): + + if next_begin > self._next_begin: + + if self._first_begin == 0: self._first_begin = next_begin + + self._next_begin = next_begin + + + + def ProcessServiceUpdate( self, update ): + + ServiceRemoteRestricted.ProcessServiceUpdate( self, update ) + + if update.GetServiceIdentifier() == self.GetServiceIdentifier(): + + action = update.GetAction() + + if action == SERVICE_UPDATE_NEXT_BEGIN: + + next_begin = update.GetInfo() + + self.SetNextBegin( next_begin ) + + elif action == SERVICE_UPDATE_RESET: + + self._service_identifier = update.GetInfo() + + self._first_begin = 0 + self._next_begin = 0 + + + + +class ServiceRemoteRestrictedRepositoryRatingLike( ServiceRemoteRestrictedRepository ): + + yaml_tag = u'!ServiceRemoteRestrictedRepositoryRatingLike' + + def __init__( self, service_identifier, credentials, last_error, account, first_begin, next_begin, like, dislike ): + + ServiceRemoteRestrictedRepository.__init__( self, service_identifier, credentials, last_error, account, first_begin, next_begin ) + + self._like = like + self._dislike = dislike + + + def GetExtraInfo( self ): return ( self._like, self._dislike ) + +class ServiceRemoteRestrictedRepositoryRatingNumerical( ServiceRemoteRestrictedRepository ): + + yaml_tag = u'!ServiceRemoteRestrictedRepositoryRatingNumerical' + + def __init__( self, service_identifier, credentials, last_error, account, first_begin, next_begin, lower, upper ): + + ServiceRemoteRestrictedRepository.__init__( self, service_identifier, credentials, last_error, account, first_begin, next_begin ) + + self._lower = lower + self._upper = upper + + + def GetExtraInfo( self ): return ( self._lower, self._upper ) + +class ServiceRemoteRestrictedDepot( ServiceRemoteRestricted ): + + yaml_tag = u'!ServiceRemoteRestrictedDepot' + + def __init__( self, service_identifier, credentials, last_error, account, last_check, check_period ): + + ServiceRemoteRestricted.__init__( self, service_identifier, credentials, last_error, account ) + + self._last_check = last_check + self._check_period = check_period + + + def CanCheck( self ): return self._account.HasPermission( HC.GET_DATA ) and not self.HasRecentError() and self.HasCheckDue() + + def GetExtraInfo( self ): return self._check_period + + def GetLastCheck( self ): return self._last_check + + def GetCheckStatus( self ): + + if not self._account.HasPermission( HC.GET_DATA ): return 'checks on hold' + else: + + if self.CanCheck(): return HC.ConvertTimestampToPrettySync( self._last_check + self._check_period ) + else: + + if self.HasRecentError(): return 'due to a previous error, check is delayed - next attempt ' + self.GetRecentErrorPending() + else: return 'next check ' + HC.ConvertTimestampToPrettyPending( self._last_check + self._check_period ) + + + + + def HasCheckDue( self ): return self._last_check + self._check_period + 5 < int( time.time() ) + + def ProcessServiceUpdate( self, update ): + + ServiceRemoteRestricted.ProcessServiceUpdate( self, update ) + + if update.GetServiceIdentifier() == self.GetServiceIdentifier(): + + action = update.GetAction() + + if action == SERVICE_UPDATE_LAST_CHECK: + + last_check = update.GetInfo() + + self._last_check = last_check + + elif action == SERVICE_UPDATE_RESET: + + self._service_identifier = update.GetInfo() + + self._last_check = 0 + + + + +class ServiceRemoteRestrictedDepotMessage( ServiceRemoteRestrictedDepot ): + + yaml_tag = 
u'!ServiceRemoteRestrictedDepotMessage' + + def __init__( self, service_identifier, credentials, last_error, account, last_check, check_period, contact, private_key, receive_anon ): + + ServiceRemoteRestrictedDepot.__init__( self, service_identifier, credentials, last_error, account, last_check, check_period ) + + self._contact = contact + self._private_key = private_key + self._receive_anon = receive_anon + + + def GetContact( self ): return self._contact + + def GetExtraInfo( self ): return ( self._contact.GetName(), self._check_period, self._private_key, self._receive_anon ) + + def GetPrivateKey( self ): return self._private_key + + def ReceivesAnon( self ): return self._receive_anon + + def Decrypt( self, encrypted_message ): return HydrusMessageHandling.UnpackageDeliveredMessage( encrypted_message, self._private_key ) + +class ServiceUpdate(): + + def __init__( self, action, service_identifier, info = None ): + + self._action = action # make this an enumerated thing, yo + self._service_identifier = service_identifier + self._info = info + + + def GetAction( self ): return self._action + + def GetInfo( self ): return self._info + + def GetServiceIdentifier( self ): return self._service_identifier + +class ThumbnailCache(): + + def __init__( self, db, options ): + + self._db = db + + self._options = options + + self._data_cache = DataCache( options, 'thumbnail_cache_size' ) + + with open( HC.STATIC_DIR + os.path.sep + 'hydrus.png', 'rb' ) as f: self._not_found_file = f.read() + with open( HC.STATIC_DIR + os.path.sep + 'flash.png', 'rb' ) as f: self._flash_file = f.read() + with open( HC.STATIC_DIR + os.path.sep + 'flv.png', 'rb' ) as f: self._flv_file = f.read() + + self._not_found = HydrusImageHandling.GenerateHydrusBitmapFromFile( HydrusImageHandling.GenerateThumbnailFileFromFile( self._not_found_file, self._options[ 'thumbnail_dimensions' ] ) ) + self._flash = HydrusImageHandling.GenerateHydrusBitmapFromFile( HydrusImageHandling.GenerateThumbnailFileFromFile( self._flash_file, self._options[ 'thumbnail_dimensions' ] ) ) + self._flv = HydrusImageHandling.GenerateHydrusBitmapFromFile( HydrusImageHandling.GenerateThumbnailFileFromFile( self._flv_file, self._options[ 'thumbnail_dimensions' ] ) ) + + HC.pubsub.sub( self, 'Clear', 'thumbnail_resize' ) + + + def Clear( self ): + + self._data_cache.Clear() + + self._not_found = HydrusImageHandling.GenerateHydrusBitmapFromFile( HydrusImageHandling.GenerateThumbnailFileFromFile( self._not_found_file, self._options[ 'thumbnail_dimensions' ] ) ) + self._flash = HydrusImageHandling.GenerateHydrusBitmapFromFile( HydrusImageHandling.GenerateThumbnailFileFromFile( self._flash_file, self._options[ 'thumbnail_dimensions' ] ) ) + self._flv = HydrusImageHandling.GenerateHydrusBitmapFromFile( HydrusImageHandling.GenerateThumbnailFileFromFile( self._flv_file, self._options[ 'thumbnail_dimensions' ] ) ) + + + def GetFlashThumbnail( self ): return self._flash + def GetFLVThumbnail( self ): return self._flv + def GetNotFoundThumbnail( self ): return self._not_found + + def GetThumbnail( self, service_identifier, hash ): + + service_identifier_and_hash = ( service_identifier, hash ) + + if not self._data_cache.HasData( service_identifier_and_hash ): + + try: hydrus_bitmap = HydrusImageHandling.GenerateHydrusBitmapFromFile( wx.GetApp().Read( 'thumbnail', hash ) ) + except: + print( traceback.format_exc() ) + return self._not_found + + self._data_cache.AddData( service_identifier_and_hash, hydrus_bitmap ) + + + return self._data_cache.GetData( 
service_identifier_and_hash ) + + + def PrefetchThumbnails( self, hashes ): self._db.PrefetchThumbnails( hashes ) + +class VPTreeNode(): + + def __init__( self, phashes ): + + ghd = HydrusImageHandling.GetHammingDistance + + if len( phashes ) == 1: + + ( self._phash, ) = phashes + self._radius = 0 + + inner_phashes = [] + outer_phashes = [] + + else: + + # we want to choose a good node. + # a good node is one that doesn't overlap with other circles much + + # get a random sample with big lists, to keep cpu costs down + if len( phashes ) > 50: phashes_sample = random.sample( phashes, 50 ) + else: phashes_sample = phashes + + all_nodes_comparisons = { phash1 : [ ( ghd( phash1, phash2 ), phash2 ) for phash2 in phashes_sample if phash2 != phash1 ] for phash1 in phashes_sample } + + for comparisons in all_nodes_comparisons.values(): comparisons.sort() + + # the median of the sorted hamming distances makes a decent radius + + all_nodes_radii = [ ( comparisons[ len( comparisons ) / 2 ], phash ) for ( phash, comparisons ) in all_nodes_comparisons.items() ] + + all_nodes_radii.sort() + + # let's make our node the phash with the smallest predicted radius + + ( ( predicted_radius, whatever ), self._phash ) = all_nodes_radii[ 0 ] + + if len( phashes ) > 50: + + my_hammings = [ ( ghd( self._phash, phash ), phash ) for phash in phashes if phash != self._phash ] + + my_hammings.sort() + + else: my_hammings = all_nodes_comparisons[ self._phash ] + + median_index = len( my_hammings ) / 2 + + ( self._radius, whatever ) = my_hammings[ median_index ] + + # lets bump our index up until we actually get outside the radius + while median_index + 1 < len( my_hammings ) and my_hammings[ median_index + 1 ][0] == self._radius: median_index += 1 + + # now separate my phashes into inside and outside that radius + + inner_phashes = [ phash for ( hamming, phash ) in my_hammings[ : median_index + 1 ] ] + outer_phashes = [ phash for ( hamming, phash ) in my_hammings[ median_index + 1 : ] ] + + + if len( inner_phashes ) == 0: self._inner_node = VPTreeNodeEmpty() + else: self._inner_node = VPTreeNode( inner_phashes ) + + if len( outer_phashes ) == 0: self._outer_node = VPTreeNodeEmpty() + else: self._outer_node = VPTreeNode( outer_phashes ) + + + def __len__( self ): return len( self._inner_node ) + len( self._outer_node ) + 1 + + def GetMatches( self, phash, max_hamming ): + + hamming_distance_to_me = HydrusImageHandling.GetHammingDistance( self._phash, phash ) + + matches = [] + + if hamming_distance_to_me <= max_hamming: matches.append( self._phash ) + + if hamming_distance_to_me <= ( self._radius + max_hamming ): matches.extend( self._inner_node.GetMatches( phash, max_hamming ) ) # i.e. result could be in inner + if hamming_distance_to_me >= ( self._radius - max_hamming ): matches.extend( self._outer_node.GetMatches( phash, max_hamming ) ) # i.e. 
result could be in outer + + return matches + + +class VPTreeNodeEmpty(): + + def __init__( self ): pass + + def __len__( self ): return 0 + + def GetMatches( self, phash, max_hamming ): return [] + \ No newline at end of file diff --git a/include/ClientConstantsMessages.py b/include/ClientConstantsMessages.py new file mode 100755 index 00000000..21c1d812 --- /dev/null +++ b/include/ClientConstantsMessages.py @@ -0,0 +1,505 @@ +import collections +import dircache +import hashlib +import httplib +import ClientConstants as CC +import HydrusConstants as HC +import HydrusImageHandling +import HydrusMessageHandling +import multipart +import os +import random +import sqlite3 +import threading +import time +import threading +import traceback +import urlparse +import yaml +import wx +import zlib + +class Conversation(): + + def __init__( self, identity, conversation_key, subject, messages, drafts, search_context ): + + self._identity = identity + self._conversation_key = conversation_key + self._subject = subject + self._messages = messages + self._drafts = drafts + self._search_context = search_context + + HC.pubsub.sub( self, 'DeleteDraft', 'delete_draft_data' ) + HC.pubsub.sub( self, 'DeleteMessage', 'delete_message' ) + HC.pubsub.sub( self, 'DraftSaved', 'draft_saved' ) + HC.pubsub.sub( self, 'ArchiveConversation', 'archive_conversation_data' ) + HC.pubsub.sub( self, 'InboxConversation', 'inbox_conversation_data' ) + HC.pubsub.sub( self, 'UpdateMessageStatuses', 'message_statuses_data' ) + + + def AddDraft( self, draft ): self._drafts.append( draft ) + + def AddMessage( self, message ): self._messages.append( message ) + + def ArchiveConversation( self, conversation_key ): + + if conversation_key == self._conversation_key: + + self._inbox = False + + for message in self._messages: message.Archive() + + + + def DeleteDraft( self, draft_key ): + + self._drafts = [ draft for draft in self._drafts if draft.GetDraftKey() != draft_key ] + + if len( self._messages ) + len( self._drafts ) == 0: + + HC.pubsub.pub( 'delete_conversation_data', self._conversation_key ) + HC.pubsub.pub( 'delete_conversation_gui', self._conversation_key ) + + + + def DraftSaved( self, draft_key, draft_message ): + + for ( index, draft ) in enumerate( self._drafts ): + + if draft.GetDraftKey() == draft_key: + + self._drafts[ index ] = draft_message + + return + + + + + def DeleteMessage( self, message_key ): + + self._messages = [ message for message in self._messages if message.GetMessageKey() != message_key ] + + if len( self._messages ) + len( self._drafts ) == 0: + + HC.pubsub.pub( 'delete_conversation_data', self._conversation_key ) + HC.pubsub.pub( 'delete_conversation_gui', self._conversation_key ) + + + + def GetConversationKey( self ): return self._conversation_key + + def GetListCtrlTuple( self ): + + if len( self._messages ) > 0: + + first_message = self._messages[0] + last_message = self._messages[-1] + + first_timestamp = first_message.GetTimestamp() + last_timestamp = last_message.GetTimestamp() + + from_name = first_message.GetContactFrom().GetName() + + else: + + first_timestamp = None + last_timestamp = None + + from_name = self._drafts[0].GetContactFrom().GetName() + + + participants = self.GetParticipants() + + num_messages_unread = len( [ message for message in self._messages if ( self._identity, 'sent' ) in message.GetDestinations() ] ) + + inbox = True in ( message.IsInbox() for message in self._messages ) + + return ( self._conversation_key, inbox, self._subject, from_name, participants, len( 
self._messages ), num_messages_unread, first_timestamp, last_timestamp ) + + + def GetMessages( self ): return ( self._messages, self._drafts ) + + def GetMessageKeysWithDestination( self, destination ): return [ message.GetMessageKey() for message in self._messages if message.HasDestination( destination ) ] + + def GetParticipants( self ): + + if len( self._messages ) == 0: return [] + else: + + first_message = self._messages[ 0 ] + + return first_message.GetParticipants() + + + + def GetStartedBy( self ): + + if len( self._messages ) > 0: return self._messages[ 0 ].GetContactFrom() + elif len( self._drafts ) > 0: return self._drafts[ 0 ].GetContactFrom() + else: return None + + + def GetSubject( self ): return self._subject + + def GetUpdated( self ): + + if len( self._messages ) > 0: + + last_message = self._messages[-1] + last_timestamp = last_message.GetTimestamp() + + else: last_timestamp = None + + return last_timestamp + + + def HasMessageKey( self, message_key ): return True in ( message_key == message.GetMessageKey() for message in self._messages ) + + def HasRead( self ): return True in ( message.IsRead( self._identity ) for message in self._messages ) + + def HasUnread( self ): return True in ( message.IsUnread( self._identity ) for message in self._messages ) + + def InboxConversation( self, conversation_key ): + + if conversation_key == self._conversation_key: + + self._inbox = True + + for message in self._messages: message.Inbox() + + + + def IsInbox( self ): return True in ( message.IsInbox() for message in self._messages ) + + def UpdateMessageStatuses( self, message_key, status_updates ): + + for message in self._messages: + + if message_key == message.GetMessageKey(): + + message.UpdateMessageStatuses( status_updates ) + + break + + + + +class Contact( HC.HydrusYAMLBase ): + + yaml_tag = u'!Contact' + + def __init__( self, public_key, name, host, port ): + + HC.HydrusYAMLBase.__init__( self ) + + self._public_key = public_key + self._name = name + self._host = host + self._port = port + + + def __hash__( self ): return self._name.__hash__() + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __ne__( self, other ): return not self.__eq__( other ) + + def Encrypt( self, message ): return HydrusMessageHandling.PackageMessageForDelivery( message, self._public_key ) + + def GetAddress( self ): return ( self._host, self._port ) + + def GetConnection( self ): return CC.ConnectionToService( None, CC.Credentials( self._host, self._port ) ) + + def GetContactKey( self ): + + if self._public_key is None: return None + else: return hashlib.sha256( self._public_key ).digest() + + + def GetInfo( self ): return ( self._public_key, self._name, self._host, self._port ) + + def GetName( self ): return self._name + + def GetPublicKey( self ): return self._public_key + + def HasPublicKey( self ): return self._public_key is not None + +class DraftMessage(): + + def __init__( self, draft_key, conversation_key, subject, contact_from, contacts_names_to, recipients_visible, body, attachment_hashes, is_new = False ): + + self._draft_key = draft_key + self._conversation_key = conversation_key + self._subject = subject + self._contact_from = contact_from + self._contacts_names_to = contacts_names_to + self._recipients_visible = recipients_visible + self._body = body + self._attachment_hashes = attachment_hashes + self._is_new = is_new + + + def __hash__( self ): return self._draft_key.__hash__() + + def __eq__( self, other ): return self.__hash__() == other.__hash__() 
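+ # drafts are identified by their draft_key, so __hash__ and the equality operators all defer to it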
+ + def __ne__( self, other ): return not self.__eq__( other ) + + def GetContactFrom( self ): return self._contact_from + + def GetDraftKey( self ): return self._draft_key + + def GetInfo( self ): return ( self._draft_key, self._conversation_key, self._subject, self._contact_from, self._contacts_names_to, self._recipients_visible, self._body, self._attachment_hashes ) + + def IsNew( self ): return self._is_new + + def IsReply( self ): return self._conversation_key != self._draft_key + + def Saved( self ): self._is_new = False + +class Message(): + + def __init__( self, message_key, contact_from, destinations, timestamp, body, attachment_hashes, inbox ): + + self._message_key = message_key + self._contact_from = contact_from + self._destinations = destinations + self._timestamp = timestamp + self._body = body + self._attachment_hashes = attachment_hashes + self._inbox = inbox + + + def __hash__( self ): return self._message_key.__hash__() + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __ne__( self, other ): return not self.__eq__( other ) + + def Archive( self ): self._inbox = False + + def GetBody( self ): return self._body + def GetContactFrom( self ): return self._contact_from + def GetContactsTo( self ): return [ contact_to for ( contact_to, status ) in self._destinations ] + def GetDestinations( self ): return self._destinations + def GetMessageKey( self ): return self._message_key + def GetParticipants( self ): return [ self._contact_from ] + self.GetContactsTo() + def GetTimestamp( self ): return self._timestamp + + def HasDestination( self, destination ): return destination in self._destinations + + def Inbox( self ): self._inbox = True + + def IsInbox( self ): return self._inbox + def IsRead( self, identity ): return ( identity, 'read' ) in self._destinations + def IsUnread( self, identity ): return ( identity, 'sent' ) in self._destinations + + def Read( self, identity ): self.UpdateMessageStatuses( [ ( identity.GetContactKey(), 'read' ) ] ) + + def UpdateMessageStatuses( self, updates ): + + contact_keys_to_contacts = { contact.GetContactKey() : contact for ( contact, status ) in self._destinations } + + dest_dict = dict( self._destinations ) + + for ( contact_key, status ) in updates: + + if contact_key in contact_keys_to_contacts: + + dest_dict[ contact_keys_to_contacts[ contact_key ] ] = status + + + + self._destinations = dest_dict.items() + + + def Unread( self, identity ): self.UpdateMessageStatuses( [ ( identity.GetContactKey(), 'sent' ) ] ) + +class MessageSearchContext(): + + def __init__( self, identity, raw_predicates = [] ): + + self._identity = identity + + raw_system_predicates = [ predicate for predicate in raw_predicates if predicate.startswith( 'system:' ) ] + + self._system_predicates = MessageSystemPredicates( raw_system_predicates ) + + raw_search_terms = [ predicate for predicate in raw_predicates if not predicate.startswith( 'system:' ) ] + + self._search_terms_to_include = [ search_term for search_term in raw_search_terms if not search_term.startswith( '-' ) ] + self._search_terms_to_exclude = [ search_term[1:] for search_term in raw_search_terms if search_term.startswith( '-' ) ] + + + def GetIdentity( self ): return self._identity + def GetSystemPredicates( self ): return self._system_predicates + def GetTermsToExclude( self ): return self._search_terms_to_exclude + def GetTermsToInclude( self ): return self._search_terms_to_include + +class MessageSystemPredicates(): + + STATUS = 0 + CONTACT_STARTED = 1 + CONTACT_FROM 
= 2 + CONTACT_TO = 3 + TIMESTAMP = 4 + NUM_ATTACHMENTS = 5 + + def __init__( self, system_predicates ): + + self._predicates = {} + + self._predicates[ self.NUM_ATTACHMENTS ] = [] + + self._status = None + + self._contact_from = None + self._contact_to = None + self._contact_started = None + self._min_timestamp = None + self._max_timestamp = None + + self._inbox = 'system:inbox' in system_predicates + + self._archive = 'system:archive' in system_predicates + + self._draft = 'system:draft' in system_predicates + + isin = lambda a, b: a in b + startswith = lambda a, b: a.startswith( b ) + lessthan = lambda a, b: a < b + greaterthan = lambda a, b: a > b + equals = lambda a, b: a == b + about_equals = lambda a, b: a < b * 1.15 and a > b * 0.85 + + for predicate in system_predicates: + + if predicate.startswith( 'system:status' ): + + try: + + status = predicate[14:] + + self._status = status + + except: raise Exception( 'I could not parse the status predicate.' ) + + + if predicate.startswith( 'system:started_by' ): + + try: + + started_by = predicate[18:] + + self._contact_started = started_by + + except: raise Exception( 'I could not parse the started by predicate.' ) + + + if predicate.startswith( 'system:from' ): + + try: + + contact_from = predicate[12:] + + self._contact_from = contact_from + + except: raise Exception( 'I could not parse the contact from predicate.' ) + + + if predicate.startswith( 'system:to' ): + + try: + + contact_to = predicate[10:] + + self._contact_to = contact_to + + except: raise Exception( 'I could not parse the contact to predicate.' ) + + + if predicate.startswith( 'system:age' ): + + try: + + condition = predicate[10] + + if condition not in ( '<', '>', u'\u2248' ): raise Exception() + + age = predicate[11:] + + years = 0 + months = 0 + days = 0 + + if 'y' in age: + + ( years, age ) = age.split( 'y' ) + + years = int( years ) + + + if 'm' in age: + + ( months, age ) = age.split( 'm' ) + + months = int( months ) + + + if 'd' in age: + + ( days, age ) = age.split( 'd' ) + + days = int( days ) + + + timestamp = int( time.time() ) - ( ( ( ( ( years * 12 ) + months ) * 30 ) + days ) * 86400 ) + + # this is backwards because we are talking about age, not timestamp + + if condition == '<': self._min_timestamp = timestamp + elif condition == '>': self._max_timestamp = timestamp + elif condition == u'\u2248': + self._min_timestamp = int( timestamp * 0.85 ) + self._max_timestamp = int( timestamp * 1.15 ) + + except: raise Exception( 'I could not parse the age predicate.' ) + + + if predicate.startswith( 'system:numattachments' ): + + try: + + condition = predicate[21] + + if condition not in ( '>', '<', '=', u'\u2248' ): raise Exception() + + num_attachments = int( predicate[22:] ) + + if num_attachments >= 0: + + if condition == '<': self._predicates[ self.NUM_ATTACHMENTS ].append( ( lessthan, num_attachments ) ) + elif condition == '>': self._predicates[ self.NUM_ATTACHMENTS ].append( ( greaterthan, num_attachments ) ) + elif condition == '=': self._predicates[ self.NUM_ATTACHMENTS ].append( ( equals, num_attachments ) ) + elif condition == u'\u2248': self._predicates[ self.NUM_ATTACHMENTS ].append( ( about_equals, num_attachments ) ) + + + except: raise Exception( 'I could not parse the num attachments predicate.' ) + + + + + def GetInfo( self ): return ( self._inbox, self._archive, self._draft, self._status, self._contact_from, self._contact_to, self._contact_started, self._min_timestamp, self._max_timestamp ) + + # maybe reconfigure this! 
+ # instead of Ok, I could do some real good searching and ANDing with the above predicates + # especially since this is for getting the message_ids, not the whole convo, which will have the rich data + + def Ok( self, num_attachments ): + + if False in ( function( num_attachments, arg ) for ( function, arg ) in self._predicates[ self.NUM_ATTACHMENTS ] ): return False + + return True + + \ No newline at end of file diff --git a/include/ClientController.py b/include/ClientController.py new file mode 100755 index 00000000..77ab6bc9 --- /dev/null +++ b/include/ClientController.py @@ -0,0 +1,227 @@ +import gc +import HydrusConstants as HC +import HydrusImageHandling +import ClientConstants as CC +import ClientDB +import ClientGUI +import os +import threading +import time +import traceback +import wx +import wx.richtext + +ID_ANIMATED_EVENT_TIMER = wx.NewId() +ID_MAINTENANCE_EVENT_TIMER = wx.NewId() + +class Controller( wx.App ): + + def ClearCaches( self ): + + self._thumbnail_cache.Clear() + self._fullscreen_image_cache.Clear() + self._preview_image_cache.Clear() + + + def Clipboard( self, type, data ): + + # need this cause can't do it in a non-gui thread + + if type == 'paths': + + paths = data + + if wx.TheClipboard.Open(): + + data = wx.FileDataObject() + + for path in paths: data.AddFile( path ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: raise Exception( 'Could not get permission to access the clipboard!' ) + + + + def EventAnimatedTimer( self, event ): + + del gc.garbage[:] + + HC.pubsub.pub( 'animated_tick' ) + + + def EventMaintenanceTimer( self, event ): + + if int( time.time() ) - self._last_idle_time > 20 * 60: # 20 mins since last user-initiated db request + + self.MaintainDB() + + + + def EventPubSub( self, event ): + + pubsubs_queue = HC.pubsub.GetQueue() + + ( callable, args, kwargs ) = pubsubs_queue.get() + + try: callable( *args, **kwargs ) + except wx._core.PyDeadObjectError: pass + except TypeError: pass + except Exception as e: + + print( type( e ) ) + print( traceback.format_exc() ) + + + pubsubs_queue.task_done() + + + def Exception( self, exception ): wx.MessageBox( unicode( exception ) ) + + def GetFullscreenImageCache( self ): return self._fullscreen_image_cache + + def GetGUI( self ): return self._gui + + def GetLog( self ): return self._log + + def GetPreviewImageCache( self ): return self._preview_image_cache + + def GetThumbnailCache( self ): return self._thumbnail_cache + + def MaintainDB( self ): + + now = int( time.time() ) + + shutdown_timestamps = self.Read( 'shutdown_timestamps' ) + + if now - shutdown_timestamps[ CC.SHUTDOWN_TIMESTAMP_VACUUM ] > 86400 * 5: self.Write( 'vacuum' ) + if now - shutdown_timestamps[ CC.SHUTDOWN_TIMESTAMP_FATTEN_AC_CACHE ] > 50000: self.Write( 'fatten_autocomplete_cache' ) + if now - shutdown_timestamps[ CC.SHUTDOWN_TIMESTAMP_DELETE_ORPHANS ] > 86400 * 3: self.Write( 'delete_orphans' ) + + + def Message( self, message ): wx.MessageBox( message ) + + def OnInit( self ): + + try: + + self._splash = ClientGUI.FrameSplash() + + self.SetSplashText( 'log' ) + + self._log = CC.Log() + + self.SetSplashText( 'db' ) + + self._db = ClientDB.DB() + + self._options = self._db.Read( 'options', HC.HIGH_PRIORITY ) + + self._tag_service_precedence = self._db.Read( 'tag_service_precedence', HC.HIGH_PRIORITY ) + + self.SetSplashText( 'caches' ) + + self._fullscreen_image_cache = CC.RenderedImageCache( self._db, self._options, 'fullscreen' ) + self._preview_image_cache = CC.RenderedImageCache( self._db, self._options, 
'preview' ) + + self._thumbnail_cache = CC.ThumbnailCache( self._db, self._options ) + + CC.GlobalBMPs.STATICInitialise() + + self.SetSplashText( 'gui' ) + + self._gui = ClientGUI.FrameGUI() + + HC.pubsub.sub( self, 'Exception', 'exception' ) + HC.pubsub.sub( self, 'Message', 'message' ) + HC.pubsub.sub( self, 'Clipboard', 'clipboard' ) + + self.Bind( HC.EVT_PUBSUB, self.EventPubSub ) + + # this is because of some bug in wx C++ that doesn't add these by default + wx.richtext.RichTextBuffer.AddHandler( wx.richtext.RichTextHTMLHandler() ) + wx.richtext.RichTextBuffer.AddHandler( wx.richtext.RichTextXMLHandler() ) + + self.Bind( wx.EVT_TIMER, self.EventAnimatedTimer, id = ID_ANIMATED_EVENT_TIMER ) + + self._animated_event_timer = wx.Timer( self, ID_ANIMATED_EVENT_TIMER ) + self._animated_event_timer.Start( 1000, wx.TIMER_CONTINUOUS ) + + self.SetSplashText( 'starting daemons' ) + + self._db._InitPostGUI() + + self._last_idle_time = 0.0 + + self.Bind( wx.EVT_TIMER, self.EventMaintenanceTimer, id = ID_MAINTENANCE_EVENT_TIMER ) + + self._maintenance_event_timer = wx.Timer( self, ID_MAINTENANCE_EVENT_TIMER ) + self._maintenance_event_timer.Start( 20 * 60000, wx.TIMER_CONTINUOUS ) + + except HC.PermissionException as e: pass + except: + + wx.MessageBox( 'Woah, bad error:' + os.linesep + os.linesep + traceback.format_exc() ) + + try: self._splash.Close() + except: pass + + return False + + + self._splash.Close() + + return True + + + def PrepStringForDisplay( self, text ): + + if self._options[ 'gui_capitalisation' ]: return text + else: return text.lower() + + + def ProcessServerRequest( self, *args, **kwargs ): return self._db.ProcessRequest( *args, **kwargs ) + + def Read( self, action, *args, **kwargs ): + + self._last_idle_time = int( time.time() ) + + if action == 'options': return self._options + elif action == 'tag_service_precedence': return self._tag_service_precedence + elif action == 'file': return self._db.ReadFile( *args, **kwargs ) + elif action == 'thumbnail': return self._db.ReadThumbnail( *args, **kwargs ) + else: return self._db.Read( action, HC.HIGH_PRIORITY, *args, **kwargs ) + + + def SetSplashText( self, text ): + + self._splash.SetText( text ) + self.Yield() # this processes the event queue immediately, so the paint event can occur + + + def WaitUntilGoodTimeToUseGUIThread( self ): + + pubsubs_queue = HC.pubsub.GetQueue() + + while True: + + if HC.shutdown: raise Exception( 'Client shutting down!' 
) + elif pubsubs_queue.qsize() == 0: return + else: time.sleep( 0.04 ) + + + + def Write( self, action, *args, **kwargs ): + + self._last_idle_time = int( time.time() ) + + self._db.Write( action, HC.HIGH_PRIORITY, *args, **kwargs ) + + + def WriteLowPriority( self, action, *args, **kwargs ): + + self._db.Write( action, HC.LOW_PRIORITY, *args, **kwargs ) + + \ No newline at end of file diff --git a/include/ClientDB.py b/include/ClientDB.py new file mode 100755 index 00000000..16f2b900 --- /dev/null +++ b/include/ClientDB.py @@ -0,0 +1,6470 @@ +import collections +import dircache +import hashlib +import httplib +import itertools +import HydrusConstants as HC +import HydrusFlashHandling +import HydrusImageHandling +import HydrusMessageHandling +import HydrusVideoHandling +import HydrusServer +import ClientConstants as CC +import ClientConstantsMessages +import os +import Queue +import random +import shutil +import sqlite3 +import sys +import threading +import time +import traceback +import urlparse +import wx +import yaml + +class FileDB(): + + def _AddThumbnails( self, c, thumbnails ): + + for ( hash, thumbnail ) in thumbnails: + + hash_id = self._GetHashId( c, hash ) + + thumbnail_path_to = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( thumbnail_path_to, 'wb' ) as f: f.write( thumbnail ) + + phash = HydrusImageHandling.GeneratePerceptualHash( thumbnail ) + + c.execute( 'INSERT OR IGNORE INTO perceptual_hashes ( hash_id, phash ) VALUES ( ?, ? );', ( hash_id, sqlite3.Binary( phash ) ) ) + + + self.pub( 'new_thumbnails', [ hash for ( hash, thumbnail ) in thumbnails ] ) + + + def _CopyFiles( self, hashes ): + + if len( hashes ) > 0: + + export_path = HC.TEMP_DIR + + if not os.path.exists( export_path ): os.mkdir( export_path ) + + error_messages = set() + + paths = [] + + for hash in hashes: + + try: + + path_from = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + mime = HC.GetMimeFromPath( path_from ) + + path_to = export_path + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ] + + shutil.copy( path_from, path_to ) + + paths.append( path_to ) + + except Exception as e: error_messages.add( unicode( e ) ) + + + self.pub( 'clipboard', 'paths', paths ) + + if len( error_messages ) > 0: raise Exception( 'Some of the file exports failed with the following error message(s):' + os.linesep + os.linesep.join( error_messages ) ) + + + + def _ExportFiles( self, job_key, hashes, cancel_event ): + + num_hashes = len( hashes ) + + if num_hashes > 0: + + export_path = HC.ConvertPortablePathToAbsPath( self._options[ 'export_path' ] ) + + if export_path is None: + + with wx.DirDialog( None, message='Pick where to extract the files' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: export_path = dlg.GetPath() + else: return + + + + HC.pubsub.pub( 'progress_update', job_key, 0, num_hashes, 'The client is now exporting the files to ' + export_path + ' (0/' + HC.ConvertIntToPrettyString( num_hashes ) + ')' ) + + error_messages = set() + + for ( index, hash ) in enumerate( hashes ): + + try: + + HC.pubsub.pub( 'progress_update', job_key, index, num_hashes, 'The client is now exporting the files to ' + export_path + ' (' + HC.ConvertIntToPrettyString( index + 1 ) + '/' + HC.ConvertIntToPrettyString( num_hashes ) + ')' ) + + path_from = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + mime = HC.GetMimeFromPath( path_from ) + + # could search for some appropriate tags here, convert them to ascii or whatever, and make sure they are unique given the whole 
list + + path_to = export_path + os.path.sep + hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ] + + shutil.copy( path_from, path_to ) + + if cancel_event.isSet(): break + + except Exception as e: error_messages.add( unicode( e ) ) + + + if len( error_messages ) > 0: + + HC.pubsub.pub( 'progress_update', job_key, 1, 1, '' ) + + raise Exception( 'Some of the file exports failed with the following error message(s):' + os.linesep + os.linesep.join( error_messages ) ) + + + HC.pubsub.pub( 'progress_update', job_key, num_hashes, num_hashes, 'done!' ) + + else: HC.pubsub.pub( 'progress_update', job_key, 1, 1, '' ) + + + def _GenerateHashIdsEfficiently( self, c, hashes ): + + hashes_not_in_db = set( hashes ) + + for i in range( 0, len( hashes ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + hashes_subset = hashes[ i : i + 250 ] + + hashes_not_in_db.difference_update( [ hash for ( hash, ) in c.execute( 'SELECT hash FROM hashes WHERE hash IN (' + ','.join( '?' * len( hashes_subset ) ) + ');', [ sqlite3.Binary( hash ) for hash in hashes_subset ] ) ] ) + + + if len( hashes_not_in_db ) > 0: c.executemany( 'INSERT INTO hashes ( hash ) VALUES( ? );', [ ( sqlite3.Binary( hash ), ) for hash in hashes_not_in_db ] ) + + + def _GetFile( self, hash ): + + try: + + with open( HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ), 'rb' ) as f: file = f.read() + + except MemoryError: print( 'Memory error!' ) + except: raise Exception( 'Could not find that file!' ) + + return file + + + def _GetHash( self, c, hash_id ): + + result = c.execute( 'SELECT hash FROM hashes WHERE hash_id = ?;', ( hash_id, ) ).fetchone() + + if result is None: raise Exception( 'File hash error in database' ) + + ( hash, ) = result + + return hash + + + def _GetHashes( self, c, hash_ids ): return [ hash for ( hash, ) in c.execute( 'SELECT hash FROM hashes WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ) ] + + def _GetHashId( self, c, hash ): + + result = c.execute( 'SELECT hash_id FROM hashes WHERE hash = ?;', ( sqlite3.Binary( hash ), ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO hashes ( hash ) VALUES ( ? );', ( sqlite3.Binary( hash ), ) ) + + hash_id = c.lastrowid + + else: ( hash_id, ) = result + + return hash_id + + + def _GetHashIds( self, c, hashes ): + + hash_ids = [] + + if type( hashes ) == type( set() ): hashes = list( hashes ) + + for i in range( 0, len( hashes ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + hashes_subset = hashes[ i : i + 250 ] + + hash_ids.extend( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM hashes WHERE hash IN (' + ','.join( '?' 
* len( hashes_subset ) ) + ');', [ sqlite3.Binary( hash ) for hash in hashes_subset ] ) ] ) + + + if len( hashes ) > len( hash_ids ): + + if len( set( hashes ) ) > len( hash_ids ): + + # must be some new hashes the db has not seen before, so let's generate them as appropriate + + self._GenerateHashIdsEfficiently( c, hashes ) + + hash_ids = self._GetHashIds( c, hashes ) + + + + return hash_ids + + + def _GetHashIdsToHashes( self, c, hash_ids ): return { hash_id : hash for ( hash_id, hash ) in c.execute( 'SELECT hash_id, hash FROM hashes WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ) } + + def _GetThumbnail( self, hash, full_size = False ): + + if full_size: + + path_to = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path_to, 'rb' ) as f: thumbnail = f.read() + + else: + + path_to = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + '_resized' + + if os.path.exists( path_to ): + + with open( path_to, 'rb' ) as f: thumbnail = f.read() + + else: + + path_to_full = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path_to_full, 'rb' ) as f: thumbnail_full = f.read() + + thumbnail_dimensions = self._options[ 'thumbnail_dimensions' ] + + thumbnail = HydrusImageHandling.GenerateThumbnailFileFromFile( thumbnail_full, thumbnail_dimensions ) + + with open( path_to, 'wb' ) as f: f.write( thumbnail ) + + + + return thumbnail + + +class MessageDB(): + + def _AddContact( self, c, contact ): + + ( public_key, name, host, port ) = contact.GetInfo() + + contact_key = contact.GetContactKey() + + if public_key is not None: contact_key = sqlite3.Binary( contact_key ) + + c.execute( 'INSERT OR IGNORE INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? );', ( contact_key, public_key, name, host, port ) ) + + + def _AddMessage( self, c, transport_message, serverside_message_key = None, forced_status = None ): + + ( contact_from, contacts_to, message_key, conversation_key, timestamp, subject, body, files ) = transport_message.GetInfo() + + if contact_from is None or contact_from.GetName() == 'Anonymous': + + contact_id_from = 1 + + else: + + contact_id_from = self._GetContactId( c, contact_from ) + + # changes whatever they want to say their name and public key is to whatever we prefer it to be + contact_from = self._GetContact( c, contact_id_from ) + + public_key = contact_from.GetPublicKey() + + try: transport_message.VerifyIsFromCorrectPerson( public_key ) + except: + + self.pub( 'log_message', 'synchronise messages daemon', 'received a message that did not verify' ) + + return + + + + conversation_id = self._GetConversationId( c, conversation_key, subject ) + + message_id = self._GetMessageId( c, message_key ) + + result = c.execute( 'SELECT 1 FROM messages WHERE message_id = ?;', ( message_id, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT OR IGNORE INTO messages ( conversation_id, message_id, contact_id_from, timestamp ) VALUES ( ?, ?, ?, ? );', ( conversation_id, message_id, contact_id_from, timestamp ) ) + + c.execute( 'INSERT OR IGNORE INTO message_bodies ( docid, body ) VALUES ( ?, ? );', ( message_id, body ) ) + + attachment_hashes = [] + + if len( files ) > 0: + + for file in files: + + try: + + ( result, hash ) = self._ImportFile( c, file, override_deleted = True ) # what if the file fails? 
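+                    # note: the bare 'except: pass' below means a failed _ImportFile call is swallowed silently, so that attachment is just dropped from the message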
+ + attachment_hashes.append( hash ) + + except: pass + + + hash_ids = self._GetHashIds( c, attachment_hashes ) + + c.executemany( 'INSERT OR IGNORE INTO message_attachments ( message_id, hash_id ) VALUES ( ?, ? );', [ ( message_id, hash_id ) for hash_id in hash_ids ] ) + + + if forced_status is None: status = 'sent' + else: status = forced_status + + status_id = self._GetStatusId( c, status ) + + inboxable_contact_ids = { id for ( id, ) in c.execute( 'SELECT contact_id FROM message_depots;' ) } + + inbox = False + + for contact_to in contacts_to: + + contact_id_to = self._GetContactId( c, contact_to ) + + if contact_id_to in inboxable_contact_ids: + + c.execute( 'INSERT OR IGNORE INTO message_inbox ( message_id ) VALUES ( ? );', ( message_id, ) ) + + inbox = True + + + c.execute( 'INSERT OR IGNORE INTO message_destination_map ( message_id, contact_id_to, status_id ) VALUES ( ?, ?, ? );', ( message_id, contact_id_to, status_id ) ) + + + destinations = [ ( contact_to, status ) for contact_to in contacts_to ] + + message = ClientConstantsMessages.Message( message_key, contact_from, destinations, timestamp, body, attachment_hashes, inbox ) + + self.pub( 'new_message', conversation_key, message ) + + + if serverside_message_key is not None: + + serverside_message_id = self._GetMessageId( c, serverside_message_key ) + + c.execute( 'DELETE FROM message_downloads WHERE message_id = ?;', ( serverside_message_id, ) ) + + + + def _AddMessageInfoSince( self, c, service_identifier, serverside_message_keys, statuses, new_last_check ): + + # message_keys + + service_id = self._GetServiceId( c, service_identifier ) + + serverside_message_ids = set( self._GetMessageIds( c, serverside_message_keys ) ) + + c.executemany( 'INSERT OR IGNORE INTO message_downloads ( service_id, message_id ) VALUES ( ?, ? );', [ ( service_id, serverside_message_id ) for serverside_message_id in serverside_message_ids ] ) + + # statuses + + message_keys_dict = {} + statuses_dict = {} + + inserts = [] + + for ( message_key, contact_key, status ) in statuses: + + if message_key in message_keys_dict: message_id = message_keys_dict[ message_key ] + else: + + message_id = self._GetMessageId( c, message_key ) + + message_keys_dict[ message_key ] = message_id + + + if status in statuses_dict: status_id = statuses_dict[ status ] + else: + + status_id = self._GetStatusId( c, status ) + + statuses_dict[ status ] = status_id + + + inserts.append( ( message_id, sqlite3.Binary( contact_key ), status_id ) ) + + + # replace is important here + c.executemany( 'INSERT OR REPLACE INTO incoming_message_statuses ( message_id, contact_key, status_id ) VALUES ( ?, ?, ? );', inserts ) + + # finally: + + c.execute( 'UPDATE message_depots SET last_check = ? 
WHERE service_id = ?;', ( new_last_check, service_id ) ) + + + def _ArchiveConversation( self, c, conversation_key ): + + conversation_id = self._GetMessageId( c, conversation_key ) + + message_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM messages WHERE conversation_id = ?;', ( conversation_id, ) ) ] + + c.execute( 'DELETE FROM message_inbox WHERE message_id IN ' + HC.SplayListForDB( message_ids ) + ';' ) + + self.pub( 'archive_conversation_data', conversation_key ) + self.pub( 'archive_conversation_gui', conversation_key ) + + self._DoStatusNumInbox( c ) + + + def _AssociateContact( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + service = self._GetService( c, service_id ) + + private_key = service.GetPrivateKey() + + public_key = HydrusMessageHandling.GetPublicKey( private_key ) + + contact_key = hashlib.sha256( public_key ).digest() + + contact_id = self._GetContactId( c, service_id ) + + c.execute( 'UPDATE contacts SET contact_key = ?, public_key = ? WHERE contact_id = ?;', ( sqlite3.Binary( contact_key ), public_key, contact_id ) ) + + + def _DeleteConversation( self, c, conversation_key ): + + conversation_id = self._GetMessageId( c, conversation_key ) + + message_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM messages WHERE conversation_id = ?;', ( conversation_id, ) ) ] + + splayed_message_ids = HC.SplayListForDB( message_ids ) + + c.execute( 'DELETE FROM message_keys WHERE message_id IN ' + splayed_message_ids + ';' ) + c.execute( 'DELETE FROM message_bodies WHERE docid IN ' + splayed_message_ids + ';' ) + c.execute( 'DELETE FROM conversation_subjects WHERE docid IN ' + splayed_message_ids + ';' ) + + self.pub( 'delete_conversation_data', conversation_key ) + self.pub( 'delete_conversation_gui', conversation_key ) + + self._DoStatusNumInbox( c ) + + + def _DeleteDraft( self, c, draft_key ): + + message_id = self._GetMessageId( c, draft_key ) + + c.execute( 'DELETE FROM message_keys WHERE message_id = ?;', ( message_id, ) ) + c.execute( 'DELETE FROM message_bodies WHERE docid = ?;', ( message_id, ) ) + c.execute( 'DELETE FROM conversation_subjects WHERE docid = ?;', ( message_id, ) ) + + self.pub( 'delete_draft_data', draft_key ) + self.pub( 'delete_draft_gui', draft_key ) + self.pub( 'notify_check_messages' ) + + + def _DoMessageQuery( self, c, query_key, search_context ): + + identity = search_context.GetIdentity() + + name = identity.GetName() + + contact_id = self._GetContactId( c, identity ) + + system_predicates = search_context.GetSystemPredicates() + + ( inbox, archive, draft, status, contact_from, contact_to, contact_started, min_timestamp, max_timestamp ) = system_predicates.GetInfo() + + if draft: + + draft_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM messages, message_drafts USING ( message_id ) WHERE contact_id_from = ?;', ( contact_id, ) ) ] + + query_message_ids = draft_ids + + else: + + sql_predicates = [ '( contact_id_from = ' + str( contact_id ) + ' OR contact_id_to = ' + str( contact_id ) + ' )' ] + + if name != 'Anonymous': + + service = self._GetService( c, identity ) + + if not service.ReceivesAnon(): sql_predicates.append( 'contact_id_from != 1' ) + + + if status is not None: + + if status == 'unread': status = 'sent' + + status_id = self._GetStatusId( c, status ) + + sql_predicates.append( '( contact_id_to = ' + str( contact_id ) + ' AND status_id = ' + str( status_id ) + ')' ) + + + if contact_from is not None: + + 
contact_id_from = self._GetContactId( c, contact_from ) + + sql_predicates.append( 'contact_id_from = ' + str( contact_id_from ) ) + + + if contact_to is not None: + + contact_id_to = self._GetContactId( c, contact_to ) + + sql_predicates.append( 'contact_id_to = ' + str( contact_id_to ) ) + + + if contact_started is not None: + + contact_id_started = self._GetContactId( c, contact_started ) + + sql_predicates.append( 'conversation_id = message_id AND contact_id_from = ' + str( contact_id_started ) ) + + + if min_timestamp is not None: sql_predicates.append( 'timestamp >= ' + str( min_timestamp ) ) + if max_timestamp is not None: sql_predicates.append( 'timestamp <= ' + str( max_timestamp ) ) + + query_message_ids = { message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM messages, message_destination_map USING ( message_id ) WHERE ' + ' AND '.join( sql_predicates ) + ';' ) } + + if inbox or archive: + + inbox_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM message_inbox, message_destination_map USING ( message_id ) WHERE contact_id_to = ?;', ( contact_id, ) ) ] + + if inbox: query_message_ids.intersection_update( inbox_ids ) + elif archive: query_message_ids.difference_update( inbox_ids ) + + + + for term in search_context.GetTermsToInclude(): + + body_query_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT docid FROM message_bodies WHERE body MATCH ?;', ( term, ) ) ] + subject_query_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT docid FROM conversation_subjects WHERE subject MATCH ?;', ( term, ) ) ] + + query_message_ids.intersection_update( body_query_ids + subject_query_ids ) + + + for term in search_context.GetTermsToExclude(): + + body_query_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT docid FROM message_bodies WHERE body MATCH ?;', ( term, ) ) ] + subject_query_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT docid FROM conversation_subjects WHERE subject MATCH ?;', ( term, ) ) ] + + query_message_ids.difference_update( body_query_ids + subject_query_ids ) + + + conversations = self._GetConversations( c, search_context, query_message_ids ) + + self.pub( 'message_query_done', query_key, conversations ) + + + def _DoStatusNumInbox( self, c ): + + convo_ids = { id for ( id, ) in c.execute( 'SELECT conversation_id FROM messages, message_inbox USING ( message_id );' ) } + + num_inbox = len( convo_ids ) + + if num_inbox == 0: inbox_string = 'inbox empty' + else: inbox_string = str( num_inbox ) + ' in inbox' + + self.pub( 'inbox_status', inbox_string ) + + + def _DraftMessage( self, c, draft_message ): + + ( draft_key, conversation_key, subject, contact_from, contact_names_to, recipients_visible, body, attachment_hashes ) = draft_message.GetInfo() + + old_message_id = self._GetMessageId( c, draft_key ) + + c.execute( 'DELETE FROM message_keys WHERE message_id = ?;', ( old_message_id, ) ) + c.execute( 'DELETE FROM message_bodies WHERE docid = ?;', ( old_message_id, ) ) + c.execute( 'DELETE FROM conversation_subjects WHERE docid = ?;', ( old_message_id, ) ) + + message_id = self._GetMessageId( c, draft_key ) + + conversation_id = self._GetConversationId( c, conversation_key, subject ) + + contact_id_from = self._GetContactId( c, contact_from ) + + c.execute( 'INSERT INTO messages ( conversation_id, message_id, contact_id_from, timestamp ) VALUES ( ?, ?, ?, ? );', ( conversation_id, message_id, contact_id_from, None ) ) + + c.execute( 'INSERT INTO message_bodies ( docid, body ) VALUES ( ?, ? 
);', ( message_id, body ) ) + + status_id = self._GetStatusId( c, 'draft' ) + + contact_ids_to = [ self._GetContactId( c, contact_name_to ) for contact_name_to in contact_names_to ] + + c.executemany( 'INSERT INTO message_destination_map ( message_id, contact_id_to, status_id ) VALUES ( ?, ?, ? );', [ ( message_id, contact_id_to, status_id ) for contact_id_to in contact_ids_to ] ) + + c.execute( 'INSERT INTO message_drafts ( message_id, recipients_visible ) VALUES ( ?, ? );', ( message_id, recipients_visible ) ) + + hash_ids = self._GetHashIds( c, attachment_hashes ) + + c.executemany( 'INSERT INTO message_attachments ( message_id, hash_id ) VALUES ( ?, ? );', [ ( message_id, hash_id ) for hash_id in hash_ids ] ) + + self.pub( 'draft_saved', draft_key, draft_message ) + + + def _FlushMessageStatuses( self, c ): + + incoming_message_statuses = HC.BuildKeyToListDict( [ ( message_id, ( contact_key, status_id ) ) for ( message_id, contact_key, status_id ) in c.execute( 'SELECT message_id, contact_key, status_id FROM incoming_message_statuses, messages USING ( message_id );' ) ] ) + + for ( message_id, status_infos ) in incoming_message_statuses.items(): + + for ( contact_key, status_id ) in status_infos: + + try: + + contact_id_to = self._GetContactId( c, contact_key ) + + c.execute( 'INSERT OR REPLACE INTO message_destination_map ( message_id, contact_id_to, status_id ) VALUES ( ?, ?, ? );', ( message_id, contact_id_to, status_id ) ) + + except: pass + + + c.execute( 'DELETE FROM incoming_message_statuses WHERE message_id = ?;', ( message_id, ) ) + + message_key = self._GetMessageKey( c, message_id ) + + status_updates = [ ( contact_key, self._GetStatus( c, status_id ) ) for ( contact_key, status_id ) in status_infos ] + + self.pub( 'message_statuses_data', message_key, status_updates ) + self.pub( 'message_statuses_gui', message_key, status_updates ) + + + + def _GenerateMessageIdsEfficiently( self, c, message_keys ): + + message_keys_not_in_db = set( message_keys ) + + for i in range( 0, len( message_keys ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + message_keys_subset = message_keys[ i : i + 250 ] + + message_keys_not_in_db.difference_update( [ message_key for ( message_key, ) in c.execute( 'SELECT message_key FROM message_keys WHERE message_key IN (' + ','.join( '?' * len( message_keys_subset ) ) + ');', [ sqlite3.Binary( message_key ) for message_key in message_keys_subset ] ) ] ) + + + if len( message_keys_not_in_db ) > 0: c.executemany( 'INSERT INTO message_keys ( message_key ) VALUES( ? );', [ ( sqlite3.Binary( message_key ), ) for message_key in message_keys_not_in_db ] ) + + + def _GetAutocompleteContacts( self, c, half_complete_name, name_to_exclude = None ): + + # expand this later to do groups as well + + names = [ name for ( name, ) in c.execute( 'SELECT name FROM contacts WHERE name LIKE ? AND name != ? 
AND public_key NOTNULL;', ( half_complete_name + '%', 'Anonymous' ) ) ] + + if name_to_exclude is not None: names = [ name for name in names if name != name_to_exclude ] + + matches = CC.AutocompleteMatches( names ) + + return matches + + + def _GetContact( self, c, parameter ): + + if type( parameter ) == int: ( public_key, name, host, port ) = c.execute( 'SELECT public_key, name, host, port FROM contacts WHERE contact_id = ?;', ( parameter, ) ).fetchone() + elif type( parameter ) in ( str, unicode ): + try: ( public_key, name, host, port ) = c.execute( 'SELECT public_key, name, host, port FROM contacts WHERE contact_key = ?;', ( sqlite3.Binary( parameter ), ) ).fetchone() + except: ( public_key, name, host, port ) = c.execute( 'SELECT public_key, name, host, port FROM contacts WHERE name = ?;', ( parameter, ) ).fetchone() + else: print( type( parameter ) ) + + return ClientConstantsMessages.Contact( public_key, name, host, port ) + + + def _GetContactId( self, c, parameter ): + + if type( parameter ) in ( str, unicode ): + + if parameter == 'Anonymous': return 1 + + try: ( contact_id, ) = c.execute( 'SELECT contact_id FROM contacts WHERE contact_key = ?;', ( sqlite3.Binary( parameter ), ) ).fetchone() + except: ( contact_id, ) = c.execute( 'SELECT contact_id FROM contacts WHERE name = ?;', ( parameter, ) ).fetchone() + + elif type( parameter ) == int: ( contact_id, ) = c.execute( 'SELECT contact_id FROM contacts, message_depots USING ( contact_id ) WHERE service_id = ?;', ( parameter, ) ).fetchone() + elif type( parameter ) == ClientConstantsMessages.Contact: + + contact_key = parameter.GetContactKey() + + name = parameter.GetName() + + if name == 'Anonymous': return 1 + + if contact_key is not None: + + result = c.execute( 'SELECT contact_id FROM contacts WHERE contact_key = ?;', ( sqlite3.Binary( contact_key ), ) ).fetchone() + + if result is None: + + # we have a new contact from an outside source! + # let's generate a name that'll fit into the db + + while c.execute( 'SELECT 1 FROM contacts WHERE name = ?;', ( name, ) ).fetchone() is not None: name += str( random.randint( 0, 9 ) ) + + + else: + + # one of our user-entered contacts that doesn't have a public key yet + + result = c.execute( 'SELECT contact_id FROM contacts WHERE name = ?;', ( name, ) ).fetchone() + + + if result is None: + + public_key = parameter.GetPublicKey() + ( host, port ) = parameter.GetAddress() + + if public_key is not None: contact_key = sqlite3.Binary( contact_key ) + + c.execute( 'INSERT INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? 
);', ( contact_key, public_key, name, host, port ) ) + + contact_id = c.lastrowid + + else: ( contact_id, ) = result + + + return contact_id + + + def _GetContactIdsToContacts( self, c, contact_ids ): return { contact_id : ClientConstantsMessages.Contact( public_key, name, host, port ) for ( contact_id, public_key, name, host, port ) in c.execute( 'SELECT contact_id, public_key, name, host, port FROM contacts WHERE contact_id IN ' + HC.SplayListForDB( contact_ids ) + ';' ) } + + def _GetContactNames( self, c ): return [ name for ( name, ) in c.execute( 'SELECT name FROM contacts;' ) ] + + def _GetConversations( self, c, search_context, query_message_ids ): + + system_predicates = search_context.GetSystemPredicates() + + conversation_ids = { conversation_id for ( conversation_id, ) in c.execute( 'SELECT conversation_id FROM messages WHERE message_id IN ' + HC.SplayListForDB( query_message_ids ) + ';' ) } + + splayed_conversation_ids = HC.SplayListForDB( conversation_ids ) + + conversation_infos = c.execute( 'SELECT message_id, message_key, subject FROM message_keys, conversation_subjects ON message_id = conversation_subjects.docid WHERE message_id IN ' + splayed_conversation_ids + ';' ).fetchall() + + conversation_ids_to_message_infos = HC.BuildKeyToListDict( [ ( conversation_id, ( message_id, contact_id_from, timestamp, body ) ) for ( conversation_id, message_id, contact_id_from, timestamp, body ) in c.execute( 'SELECT conversation_id, message_id, contact_id_from, timestamp, body FROM messages, message_bodies ON message_id = message_bodies.docid WHERE conversation_id IN ' + splayed_conversation_ids + ' ORDER BY timestamp ASC;' ) ] ) + + message_ids = [] + contact_ids = set() + + for message_infos in conversation_ids_to_message_infos.values(): + + message_ids.extend( [ message_id for ( message_id, contact_id_from, timestamp, body ) in message_infos ] ) + contact_ids.update( [ contact_id_from for ( message_id, contact_id_from, timestamp, body ) in message_infos ] ) + + + message_ids_to_message_keys = self._GetMessageIdsToMessageKeys( c, message_ids ) + + splayed_message_ids = HC.SplayListForDB( message_ids ) + + message_ids_to_destination_ids = HC.BuildKeyToListDict( [ ( message_id, ( contact_id_to, status_id ) ) for ( message_id, contact_id_to, status_id ) in c.execute( 'SELECT message_id, contact_id_to, status_id FROM message_destination_map WHERE message_id IN ' + splayed_message_ids + ';' ) ] ) + + messages_ids_to_recipients_visible = { message_id : recipients_visible for ( message_id, recipients_visible ) in c.execute( 'SELECT message_id, recipients_visible FROM message_drafts;' ) } + + status_ids = set() + + for destination_ids in message_ids_to_destination_ids.values(): + + contact_ids.update( [ contact_id_to for ( contact_id_to, status_id ) in destination_ids ] ) + status_ids.update( [ status_id for ( contact_id_to, status_id ) in destination_ids ] ) + + + contact_ids_to_contacts = self._GetContactIdsToContacts( c, contact_ids ) + status_ids_to_statuses = self._GetStatusIdsToStatuses( c, status_ids ) + + message_ids_to_hash_ids = HC.BuildKeyToListDict( c.execute( 'SELECT message_id, hash_id FROM message_attachments WHERE message_id IN ' + splayed_message_ids + ';' ).fetchall() ) + + hash_ids = set() + + for sub_hash_ids in message_ids_to_hash_ids.values(): hash_ids.update( sub_hash_ids ) + + hash_ids_to_hashes = self._GetHashIdsToHashes( c, hash_ids ) + + identity = search_context.GetIdentity() + + inbox_ids = { message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM 
message_inbox;' ) } + + conversations = [] + + for ( conversation_id, conversation_key, subject ) in conversation_infos: + + messages = [] + drafts = [] + + can_add = False + + for ( message_id, contact_id_from, timestamp, body ) in conversation_ids_to_message_infos[ conversation_id ]: + + message_key = message_ids_to_message_keys[ message_id ] + + contact_from = contact_ids_to_contacts[ contact_id_from ] + + attachment_hashes = [ hash_ids_to_hashes[ hash_id ] for hash_id in message_ids_to_hash_ids[ message_id ] ] + + if system_predicates.Ok( len( attachment_hashes ) ): can_add = True + + attachment_hashes.sort() + + destination_ids = message_ids_to_destination_ids[ message_id ] + + if message_id in messages_ids_to_recipients_visible: + + # this is a draft + + contact_names_to = [ contact_ids_to_contacts[ contact_id_to ].GetName() for ( contact_id_to, status_id ) in destination_ids ] + + recipients_visible = messages_ids_to_recipients_visible[ message_id ] + + drafts.append( ClientConstantsMessages.DraftMessage( message_key, conversation_key, subject, contact_from, contact_names_to, recipients_visible, body, attachment_hashes ) ) + + else: + + inbox = message_id in inbox_ids + + destinations = [ ( contact_ids_to_contacts[ contact_id_to ], status_ids_to_statuses[ status_id ] ) for ( contact_id_to, status_id ) in destination_ids ] + + messages.append( ClientConstantsMessages.Message( message_key, contact_from, destinations, timestamp, body, attachment_hashes, inbox ) ) + + + + if can_add: conversations.append( ClientConstantsMessages.Conversation( identity, conversation_key, subject, messages, drafts, search_context ) ) + + + return conversations + + + def _GetConversationId( self, c, conversation_key, subject ): + + result = c.execute( 'SELECT message_id FROM message_keys, conversation_subjects ON message_id = conversation_subjects.docid WHERE message_key = ?;', ( sqlite3.Binary( conversation_key ), ) ).fetchone() + + if result is None: + + conversation_id = self._GetMessageId( c, conversation_key ) + + c.execute( 'INSERT INTO conversation_subjects ( docid, subject ) VALUES ( ?, ? 
);', ( conversation_id, subject ) ) + + else: ( conversation_id, ) = result + + return conversation_id + + + def _GetIdentities( self, c ): + + my_identities = [ ClientConstantsMessages.Contact( public_key, name, host, port ) for ( public_key, name, host, port ) in c.execute( 'SELECT public_key, name, host, port FROM contacts, message_depots USING ( contact_id ) ORDER BY name ASC;' ) ] + + return my_identities + [ self._GetContact( c, 'Anonymous' ) ] + + + def _GetIdentitiesAndContacts( self, c ): + + contacts_info = c.execute( 'SELECT contact_id, public_key, name, host, port FROM contacts ORDER BY name ASC;' ).fetchall() + + identity_ids = { contact_id for ( contact_id, ) in c.execute( 'SELECT contact_id FROM message_depots;' ) } + + identities = [ ClientConstantsMessages.Contact( public_key, name, host, port ) for ( contact_id, public_key, name, host, port ) in contacts_info if contact_id in identity_ids ] + contacts = [ ClientConstantsMessages.Contact( public_key, name, host, port ) for ( contact_id, public_key, name, host, port ) in contacts_info if contact_id not in identity_ids and name != 'Anonymous' ] + + contact_contact_ids = [ contact_id for ( contact_id, public_key, name, host, port ) in contacts_info if contact_id not in identity_ids and name != 'Anonymous' ] + + deletable_names = { name for ( name, ) in c.execute( 'SELECT name FROM contacts WHERE contact_id IN ' + HC.SplayListForDB( contact_contact_ids ) + ' AND NOT EXISTS ( SELECT 1 FROM message_destination_map WHERE contact_id_to = contact_id ) AND NOT EXISTS ( SELECT 1 FROM messages WHERE contact_id_from = contact_id );' ) } + + return ( identities, contacts, deletable_names ) + + + def _GetMessageId( self, c, message_key ): + + result = c.execute( 'SELECT message_id FROM message_keys WHERE message_key = ?;', ( sqlite3.Binary( message_key ), ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO message_keys ( message_key ) VALUES ( ? );', ( sqlite3.Binary( message_key ), ) ) + + message_id = c.lastrowid + + else: ( message_id, ) = result + + return message_id + + + def _GetMessageIds( self, c, message_keys ): + + message_ids = [] + + if type( message_keys ) == type( set() ): message_keys = list( message_keys ) + + for i in range( 0, len( message_keys ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + message_keys_subset = message_keys[ i : i + 250 ] + + message_ids.extend( [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM message_keys WHERE message_key IN (' + ','.join( '?' 
* len( message_keys_subset ) ) + ');', [ sqlite3.Binary( message_key ) for message_key in message_keys_subset ] ) ] ) + + + if len( message_keys ) > len( message_ids ): + + if len( set( message_keys ) ) > len( message_ids ): + + # must be some new messages the db has not seen before, so let's generate them as appropriate + + self._GenerateMessageIdsEfficiently( c, message_keys ) + + message_ids = self._GetMessageIds( c, message_keys ) + + + + return message_ids + + + def _GetMessageIdsToMessages( self, c, message_ids ): return { message_id : message for ( message_id, message ) in c.execute( 'SELECT message_id, message FROM messages WHERE message_id IN ' + HC.SplayListForDB( message_ids ) + ';' ) } + + def _GetMessageIdsToMessageKeys( self, c, message_ids ): return { message_id : message_key for ( message_id, message_key ) in c.execute( 'SELECT message_id, message_key FROM message_keys WHERE message_id IN ' + HC.SplayListForDB( message_ids ) + ';' ) } + + def _GetMessageKey( self, c, message_id ): + + result = c.execute( 'SELECT message_key FROM message_keys WHERE message_id = ?;', ( message_id, ) ).fetchone() + + if result is None: raise Exception( 'Message key error in database' ) + + ( message_key, ) = result + + return message_key + + + def _GetMessageKeysToDownload( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + message_keys = [ message_key for ( message_key, ) in c.execute( 'SELECT message_key FROM message_downloads, message_keys USING ( message_id ) WHERE service_id = ?;', ( service_id, ) ) ] + + return message_keys + + + def _GetMessagesToSend( self, c ): + + status_id = self._GetStatusId( c, 'pending' ) + + message_id_to_contact_ids = HC.BuildKeyToListDict( c.execute( 'SELECT message_id, contact_id_to FROM message_destination_map WHERE status_id = ?;', ( status_id, ) ) ) + + messages_to_send = [ ( self._GetMessageKey( c, message_id ), [ self._GetContact( c, contact_id_to ) for contact_id_to in contact_ids_to ] ) for ( message_id, contact_ids_to ) in message_id_to_contact_ids.items() ] + + return messages_to_send + + + def _GetStatus( self, c, status_id ): + + result = c.execute( 'SELECT status FROM statuses WHERE status_id = ?;', ( status_id, ) ).fetchone() + + if result is None: raise Exception( 'Status error in database' ) + + ( status, ) = result + + return status + + + def _GetStatusId( self, c, status ): + + result = c.execute( 'SELECT status_id FROM statuses WHERE status = ?;', ( status, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO statuses ( status ) VALUES ( ? 
);', ( status, ) ) + + status_id = c.lastrowid + + else: ( status_id, ) = result + + return status_id + + + def _GetStatusIdsToStatuses( self, c, status_ids ): return { status_id : status for ( status_id, status ) in c.execute( 'SELECT status_id, status FROM statuses WHERE status_id IN ' + HC.SplayListForDB( status_ids ) + ';' ) } + + def _GetTransportMessage( self, c, message_key ): + + message_id = self._GetMessageId( c, message_key ) + + ( conversation_id, contact_id_from, timestamp ) = c.execute( 'SELECT conversation_id, contact_id_from, timestamp FROM messages WHERE message_id = ?;', ( message_id, ) ).fetchone() + + contact_ids_to = [ contact_id_to for ( contact_id_to, ) in c.execute( 'SELECT contact_id_to FROM message_destination_map WHERE message_id = ?;', ( message_id, ) ) ] + + ( subject, ) = c.execute( 'SELECT subject FROM conversation_subjects WHERE docid = ?;', ( conversation_id, ) ).fetchone() + + ( body, ) = c.execute( 'SELECT body FROM message_bodies WHERE docid = ?;', ( message_id, ) ).fetchone() + + attachment_hashes = [ hash for ( hash, ) in c.execute( 'SELECT hash FROM message_attachments, hashes USING ( hash_id ) WHERE message_id = ?;', ( message_id, ) ) ] + + attachment_hashes.sort() + + files = [ self._GetFile( c, hash ) for hash in attachment_hashes ] + + conversation_key = self._GetMessageKey( c, conversation_id ) + + contact_from = self._GetContact( c, contact_id_from ) + + contacts_to = [ self._GetContact( c, contact_id_to ) for contact_id_to in contact_ids_to ] + + if contact_from.GetName() == 'Anonymous': + + contact_from = None + message_depot = None + private_key = None + + else: + + message_depot = self._GetService( c, contact_from ) + private_key = message_depot.GetPrivateKey() + + + if conversation_key == message_key: conversation_key = None + + message = HydrusMessageHandling.Message( conversation_key, contact_from, contacts_to, subject, body, timestamp, files = files, private_key = private_key ) + + return message + + + def _GetTransportMessagesFromDraft( self, c, draft_message ): + + ( draft_key, conversation_key, subject, contact_from, contact_names_to, recipients_visible, body, attachment_hashes ) = draft_message.GetInfo() + + ( xml, html ) = yaml.safe_load( body ) + + body = html + + files = [ self._GetFile( c, hash ) for hash in attachment_hashes ] + + contact_id_from = self._GetContactId( c, contact_from ) + + if contact_from.GetName() == 'Anonymous': + + contact_from = None + message_depot = None + private_key = None + + else: + + message_depot = self._GetService( c, contact_from ) + private_key = message_depot.GetPrivateKey() + + + timestamp = int( time.time() ) + + contacts_to = [ self._GetContact( c, contact_name_to ) for contact_name_to in contact_names_to ] + + if conversation_key == draft_key: conversation_key = None + + if recipients_visible: messages = [ HydrusMessageHandling.Message( conversation_key, contact_from, contacts_to, subject, body, timestamp, files = files, private_key = private_key ) ] + else: messages = [ HydrusMessageHandling.Message( conversation_key, contact_from, [ contact_to ], subject, body, timestamp, files = files, private_key = private_key ) for contact_to in contacts_to ] + + return messages + + + def _InboxConversation( self, c, conversation_key ): + + conversation_id = self._GetMessageId( c, conversation_key ) + + inserts = c.execute( 'SELECT message_id FROM messages WHERE conversation_id = ?;', ( conversation_id, ) ).fetchall() + + c.executemany( 'INSERT OR IGNORE INTO message_inbox ( message_id ) VALUES ( ? 
);', inserts ) + + self.pub( 'inbox_conversation_data', conversation_key ) + self.pub( 'inbox_conversation_gui', conversation_key ) + + self._DoStatusNumInbox( c ) + + + def _UpdateContacts( self, c, edit_log ): + + for ( action, details ) in edit_log: + + if action == 'add': + + contact = details + + self._AddContact( c, contact ) + + elif action == 'delete': + + name = details + + result = c.execute( 'SELECT 1 FROM contacts WHERE name = ? AND NOT EXISTS ( SELECT 1 FROM message_destination_map WHERE contact_id_to = contact_id ) AND NOT EXISTS ( SELECT 1 FROM messages WHERE contact_id_from = contact_id );', ( name, ) ).fetchone() + + if result is not None: c.execute( 'DELETE FROM contacts WHERE name = ?;', ( name, ) ) + + elif action == 'edit': + + ( old_name, contact ) = details + + try: + + contact_id = self._GetContactId( c, old_name ) + + ( public_key, name, host, port ) = contact.GetInfo() + + contact_key = contact.GetContactKey() + + if public_key is not None: contact_key = sqlite3.Binary( contact_key ) + + c.execute( 'UPDATE contacts SET contact_key = ?, public_key = ?, name = ?, host = ?, port = ? WHERE contact_id = ?;', ( contact_key, public_key, name, host, port, contact_id ) ) + + except: pass + + + + self.pub( 'notify_new_contacts' ) + + + def _UpdateMessageStatuses( self, c, message_key, status_updates ): + + message_id = self._GetMessageId( c, message_key ) + + updates = [] + + for ( contact_key, status ) in status_updates: + + contact_id = self._GetContactId( c, contact_key ) + status_id = self._GetStatusId( c, status ) + + updates.append( ( contact_id, status_id ) ) + + + c.executemany( 'UPDATE message_destination_map SET status_id = ? WHERE contact_id_to = ? AND message_id = ?;', [ ( status_id, contact_id, message_id ) for ( contact_id, status_id ) in updates ] ) + + self.pub( 'message_statuses_data', message_key, status_updates ) + self.pub( 'message_statuses_gui', message_key, status_updates ) + self.pub( 'notify_check_messages' ) + + +class RatingDB(): + + def _GetRatingsMediaResult( self, c, service_identifier, min, max ): + + service_id = self._GetServiceId( c, service_identifier ) + + half_point = ( min + max ) / 2 + + tighter_min = ( min + half_point ) / 2 + tighter_max = ( max + half_point ) / 2 + + # I know this is horrible, ordering by random, but I can't think of a better way to do it right now + result = c.execute( 'SELECT hash_id FROM local_ratings, files_info USING ( hash_id ) WHERE local_ratings.service_id = ? AND files_info.service_id = ? AND rating BETWEEN ? AND ? ORDER BY RANDOM() LIMIT 1;', ( service_id, self._local_file_service_id, tighter_min, tighter_max ) ).fetchone() + + if result is None: result = c.execute( 'SELECT hash_id FROM local_ratings, files_info USING ( hash_id ) WHERE local_ratings.service_id = ? AND files_info.service_id = ? AND rating BETWEEN ? AND ? 
ORDER BY RANDOM() LIMIT 1;', ( service_id, self._local_file_service_id, min, max ) ).fetchone() + + if result is None: return None + else: + + ( hash_id, ) = result + + search_context = CC.FileSearchContext() + + ( media_result, ) = self._GetMediaResults( c, search_context, set( ( hash_id, ) ) ) + + return media_result + + + + def _GetRatingsFilter( self, c, service_identifier, hashes ): + + service_id = self._GetServiceId( c, service_identifier ) + + hash_ids = self._GetHashIds( c, hashes ) + + empty_rating = lambda: ( 0.0, 1.0 ) + + ratings_filter = collections.defaultdict( empty_rating ) + + ratings_filter.update( ( ( hash, ( min, max ) ) for ( hash, min, max ) in c.execute( 'SELECT hash, min, max FROM ratings_filter, hashes USING ( hash_id ) WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) ) ) + + return ratings_filter + + + # leave this until I am more sure of how it'll work remotely + # pending is involved here too + def _UpdateRemoteRatings( self, c, service_identfier, ratings ): + + service_id = self._GetServiceId( c, service_identifier ) + + hashes = [ hash for ( hash, count, rating ) in ratings ] + + hash_ids = self._GetHashIds( c, hashes ) + + c.execute( 'DELETE FROM ratings WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) + + c.executemany( 'INSERT INTO ratings ( service_id, hash_id, count, rating, score ) VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, self._GetHashId( c, hash ), count, rating, HC.CalculateScoreFromRating( count, rating ) ) for ( hash, count, rating ) in ratings if count > 0 ] ) + + # these need count and score in + #self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_RATING_REMOTE, service_identifier, ( hash, ), rating ) for ( hash, rating ) in ratings ] ) + #self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_RATING_REMOTE, service_identifier, ( hash, ), rating ) for ( hash, rating ) in ratings ] ) + + +class TagDB(): + + def _GenerateTagIdsEfficiently( self, c, tags ): + + namespaced_tags = [ tag.split( ':', 1 ) for tag in tags if ':' in tag ] + + namespaces = [ namespace for ( namespace, tag ) in namespaced_tags ] + + tags = [ tag for tag in tags if ':' not in tag ] + [ tag for ( namespace, tag ) in namespaced_tags ] + + namespaces_not_in_db = set( namespaces ) + + for i in range( 0, len( namespaces ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + namespaces_subset = namespaces[ i : i + 250 ] + + namespaces_not_in_db.difference_update( [ namespace for ( namespace, ) in c.execute( 'SELECT namespace FROM namespaces WHERE namespace IN (' + ','.join( '?' * len( namespaces_subset ) ) + ');', [ namespace for namespace in namespaces_subset ] ) ] ) + + + if len( namespaces_not_in_db ) > 0: c.executemany( 'INSERT INTO namespaces( namespace ) VALUES( ? );', [ ( namespace, ) for namespace in namespaces_not_in_db ] ) + + tags_not_in_db = set( tags ) + + for i in range( 0, len( tags ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + tags_subset = tags[ i : i + 250 ] + + tags_not_in_db.difference_update( [ tag for ( tag, ) in c.execute( 'SELECT tag FROM tags WHERE tag IN (' + ','.join( '?' * len( tags_subset ) ) + ');', [ tag for tag in tags_subset ] ) ] ) + + + if len( tags_not_in_db ) > 0: c.executemany( 'INSERT INTO tags( tag ) VALUES( ? 
);', [ ( tag, ) for tag in tags_not_in_db ] ) + + + def _GetNamespaceTag( self, c, namespace_id, tag_id ): + + result = c.execute( 'SELECT tag FROM tags WHERE tag_id = ?;', ( tag_id, ) ).fetchone() + + if result is None: raise Exception( 'Tag error in database' ) + + ( tag, ) = result + + if namespace_id == 1: return tag + else: + + result = c.execute( 'SELECT namespace FROM namespaces WHERE namespace_id = ?;', ( namespace_id, ) ).fetchone() + + if result is None: raise Exception( 'Namespace error in database' ) + + ( namespace, ) = result + + return namespace + ':' + tag + + + + def _GetNamespaceIdTagId( self, c, tag ): + + tag = HC.CleanTag( tag ) + + if ':' in tag: + + ( namespace, tag ) = tag.split( ':', 1 ) + + result = c.execute( 'SELECT namespace_id FROM namespaces WHERE namespace = ?;', ( namespace, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO namespaces ( namespace ) VALUES ( ? );', ( namespace, ) ) + + namespace_id = c.lastrowid + + else: ( namespace_id, ) = result + + else: namespace_id = 1 + + result = c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( tag, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO tags ( tag ) VALUES ( ? );', ( tag, ) ) + + tag_id = c.lastrowid + + else: ( tag_id, ) = result + + result = c.execute( 'SELECT 1 FROM existing_tags WHERE namespace_id = ? AND tag_id = ?;', ( namespace_id, tag_id ) ).fetchone() + + if result is None: c.execute( 'INSERT INTO existing_tags ( namespace_id, tag_id ) VALUES ( ?, ? );', ( namespace_id, tag_id ) ) + + return ( namespace_id, tag_id ) + + +class ServiceDB( FileDB, MessageDB, TagDB, RatingDB ): + + def _AddDownloads( self, c, service_identifier, hashes ): + + service_id = self._GetServiceId( c, service_identifier ) + + hash_ids = self._GetHashIds( c, hashes ) + + c.executemany( 'INSERT OR IGNORE INTO file_transfers ( service_id_from, service_id_to, hash_id ) VALUES ( ?, ?, ? );', [ ( service_id, self._local_file_service_id, hash_id ) for hash_id in hash_ids ] ) + + self.pub( 'notify_new_downloads' ) + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] ) + + + def _AddFiles( self, c, files_info_rows ): + + # service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words + + c.executemany( 'INSERT OR IGNORE INTO files_info VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );', files_info_rows ) + + service_ids_to_rows = HC.BuildKeyToListDict( [ ( row[ 0 ], row[ 1: ] ) for row in files_info_rows ] ) + + for ( service_id, rows ) in service_ids_to_rows.items(): + + hash_ids = [ row[ 0 ] for row in rows ] + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + c.execute( 'DELETE FROM deleted_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + num_deleted_files_revoked = c.rowcount + + c.execute( 'DELETE FROM file_transfers WHERE service_id_to = ? 
AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + total_size = sum( [ row[ 1 ] for row in rows ] ) + num_files = len( rows ) + num_thumbnails = len( [ 1 for row in rows if row[ 2 ] in HC.MIMES_WITH_THUMBNAILS ] ) + + service_info_updates = [] + + service_info_updates.append( ( total_size, service_id, HC.SERVICE_INFO_TOTAL_SIZE ) ) + service_info_updates.append( ( num_files, service_id, HC.SERVICE_INFO_NUM_FILES ) ) + service_info_updates.append( ( num_thumbnails, service_id, HC.SERVICE_INFO_NUM_THUMBNAILS ) ) + service_info_updates.append( ( -num_deleted_files_revoked, service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) ) + + c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates ) + + c.execute( 'DELETE FROM service_info WHERE service_id = ? AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_INBOX, HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) ) + ';', ( service_id, ) ) + + self._UpdateAutocompleteTagCacheFromFiles( c, service_id, hash_ids, 1 ) + + + + def _AddFileRepositoryUpdate( self, c, service_id, update ): + + # new + + files = update.GetFiles() + + new_hashes = [ hash for ( hash, size, mime, timestamp, width, height, duration, num_frames, num_words ) in files ] + + new_hash_ids = self._GetHashIds( c, new_hashes ) + + files_info_rows = [ ( service_id, self._GetHashId( c, hash ), size, mime, timestamp, width, height, duration, num_frames, num_words ) for ( hash, size, mime, timestamp, width, height, duration, num_frames, num_words ) in files ] + + self._AddFiles( c, files_info_rows ) + + # deleted + + deleted_hashes = update.GetDeletedHashes() + + deleted_hash_ids = self._GetHashIds( c, deleted_hashes ) + + self._DeleteFiles( c, service_id, deleted_hash_ids ) + + # news + + c.executemany( 'INSERT OR IGNORE INTO news VALUES ( ?, ?, ? );', [ ( service_id, post, timestamp ) for ( post, timestamp ) in update.GetNews() ] ) + + # done + + c.execute( 'UPDATE repositories SET first_begin = ? WHERE service_id = ? AND first_begin = ?;', ( update.GetNextBegin(), service_id, 0 ) ) + + c.execute( 'UPDATE repositories SET next_begin = ? WHERE service_id = ?;', ( update.GetNextBegin(), service_id ) ) + + deleted_hashes = [ hash for hash in update.GetDeletedHashes() ] # to proceess generator + + service_identifier = self._GetServiceIdentifier( c, service_id ) + + if len( new_hashes ) > 0: + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, new_hashes ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, new_hashes ) ] ) + if len( deleted_hashes ) > 0: + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, deleted_hashes ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, deleted_hashes ) ] ) + if len( new_hashes ) > 0 or len( deleted_hashes ) > 0: self.pub( 'notify_new_thumbnails' ) + + + def _AddService( self, c, service_identifier, credentials, extra_info ): + + service_key = service_identifier.GetServiceKey() + service_type = service_identifier.GetType() + service_name = service_identifier.GetName() + + c.execute( 'INSERT INTO services ( service_key, type, name ) VALUES ( ?, ?, ? 
);', ( sqlite3.Binary( service_key ), service_type, service_name ) ) + + service_id = c.lastrowid + + if service_type in HC.REMOTE_SERVICES: + + ( host, port ) = credentials.GetAddress() + + c.execute( 'INSERT OR IGNORE INTO addresses ( service_id, host, port, last_error ) VALUES ( ?, ?, ?, ? );', ( service_id, host, port, 0 ) ) + + if service_type in HC.RESTRICTED_SERVICES: + + access_key = credentials.GetAccessKey() + + account = CC.GetUnknownAccount() + + account.MakeStale() + + c.execute( 'INSERT OR IGNORE INTO accounts ( service_id, access_key, account ) VALUES ( ?, ?, ? );', ( service_id, sqlite3.Binary( access_key ), account ) ) + + if service_type in HC.REPOSITORIES: + + c.execute( 'INSERT OR IGNORE INTO repositories ( service_id, first_begin, next_begin ) VALUES ( ?, ?, ? );', ( service_id, 0, 0 ) ) + + if service_type == HC.TAG_REPOSITORY: c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) SELECT ?, CASE WHEN MAX( precedence ) NOT NULL THEN MAX( precedence ) + 1 ELSE 0 END FROM tag_service_precedence;', ( service_id, ) ) + elif service_type == HC.RATING_LIKE_REPOSITORY: + + ( like, dislike ) = extra_info + + c.execute( 'INSERT INTO ratings_like ( service_id, like, dislike ) VALUES ( ?, ?, ? );', ( service_id, like, dislike ) ) + + elif service_type == HC.RATING_LIKE_REPOSITORY: + + ( lower, upper ) = extra_info + + c.execute( 'INSERT INTO ratings_numerical ( service_id, lower, upper ) VALUES ( ?, ?, ? );', ( service_id, lower, upper ) ) + + + elif service_type == HC.MESSAGE_DEPOT: + + ( identity_name, check_period, private_key, receive_anon ) = extra_info + + public_key = HydrusMessageHandling.GetPublicKey( private_key ) + + contact_key = hashlib.sha256( public_key ).digest() + + try: + + contact_id = self._GetContactId( c, contact_key ) + + c.execute( 'UPDATE contacts SET contact_key = ?, public_key = ? WHERE contact_id = ?;', ( None, None, contact_id ) ) + + except: + + c.execute( 'INSERT OR IGNORE INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? );', ( None, None, identity_name, host, port ) ) + + contact_id = c.lastrowid + + + c.execute( 'INSERT OR IGNORE INTO message_depots ( service_id, contact_id, last_check, check_period, private_key, receive_anon ) VALUES ( ?, ?, ?, ?, ?, ? );', ( service_id, contact_id, 0, check_period, private_key, receive_anon ) ) + + + + else: + + if service_type == HC.LOCAL_RATING_LIKE: + + ( like, dislike ) = extra_info + + c.execute( 'INSERT INTO ratings_like ( service_id, like, dislike ) VALUES ( ?, ?, ? );', ( service_id, like, dislike ) ) + + elif HC.LOCAL_RATING_NUMERICAL: + + ( lower, upper ) = extra_info + + c.execute( 'INSERT INTO ratings_numerical ( service_id, lower, upper ) VALUES ( ?, ?, ? );', ( service_id, lower, upper ) ) + + + + + def _AddServiceUpdates( self, c, update_log ): + + do_new_permissions = False + + requests_made = [] + + for service_update in update_log: + + action = service_update.GetAction() + + service_identifier = service_update.GetServiceIdentifier() + + try: + + service_id = self._GetServiceId( c, service_identifier ) + + if action == CC.SERVICE_UPDATE_ACCOUNT: + + account = service_update.GetInfo() + + c.execute( 'UPDATE accounts SET account = ? WHERE service_id = ?;', ( account, service_id ) ) + c.execute( 'UPDATE addresses SET last_error = ? WHERE service_id = ?;', ( 0, service_id ) ) + + do_new_permissions = True + + elif action == CC.SERVICE_UPDATE_ERROR: c.execute( 'UPDATE addresses SET last_error = ? 
WHERE service_id = ?;', ( int( time.time() ), service_id ) ) + elif action == CC.SERVICE_UPDATE_REQUEST_MADE: requests_made.append( ( service_id, service_update.GetInfo() ) ) + + except: pass + + self.pub( 'service_update_data', service_update ) + self.pub( 'service_update_gui', service_update ) + + + for ( service_id, nums_bytes ) in HC.BuildKeyToListDict( requests_made ).items(): + + ( account, ) = c.execute( 'SELECT account FROM accounts WHERE service_id = ?;', ( service_id, ) ).fetchone() + + for num_bytes in nums_bytes: account.RequestMade( num_bytes ) + + c.execute( 'UPDATE accounts SET account = ? WHERE service_id = ?;', ( account, service_id ) ) + + + if do_new_permissions: self.pub( 'notify_new_permissions' ) + + + def _AddTagRepositoryUpdate( self, c, service_id, update ): + + # new + + mappings = update.GetMappings() + + mappings_ids = [] + + for ( tag, hashes ) in mappings: + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + hash_ids = self._GetHashIds( c, hashes ) + + mappings_ids.append( ( namespace_id, tag_id, hash_ids ) ) + + + # deleted + + deleted_mappings = update.GetDeletedMappings() + + deleted_mappings_ids = [] + + for ( tag, hashes ) in update.GetDeletedMappings(): + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + hash_ids = self._GetHashIds( c, hashes ) + + deleted_mappings_ids.append( ( namespace_id, tag_id, hash_ids ) ) + + + self._UpdateMappings( c, service_id, mappings_ids, deleted_mappings_ids ) + + # news + + c.executemany( 'INSERT OR IGNORE INTO news VALUES ( ?, ?, ? );', [ ( service_id, post, timestamp ) for ( post, timestamp ) in update.GetNews() ] ) + + # done + + if update.GetEnd() is not None: + + c.execute( 'UPDATE repositories SET first_begin = ? WHERE service_id = ? AND first_begin = ?;', ( update.GetNextBegin(), service_id, 0 ) ) + + c.execute( 'UPDATE repositories SET next_begin = ? 
WHERE service_id = ?;', ( update.GetNextBegin(), service_id ) ) + + + service_identifier = self._GetServiceIdentifier( c, service_id ) + + mappings = [ mapping for mapping in update.GetMappings() ] # to clear generator + + deleted_mappings = [ deleted_mapping for deleted_mapping in update.GetDeletedMappings() ] # to clear generator + + if len( mappings ) > 0: + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, hashes, info = tag ) for ( tag, hashes ) in mappings ] ) + if len( deleted_mappings ) > 0: + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( tag, hashes ) in deleted_mappings ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, hashes, info = tag ) for ( tag, hashes ) in deleted_mappings ] ) + + + def _AddUpdate( self, c, service_identifier, update ): + + service_type = service_identifier.GetType() + + service_id = self._GetServiceId( c, service_identifier ) + + if service_type == HC.FILE_REPOSITORY: self._AddFileRepositoryUpdate( c, service_id, update ) + elif service_type == HC.TAG_REPOSITORY: self._AddTagRepositoryUpdate( c, service_id, update ) + + + def _AddUploads( self, c, service_identifier, hashes ): + + service_id = self._GetServiceId( c, service_identifier ) + + service = self._GetService( c, service_id ) + + hash_ids = set( self._GetHashIds( c, hashes ) ) + + if not service.GetAccount().HasPermission( HC.RESOLVE_PETITIONS ): + + deleted_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_files WHERE service_id = ?;', ( service_id, ) ) ] + + hash_ids.difference_update( deleted_hash_ids ) + + + existing_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] + + hash_ids.difference_update( existing_hash_ids ) + + c.executemany( 'INSERT OR IGNORE INTO file_transfers ( service_id_from, service_id_to, hash_id ) VALUES ( ?, ?, ? );', [ ( self._local_file_service_id, service_id, hash_id ) for hash_id in hash_ids ] ) + + self.pub( 'notify_new_pending' ) + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, service_identifier, self._GetHashes( c, hash_ids ) ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_PENDING, service_identifier, self._GetHashes( c, hash_ids ) ) ] ) + + + def _ArchiveFiles( self, c, hash_ids ): + + valid_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_inbox WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ) ] + + if len( valid_hash_ids ) > 0: + + splayed_hash_ids = HC.SplayListForDB( valid_hash_ids ) + + c.execute( 'DELETE FROM file_inbox WHERE hash_id IN ' + splayed_hash_ids + ';' ) + + updates = c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id;' ).fetchall() + + c.executemany( 'UPDATE service_info SET info = info - ? WHERE service_id = ? 
AND info_type = ?;', [ ( count, service_id, HC.SERVICE_INFO_NUM_INBOX ) for ( service_id, count ) in updates ] ) + + + + def _DeleteFiles( self, c, service_id, hash_ids ): + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + if service_id == self._local_file_service_id: c.execute( 'DELETE FROM file_inbox WHERE hash_id IN ' + splayed_hash_ids + ';' ) + + info = c.execute( 'SELECT size, mime FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ).fetchall() + + total_size = sum( [ row[ 0 ] for row in info ] ) + num_files = len( info ) + num_thumbnails = len( [ 1 for row in info if row[ 1 ] in HC.MIMES_WITH_THUMBNAILS ] ) + + service_info_updates = [] + + service_info_updates.append( ( total_size, service_id, HC.SERVICE_INFO_TOTAL_SIZE ) ) + service_info_updates.append( ( num_files, service_id, HC.SERVICE_INFO_NUM_FILES ) ) + service_info_updates.append( ( num_thumbnails, service_id, HC.SERVICE_INFO_NUM_THUMBNAILS ) ) + service_info_updates.append( ( -num_files, service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) ) # - because we want to increment in the following query + + c.executemany( 'UPDATE service_info SET info = info - ? WHERE service_id = ? AND info_type = ?;', service_info_updates ) + + c.execute( 'DELETE FROM service_info WHERE service_id = ? AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_INBOX, HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) ) + ';', ( service_id, ) ) + + c.execute( 'DELETE FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + invalid_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM deleted_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) } + + actual_hash_ids_i_can_delete = set( hash_ids ) + + actual_hash_ids_i_can_delete.difference_update( invalid_hash_ids ) + + c.executemany( 'INSERT OR IGNORE INTO deleted_files ( service_id, hash_id ) VALUES ( ?, ? );', [ ( service_id, hash_id ) for hash_id in actual_hash_ids_i_can_delete ] ) + + self._UpdateAutocompleteTagCacheFromFiles( c, service_id, actual_hash_ids_i_can_delete, -1 ) + + self.pub( 'notify_new_pending' ) + + + def _DeleteOrphans( self, c ): + + # careful of the .encode( 'hex' ) business here! 
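# A minimal sketch of the conversion the comment above is warning about: the
# hashes table stores raw 32-byte SHA-256 digests (passed as sqlite3.Binary),
# while files under HC.CLIENT_FILES_DIR and HC.CLIENT_THUMBNAILS_DIR are named
# by the hex form of the digest, hence the .encode( 'hex' ) / .decode( 'hex' )
# calls in this method. binascii is used here purely for illustration; the
# commit itself relies on the Python 2 hex string codec.

import binascii
import hashlib

raw_hash = hashlib.sha256( b'example file contents' ).digest() # raw bytes, as stored in the database
hex_name = binascii.hexlify( raw_hash )                        # hex string, as used for the on-disk filename
assert binascii.unhexlify( hex_name ) == raw_hash              # a directory listing decodes back to raw hashes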
+ + # files + + deleted_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_files WHERE service_id = ?;', ( self._local_file_service_id, ) ) } + + pending_upload_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_transfers WHERE service_id_from = ?;', ( self._local_file_service_id, ) ) } + + message_attachment_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM message_attachments;' ) } + + deletee_hash_ids = ( deleted_hash_ids - pending_upload_hash_ids ) - message_attachment_hash_ids + + deletee_hashes = set( self._GetHashes( c, deletee_hash_ids ) ) + + local_files_hashes = { hash.decode( 'hex' ) for hash in dircache.listdir( HC.CLIENT_FILES_DIR ) } + + for hash in local_files_hashes & deletee_hashes: os.remove( HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) ) + + # perceptual_hashes and thumbs + + perceptual_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM perceptual_hashes;' ) } + + hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info;' ) } + + perceptual_deletees = perceptual_hash_ids - hash_ids + + c.execute( 'DELETE FROM perceptual_hashes WHERE hash_id IN ' + HC.SplayListForDB( perceptual_deletees ) + ';' ) + + all_thumbnail_paths = dircache.listdir( HC.CLIENT_THUMBNAILS_DIR ) + + thumbnails_i_have = { path.decode( 'hex' ) for path in all_thumbnail_paths if not path.endswith( '_resized' ) } + + hashes = set( self._GetHashes( c, hash_ids ) ) + + thumbnail_deletees = thumbnails_i_have - hashes + + for hash in thumbnail_deletees: + + path = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + resized_path = path + '_resized' + + os.remove( path ) + + if os.path.exists( resized_path ): os.remove( resized_path ) + + + c.execute( 'REPLACE INTO shutdown_timestamps ( shutdown_type, timestamp ) VALUES ( ?, ? 
);', ( CC.SHUTDOWN_TIMESTAMP_DELETE_ORPHANS, int( time.time() ) ) ) + + + def _DeletePending( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + if service_identifier.GetType() == HC.TAG_REPOSITORY: + + c.execute( 'DELETE FROM pending_mappings WHERE service_id = ?;', ( service_id, ) ) + + c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = ?;', ( service_id, ) ) + c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL;' ) + + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ?;', ( service_id, ) ) + + self._RecalcActivePendingMappings( c ) + + elif service_identifier.GetType() == HC.FILE_REPOSITORY: + + c.execute( 'DELETE FROM file_transfers WHERE service_id_to = ?;', ( service_id, ) ) + c.execute( 'DELETE FROM file_petitions WHERE service_id = ?;', ( service_id, ) ) + + + self.pub( 'notify_new_pending' ) + self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_DELETE_PENDING, service_identifier ) ) + + + def _DoFileQuery( self, c, query_key, search_context ): + + # setting up + + system_predicates = search_context.GetSystemPredicates() + + file_service_identifier = search_context.GetFileServiceIdentifier() + tag_service_identifier = search_context.GetTagServiceIdentifier() + + file_service_id = self._GetServiceId( c, file_service_identifier ) + tag_service_id = self._GetServiceId( c, tag_service_identifier ) + + file_service_type = file_service_identifier.GetType() + tag_service_type = tag_service_identifier.GetType() + + tags_to_include = search_context.GetTagsToInclude() + tags_to_exclude = search_context.GetTagsToExclude() + + include_current_tags = search_context.IncludeCurrentTags() + include_pending_tags = search_context.IncludePendingTags() + + sql_predicates = [ 'service_id = ' + str( file_service_id ) ] + + ( hash, min_size, size, max_size, mimes, min_timestamp, max_timestamp, min_width, width, max_width, min_height, height, max_height, min_duration, duration, max_duration ) = system_predicates.GetInfo() + + if min_size is not None: sql_predicates.append( 'size > ' + str( min_size ) ) + if size is not None: sql_predicates.append( 'size = ' + str( size ) ) + if max_size is not None: sql_predicates.append( 'size < ' + str( max_size ) ) + + if mimes is not None: + + if len( mimes ) == 1: + + ( mime, ) = mimes + + sql_predicates.append( 'mime = ' + str( mime ) ) + + else: sql_predicates.append( 'mime IN ' + HC.SplayListForDB( mimes ) ) + + + if min_timestamp is not None: sql_predicates.append( 'timestamp >= ' + str( min_timestamp ) ) + if max_timestamp is not None: sql_predicates.append( 'timestamp <= ' + str( max_timestamp ) ) + + if min_width is not None: sql_predicates.append( 'width > ' + str( min_width ) ) + if width is not None: sql_predicates.append( 'width = ' + str( width ) ) + if max_width is not None: sql_predicates.append( 'width < ' + str( max_width ) ) + + if min_height is not None: sql_predicates.append( 'height > ' + str( min_height ) ) + if height is not None: sql_predicates.append( 'height = ' + str( height ) ) + if max_height is not None: sql_predicates.append( 'height < ' + str( max_height ) ) + + if min_duration is not None: sql_predicates.append( 'duration > ' + str( min_duration ) ) + if duration is not None: + + if duration == 0: sql_predicates.append( '( duration IS NULL OR duration = 0 )' ) + else: sql_predicates.append( 'duration = ' + str( duration ) ) + + if max_duration is not None: sql_predicates.append( 'duration < ' + str( max_duration ) ) + + 
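# A minimal sketch of how the sql_predicates fragments built above are
# consumed just below: they are joined with AND into a single files_info
# query (each bound was already interpolated into its fragment as a string
# when it was appended). The example values here are made up.

sql_predicates = [ 'service_id = 2', 'size > 1048576', 'mime IN ( 1, 2 )', 'width = 1920' ]

query = 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';'

# query is now:
# SELECT hash_id FROM files_info WHERE service_id = 2 AND size > 1048576 AND mime IN ( 1, 2 ) AND width = 1920;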
if len( tags_to_include ) > 0: + + query_hash_ids = HC.IntelligentMassIntersect( ( self._GetHashIdsFromTag( c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ) for tag in tags_to_include ) ) + + if len( sql_predicates ) > 1: query_hash_ids.intersection_update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) ] ) + + else: + + if file_service_identifier != CC.NULL_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) } + elif tag_service_identifier != CC.NULL_SERVICE_IDENTIFIER: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) } + else: query_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings UNION SELECT hash_id FROM files_info;' ) } + + + ( num_tags_zero, num_tags_nonzero ) = system_predicates.GetNumTagsInfo() + + if num_tags_zero: + + zero_tag_hash_ids = set() + + if include_current_tags: + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: current_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings;' ) } + else: current_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) } + + zero_tag_hash_ids = current_zero_tag_hash_ids + + + if include_pending_tags: + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: pending_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings;' ) } + else: pending_zero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id = ?;', ( tag_service_id, ) ) } + + zero_tag_hash_ids.update( pending_zero_tag_hash_ids ) + + + query_hash_ids.difference_update( zero_tag_hash_ids ) + + + if num_tags_nonzero: + + nonzero_tag_hash_ids = set() + + if include_current_tags: + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: current_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings;' ) } + else: current_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( tag_service_id, ) ) } + + nonzero_tag_hash_ids = current_nonzero_tag_hash_ids + + + if include_pending_tags: + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: pending_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings;' ) } + else: pending_nonzero_tag_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id = ?;', ( tag_service_id, ) ) } + + nonzero_tag_hash_ids.update( pending_nonzero_tag_hash_ids ) + + + query_hash_ids.intersection_update( nonzero_tag_hash_ids ) + + + if hash is not None: + + hash_id = self._GetHashId( c, hash ) + + query_hash_ids.intersection_update( { hash_id } ) + + + exclude_query_hash_ids = HC.IntelligentMassUnion( [ self._GetHashIdsFromTag( c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ) for tag in tags_to_exclude ] ) + + if file_service_type == HC.FILE_REPOSITORY and self._options[ 'exclude_deleted_files' ]: exclude_query_hash_ids.update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_files WHERE service_id = ?;', ( self._local_file_service_id, ) ) ] ) + + query_hash_ids.difference_update( exclude_query_hash_ids ) + + 
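# A minimal sketch of the include/exclude set algebra just performed, with
# plain set operations standing in for HC.IntelligentMassIntersect and
# HC.IntelligentMassUnion (assumed here to amount to intersection and union
# of the per-tag hash_id sets returned by _GetHashIdsFromTag):

ids_per_included_tag = [ { 1, 2, 3, 5 }, { 2, 3, 5, 8 } ] # hypothetical per-tag results for the tags to include
ids_per_excluded_tag = [ { 3, 9 }, { 10 } ]               # hypothetical per-tag results for the tags to exclude

query_hash_ids = set.intersection( *ids_per_included_tag )              # a file must carry every included tag
query_hash_ids.difference_update( set.union( *ids_per_excluded_tag ) )  # and none of the excluded ones

# query_hash_ids is now { 2, 5 }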
for name_to_exclude in system_predicates.GetFileRepositoryNamesToExclude(): + + ( service_id, ) = c.execute( 'SELECT service_id FROM services WHERE type = ? AND name = ?;', ( HC.FILE_REPOSITORY, name_to_exclude ) ).fetchone() + + query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] ) + + + for ( service_name, operator, value ) in system_predicates.GetRatingsPredicates(): + + service_id = self._GetServiceId( c, service_name ) + + if value == 'rated': query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ?;', ( service_id, ) ) ] ) + elif value == 'not rated': query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ?;', ( service_id, ) ) ] ) + elif value == 'uncertain': query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM ratings_filter WHERE service_id = ?;', ( service_id, ) ) ] ) + else: + + if operator == u'\u2248': predicate = str( value * 0.95 ) + ' < rating AND rating < ' + str( value * 1.05 ) + else: predicate = 'rating ' + operator + ' ' + str( value ) + + query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM local_ratings WHERE service_id = ? AND ' + predicate + ';', ( service_id, ) ) ] ) + + + + media_results = self._GetMediaResults( c, search_context, query_hash_ids ) + + self.pub( 'file_query_done', query_key, media_results ) + + + def _FattenAutocompleteCache( self, c ): + + tag_service_identifiers = self._GetServiceIdentifiers( c, ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ) ) + file_service_identifiers = self._GetServiceIdentifiers( c, ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ) ) + + tag_service_identifiers.add( CC.NULL_SERVICE_IDENTIFIER ) + file_service_identifiers.add( CC.NULL_SERVICE_IDENTIFIER ) + + for ( tag_service_identifier, file_service_identifier ) in itertools.product( tag_service_identifiers, file_service_identifiers ): self._GetAutocompleteTags( c, tag_service_identifier = tag_service_identifier, file_service_identifier = file_service_identifier ) + + c.execute( 'REPLACE INTO shutdown_timestamps ( shutdown_type, timestamp ) VALUES ( ?, ? 
);', ( CC.SHUTDOWN_TIMESTAMP_FATTEN_AC_CACHE, int( time.time() ) ) ) + + + def _Get4chanPass( self, c ): + + result = c.execute( 'SELECT token, pin, timeout FROM fourchan_pass;' ).fetchone() + + if result is None: return ( '', '', 0 ) + else: return result + + + def _GetAllDownloads( self, c ): + + id_result = HC.BuildKeyToListDict( c.execute( 'SELECT service_id_from, hash_id FROM file_transfers WHERE service_id_to = ?;', ( self._local_file_service_id, ) ) ) + + all_downloads = { self._GetServiceIdentifier( c, service_id ) : self._GetHashes( c, hash_ids ) for ( service_id, hash_ids ) in id_result.items() } + + return all_downloads + + + def _GetAutocompleteTags( self, c, tag_service_identifier = CC.NULL_SERVICE_IDENTIFIER, file_service_identifier = CC.NULL_SERVICE_IDENTIFIER, half_complete_tag = '', include_current = True, include_pending = True ): + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + tag_service_id = None + + if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + file_service_id = None + + current_tables_phrase = 'active_mappings WHERE ' + pending_tables_phrase = 'active_pending_mappings WHERE ' + + else: + + file_service_id = self._GetServiceId( c, file_service_identifier ) + + current_tables_phrase = 'active_mappings, files_info USING ( hash_id ) WHERE service_id = ' + str( file_service_id ) + ' AND ' + pending_tables_phrase = 'active_pending_mappings, files_info USING ( hash_id ) WHERE service_id = ' + str( file_service_id ) + ' AND ' + + + else: + + tag_service_id = self._GetServiceId( c, tag_service_identifier ) + + if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + file_service_id = None + + current_tables_phrase = 'mappings WHERE service_id = ' + str( tag_service_id ) + ' AND ' + pending_tables_phrase = 'pending_mappings WHERE service_id = ' + str( tag_service_id ) + ' AND ' + + else: + + file_service_id = self._GetServiceId( c, file_service_identifier ) + + current_tables_phrase = 'mappings, files_info USING ( hash_id ) WHERE mappings.service_id = ' + str( tag_service_id ) + ' AND files_info.service_id = ' + str( file_service_id ) + ' AND ' + pending_tables_phrase = 'pending_mappings, files_info USING ( hash_id ) WHERE pending_mappings.service_id = ' + str( tag_service_id ) + ' AND files_info.service_id = ' + str( file_service_id ) + ' AND ' + + + + if tag_service_id is None: autocomplete_services_predicates_phrase = 'tag_service_id IS NULL AND ' + else: autocomplete_services_predicates_phrase = 'tag_service_id = ' + str( tag_service_id ) + ' AND ' + + if file_service_id is None: autocomplete_services_predicates_phrase += 'file_service_id IS NULL AND' + else: autocomplete_services_predicates_phrase += 'file_service_id = ' + str( file_service_id ) + ' AND' + + # precache search + + there_was_a_namespace = False + + if len( half_complete_tag ) > 0: + + if ':' in half_complete_tag: + + there_was_a_namespace = True + + ( namespace, half_complete_tag ) = half_complete_tag.split( ':', 1 ) + + if half_complete_tag == '': return CC.AutocompleteMatchesCounted( {} ) + else: + + result = c.execute( 'SELECT namespace_id FROM namespaces WHERE namespace = ?;', ( namespace, ) ).fetchone() + + if result is None: return CC.AutocompleteMatchesCounted( {} ) + else: + + ( namespace_id, ) = result + + possible_tag_ids = [ tag_id for ( tag_id, ) in c.execute( 'SELECT tag_id FROM tags WHERE tag LIKE ?;', ( half_complete_tag + '%', ) ) ] + + predicates_phrase = 'namespace_id = ' + str( namespace_id ) + ' AND tag_id IN ' + HC.SplayListForDB( possible_tag_ids ) 
+ + + + else: + + possible_tag_ids = [ tag_id for ( tag_id, ) in c.execute( 'SELECT tag_id FROM tags WHERE tag LIKE ?;', ( half_complete_tag + '%', ) ) ] + + predicates_phrase = 'tag_id IN ' + HC.SplayListForDB( possible_tag_ids ) + + + else: + + predicates_phrase = '1 = 1' + + + results = { result for result in c.execute( 'SELECT namespace_id, tag_id FROM existing_tags WHERE ' + predicates_phrase + ';' ) } + + # fetch what we can from cache + + cache_results = [] + + if len( half_complete_tag ) > 0: + + for ( namespace_id, tag_ids ) in HC.BuildKeyToListDict( results ).items(): cache_results.extend( c.execute( 'SELECT namespace_id, tag_id, current_count, pending_count FROM autocomplete_tags_cache WHERE ' + autocomplete_services_predicates_phrase + ' namespace_id = ? AND tag_id IN ' + HC.SplayListForDB( tag_ids ) + ';', ( namespace_id, ) ).fetchall() ) + + else: + + cache_results = c.execute( 'SELECT namespace_id, tag_id, current_count, pending_count FROM autocomplete_tags_cache WHERE ' + autocomplete_services_predicates_phrase + ' 1=1;' ).fetchall() + + + results_hit = { ( namespace_id, tag_id ) for ( namespace_id, tag_id, current_count, pending_count ) in cache_results } + + results_missed = results.difference( results_hit ) + + zero = lambda: 0 + + for ( namespace_id, tag_ids ) in HC.BuildKeyToListDict( results_missed ).items(): + + current_counts = collections.defaultdict( zero ) + pending_counts = collections.defaultdict( zero ) + + current_counts.update( { tag_id : count for ( tag_id, count ) in c.execute( 'SELECT tag_id, COUNT( * ) FROM ' + current_tables_phrase + 'namespace_id = ? AND tag_id IN ' + HC.SplayListForDB( tag_ids ) + ' GROUP BY tag_id;', ( namespace_id, ) ) } ) + pending_counts.update( { tag_id : count for ( tag_id, count ) in c.execute( 'SELECT tag_id, COUNT( * ) FROM ' + pending_tables_phrase + 'namespace_id = ? AND tag_id IN ' + HC.SplayListForDB( tag_ids ) + ' GROUP BY tag_id;', ( namespace_id, ) ) } ) + + c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? 
);', [ ( file_service_id, tag_service_id, namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ] ) + + cache_results.extend( [ ( namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ] ) + + + results = [] + + if include_current: results += [ ( namespace_id, tag_id, current_count ) for ( namespace_id, tag_id, current_count, pending_count ) in cache_results ] + if include_pending: results += [ ( namespace_id, tag_id, pending_count ) for ( namespace_id, tag_id, current_count, pending_count ) in cache_results ] + + tags_to_count = collections.Counter() + + [ tags_to_count.update( { ( namespace_id, tag_id ) : num_tags } ) for ( namespace_id, tag_id, num_tags ) in results ] + + if not there_was_a_namespace: + + unnamespaced_tag_ids = { tag_id for ( namespace_id, tag_id, num_tags ) in results if namespace_id == 1 } + + [ tags_to_count.update( { ( 1, tag_id ) : num_tags } ) for ( namespace_id, tag_id, num_tags ) in results if namespace_id != 1 and tag_id in unnamespaced_tag_ids ] + + + matches = CC.AutocompleteMatchesCounted( { self._GetNamespaceTag( c, namespace_id, tag_id ) : num_tags for ( ( namespace_id, tag_id ), num_tags ) in tags_to_count.items() if num_tags > 0 } ) + + return matches + + + def _GetHashIdsFromTag( self, c, file_service_identifier, tag_service_identifier, tag, include_current_tags, include_pending_tags ): + + hash_ids = set() + + if file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + current_tables_phrase = 'active_mappings' + pending_tables_phrase = 'active_pending_mappings' + + current_predicates_phrase = '' + pending_predicates_phrase = '' + + else: + + tag_service_id = self._GetServiceId( c, tag_service_identifier ) + + current_tables_phrase = 'mappings' + pending_tables_phrase = 'pending_mappings' + + current_predicates_phrase = 'service_id = ' + str( tag_service_id ) + ' AND ' + pending_predicates_phrase = 'service_id = ' + str( tag_service_id ) + ' AND ' + + + else: + + file_service_id = self._GetServiceId( c, file_service_identifier ) + + if tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + current_tables_phrase = '( active_mappings, files_info USING ( hash_id ) )' + pending_tables_phrase = '( active_pending_mappings, files_info USING ( hash_id ) )' + + current_predicates_phrase = 'service_id = ' + str( file_service_id ) + ' AND ' + pending_predicates_phrase = 'service_id = ' + str( file_service_id ) + ' AND ' + + else: + + tag_service_id = self._GetServiceId( c, tag_service_identifier ) + + # we have to do a crazy join because of the nested joins, which wipe out table-namespaced identifiers like mappings.service_id, replacing them with useless stuff like service_id:1 + + current_tables_phrase = '( mappings, files_info ON ( mappings.hash_id = files_info.hash_id AND mappings.service_id = ' + str( tag_service_id ) + ' AND files_info.service_id = ' + str( file_service_id ) + ' ) )' + pending_tables_phrase = '( pending_mappings, files_info ON ( pending_mappings.hash_id = files_info.hash_id AND pending_mappings.service_id = ' + str( tag_service_id ) + ' AND files_info.service_id = ' + str( file_service_id ) + ' ) )' + + current_predicates_phrase = '' + pending_predicates_phrase = '' + + + + + if ':' in tag: + + ( namespace, tag ) = tag.split( ':', 1 ) + + if include_current_tags: hash_ids.update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM namespaces, ( tags, ' + current_tables_phrase + ' USING ( tag_id 
) ) USING ( namespace_id ) WHERE ' + current_predicates_phrase + 'namespace = ? AND tag = ?;', ( namespace, tag ) ) ] ) + if include_pending_tags: hash_ids.update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM namespaces, ( tags, ' + pending_tables_phrase + ' USING ( tag_id ) ) USING ( namespace_id ) WHERE ' + pending_predicates_phrase + 'namespace = ? AND tag = ?;', ( namespace, tag ) ) ] ) + + else: + + if include_current_tags: hash_ids.update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM tags, ' + current_tables_phrase + ' USING ( tag_id ) WHERE ' + current_predicates_phrase + 'tag = ?;', ( tag, ) ) ] ) + if include_pending_tags: hash_ids.update( [ id for ( id, ) in c.execute( 'SELECT hash_id FROM tags, ' + pending_tables_phrase + ' USING ( tag_id ) WHERE ' + pending_predicates_phrase + 'tag = ?;', ( tag, ) ) ] ) + + + return hash_ids + + + def _GetHashesNamespaceIdsTagIds( self, c, hash_ids, mapping_type = 'regular' ): + + shared_namespace_ids_tag_ids = None + + for hash_id in hash_ids: + + if mapping_type == 'regular': namespace_ids_tag_ids = c.execute( 'SELECT namespace_id, tag_id FROM public_mappings WHERE hash_id = ?;', ( hash_id, ) ).fetchall() + elif mapping_type == 'deleted': namespace_ids_tag_ids = c.execute( 'SELECT namespace_id, tag_id FROM deleted_public_mappings WHERE hash_id = ?;', ( hash_id, ) ).fetchall() + elif mapping_type == 'pending': namespace_ids_tag_ids = c.execute( 'SELECT namespace_id, tag_id FROM pending_public_mappings WHERE hash_id = ?;', ( hash_id, ) ).fetchall() + elif mapping_type == 'petitioned': namespace_ids_tag_ids = c.execute( 'SELECT namespace_id, tag_id FROM pending_public_mapping_petitions WHERE hash_id = ?;', ( hash_id, ) ).fetchall() + + if shared_namespace_ids_tag_ids is None: shared_namespace_ids_tag_ids = set( namespace_ids_tag_ids ) + else: shared_namespace_ids_tag_ids.intersection_update( namespace_ids_tag_ids ) + + if len( shared_namespace_ids_tag_ids ) == 0: break + + + if shared_namespace_ids_tag_ids is None: return set() + + return shared_namespace_ids_tag_ids + + + def _GetMD5Status( self, c, md5 ): + + result = c.execute( 'SELECT hash_id FROM local_hashes WHERE md5 = ?;', ( sqlite3.Binary( md5 ), ) ).fetchone() + + if result is not None: + + ( hash_id, ) = result + + if self._options[ 'exclude_deleted_files' ]: + + result = c.execute( 'SELECT 1 FROM deleted_files WHERE hash_id = ?;', ( hash_id, ) ).fetchone() + + if result is not None: return ( 'deleted', None ) + + + result = c.execute( 'SELECT 1 FROM files_info WHERE service_id = ? 
AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone() + + if result is not None: + + hash = self._GetHash( c, hash_id ) + + return ( 'redundant', hash ) + + + + return ( 'new', None ) + + + def _GetMediaResults( self, c, search_context, query_hash_ids ): + + file_service_identifier = search_context.GetFileServiceIdentifier() + tag_service_identifier = search_context.GetTagServiceIdentifier() + + service_id = self._GetServiceId( c, file_service_identifier ) + + system_predicates = search_context.GetSystemPredicates() + + limit = system_predicates.GetLimit() + + inbox_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM file_inbox;' ) } + + # get basic results + + must_be_local = system_predicates.MustBeLocal() or system_predicates.MustBeArchive() + must_not_be_local = system_predicates.MustNotBeLocal() + must_be_inbox = system_predicates.MustBeInbox() + must_be_archive = system_predicates.MustBeArchive() + + if must_be_local or must_not_be_local: + + if service_id == self._local_file_service_id: + + if must_not_be_local: query_hash_ids = set() + + else: + + local_hash_ids = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( self._local_file_service_id, ) ) ] + + if must_be_local: query_hash_ids.intersection_update( local_hash_ids ) + else: query_hash_ids.difference_update( local_hash_ids ) + + + + if must_be_inbox: query_hash_ids.intersection_update( inbox_hash_ids ) + elif must_be_archive: query_hash_ids.difference_update( inbox_hash_ids ) + + # similar to + + if system_predicates.HasSimilarTo(): + + ( hash, max_hamming ) = system_predicates.GetSimilarTo() + + hash_id = self._GetHashId( c, hash ) + + result = c.execute( 'SELECT phash FROM perceptual_hashes WHERE hash_id = ?;', ( hash_id, ) ).fetchone() + + if result is not None: + + ( phash, ) = result + + similar_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM perceptual_hashes WHERE hydrus_hamming( phash, ? ) <= ?;', ( sqlite3.Binary( phash ), max_hamming ) ) ] + + query_hash_ids.intersection_update( similar_hash_ids ) + + + + # get first detailed results + + # since I've changed to new search model, this bit needs working over, I think? + # null_service_identifier + if file_service_identifier.GetType() not in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ): + + all_services_results = c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( query_hash_ids ) + ';' ).fetchall() + + hash_ids_i_have_info_for = set() + + results = [] + + for result in all_services_results: + + hash_id = result[0] + + if hash_id not in hash_ids_i_have_info_for: + + hash_ids_i_have_info_for.add( hash_id ) + + results.append( result ) + + + + results.extend( [ ( hash_id, None, HC.APPLICATION_UNKNOWN, None, None, None, None, None, None ) for hash_id in query_hash_ids - hash_ids_i_have_info_for ] ) + + else: results = c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? 
AND hash_id IN ' + HC.SplayListForDB( query_hash_ids ) + ';', ( service_id, ) ).fetchall() + + # filtering basic results + + if system_predicates.CanPreFirstRoundLimit(): + + if len( results ) > limit: results = random.sample( results, limit ) + + else: + + results = [ ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) for ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in results if system_predicates.OkFirstRound( width, height ) ] + + if system_predicates.CanPreSecondRoundLimit(): + + if len( results ) > limit: results = random.sample( results, system_predicates.GetLimit() ) + + + + # get tagged results + + hash_ids = [ result[0] for result in results ] + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + hash_ids_to_hashes = self._GetHashIdsToHashes( c, hash_ids ) + + hash_ids_to_current_tags = HC.BuildKeyToListDict( [ ( hash_id, ( service_id, namespace + ':' + tag ) ) if namespace != '' else ( hash_id, ( service_id, tag ) ) for ( hash_id, service_id, namespace, tag ) in c.execute( 'SELECT hash_id, service_id, namespace, tag FROM namespaces, ( tags, mappings USING ( tag_id ) ) USING ( namespace_id ) WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] ) + + hash_ids_to_deleted_tags = HC.BuildKeyToListDict( [ ( hash_id, ( service_id, namespace + ':' + tag ) ) if namespace != '' else ( hash_id, ( service_id, tag ) ) for ( hash_id, service_id, namespace, tag ) in c.execute( 'SELECT hash_id, service_id, namespace, tag FROM namespaces, ( tags, deleted_mappings USING ( tag_id ) ) USING ( namespace_id ) WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] ) + + hash_ids_to_pending_tags = HC.BuildKeyToListDict( [ ( hash_id, ( service_id, namespace + ':' + tag ) ) if namespace != '' else ( hash_id, ( service_id, tag ) ) for ( hash_id, service_id, namespace, tag ) in c.execute( 'SELECT hash_id, service_id, namespace, tag FROM namespaces, ( tags, pending_mappings USING ( tag_id ) ) USING ( namespace_id ) WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] ) + + hash_ids_to_petitioned_tags = HC.BuildKeyToListDict( [ ( hash_id, ( service_id, namespace + ':' + tag ) ) if namespace != '' else ( hash_id, ( service_id, tag ) ) for ( hash_id, service_id, namespace, tag ) in c.execute( 'SELECT hash_id, service_id, namespace, tag FROM namespaces, ( tags, mapping_petitions USING ( tag_id ) ) USING ( namespace_id ) WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] ) + + hash_ids_to_current_file_service_ids = HC.BuildKeyToListDict( c.execute( 'SELECT hash_id, service_id FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ';' ) ) + + hash_ids_to_deleted_file_service_ids = HC.BuildKeyToListDict( c.execute( 'SELECT hash_id, service_id FROM deleted_files WHERE hash_id IN ' + splayed_hash_ids + ';' ) ) + + hash_ids_to_pending_file_service_ids = HC.BuildKeyToListDict( c.execute( 'SELECT hash_id, service_id_to FROM file_transfers WHERE hash_id IN ' + splayed_hash_ids + ';' ) ) + + hash_ids_to_petitioned_file_service_ids = HC.BuildKeyToListDict( c.execute( 'SELECT hash_id, service_id FROM file_petitions WHERE hash_id IN ' + splayed_hash_ids + ';' ) ) + + hash_ids_to_local_ratings = HC.BuildKeyToListDict( [ ( hash_id, ( service_id, rating ) ) for ( service_id, hash_id, rating ) in c.execute( 'SELECT service_id, hash_id, rating FROM local_ratings WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] ) + + # do current and pending remote ratings here + + service_ids_to_service_identifiers = { service_id : HC.ClientServiceIdentifier( service_key, service_type, name ) 
for ( service_id, service_key, service_type, name ) in c.execute( 'SELECT service_id, service_key, type, name FROM services;' ) } + + # build it + + limit = system_predicates.GetLimit() + + include_current_tags = search_context.IncludeCurrentTags() + include_pending_tags = search_context.IncludePendingTags() + + media_results = [] + + random.shuffle( results ) # important for system:limit + + for ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in results: + + if limit is not None and len( media_results ) >= limit: break + + hash = hash_ids_to_hashes[ hash_id ] + + if hash_id in hash_ids_to_current_tags: current_tags_dict = HC.BuildKeyToListDict( hash_ids_to_current_tags[ hash_id ] ) + else: current_tags_dict = {} + + if hash_id in hash_ids_to_deleted_tags: deleted_tags_dict = HC.BuildKeyToListDict( hash_ids_to_deleted_tags[ hash_id ] ) + else: deleted_tags_dict = {} + + if hash_id in hash_ids_to_pending_tags: pending_tags_dict = HC.BuildKeyToListDict( hash_ids_to_pending_tags[ hash_id ] ) + else: pending_tags_dict = {} + + if hash_id in hash_ids_to_petitioned_tags: petitioned_tags_dict = HC.BuildKeyToListDict( hash_ids_to_petitioned_tags[ hash_id ] ) + else: petitioned_tags_dict = {} + + tag_service_ids = { service_id for ( service_id, tags ) in current_tags_dict.items() + deleted_tags_dict.items() + pending_tags_dict.items() + petitioned_tags_dict.items() } + + service_identifiers_to_cdpp = {} + + for tag_service_id in tag_service_ids: + + if tag_service_id in current_tags_dict: current_tags = set( current_tags_dict[ tag_service_id ] ) + else: current_tags = set() + + if tag_service_id in deleted_tags_dict: deleted_tags = set( deleted_tags_dict[ tag_service_id ] ) + else: deleted_tags = set() + + if tag_service_id in pending_tags_dict: pending_tags = set( pending_tags_dict[ tag_service_id ] ) + else: pending_tags = set() + + if tag_service_id in petitioned_tags_dict: petitioned_tags = set( petitioned_tags_dict[ tag_service_id ] ) + else: petitioned_tags = set() + + tag_s_i = service_ids_to_service_identifiers[ tag_service_id ] + + service_identifiers_to_cdpp[ tag_s_i ] = ( current_tags, deleted_tags, pending_tags, petitioned_tags ) + + + tags_cdpp = CC.CDPPTagServiceIdentifiers( self._tag_service_precedence, service_identifiers_to_cdpp ) + + if not system_predicates.OkSecondRound( tags_cdpp.GetNumTags( tag_service_identifier, include_current_tags = include_current_tags, include_pending_tags = include_pending_tags ) ): continue + + inbox = hash_id in inbox_hash_ids + + if hash_id in hash_ids_to_current_file_service_ids: current_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_current_file_service_ids[ hash_id ] } + else: current_file_service_identifiers = set() + + if hash_id in hash_ids_to_deleted_file_service_ids: deleted_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_deleted_file_service_ids[ hash_id ] } + else: deleted_file_service_identifiers = set() + + if hash_id in hash_ids_to_pending_file_service_ids: pending_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_pending_file_service_ids[ hash_id ] } + else: pending_file_service_identifiers = set() + + if hash_id in hash_ids_to_petitioned_file_service_ids: petitioned_file_service_identifiers = { service_ids_to_service_identifiers[ service_id ] for service_id in hash_ids_to_petitioned_file_service_ids[ hash_id ] } + else: 
petitioned_file_service_identifiers = set() + + file_service_identifiers_cdpp = CC.CDPPFileServiceIdentifiers( current_file_service_identifiers, deleted_file_service_identifiers, pending_file_service_identifiers, petitioned_file_service_identifiers ) + + if hash_id in hash_ids_to_local_ratings: local_ratings = { service_ids_to_service_identifiers[ service_id ] : rating for ( service_id, rating ) in hash_ids_to_local_ratings[ hash_id ] } + else: local_ratings = {} + + local_ratings = CC.LocalRatings( local_ratings ) + remote_ratings = {} + + media_results.append( CC.MediaResult( ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_cdpp, file_service_identifiers_cdpp, local_ratings, remote_ratings ) ) ) + + + return CC.FileQueryResult( file_service_identifier, search_context.GetRawPredicates(), media_results ) + + + def _GetMediaResultsFromHashes( self, c, search_context, hashes ): + + query_hash_ids = set( self._GetHashIds( c, hashes ) ) + + return self._GetMediaResults( c, search_context, query_hash_ids ) + + + def _GetMime( self, c, service_id, hash_id ): + + result = c.execute( 'SELECT mime FROM files_info USING ( hash_id ) WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() + + if result is None: raise HC.NotFoundException( 'Could not find that file\'s mime!' ) + + ( mime, ) = result + + return mime + + + def _GetNews( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + news = c.execute( 'SELECT post, timestamp FROM news WHERE service_id = ?;', ( service_id, ) ).fetchall() + + return news + + + def _GetNumsPending( self, c ): + + tag_pending_1 = c.execute( 'SELECT service_id, COUNT( * ) FROM pending_mappings GROUP BY service_id;' ).fetchall() + + tag_pending_2 = c.execute( 'SELECT service_id, COUNT( * ) FROM mapping_petitions GROUP BY service_id;' ).fetchall() + + file_pending_1 = c.execute( 'SELECT service_id_to, COUNT( * ) FROM file_transfers WHERE service_id_to != ? GROUP BY service_id_to;', ( self._local_file_service_id, ) ).fetchall() + + file_pending_2 = c.execute( 'SELECT service_id, COUNT( * ) FROM file_petitions GROUP BY service_id;' ).fetchall() + + pendings_dict = {} + + for ( service_id, count ) in tag_pending_1 + tag_pending_2 + file_pending_1 + file_pending_2: + + if service_id in pendings_dict: pendings_dict[ service_id ] += count + else: pendings_dict[ service_id ] = count + + + pendings = { self._GetServiceIdentifier( c, service_id ) : count for ( service_id, count ) in pendings_dict.items() } + + return pendings + + + def _GetPending( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + if service_identifier.GetType() == HC.TAG_REPOSITORY: + + uploads = [ ( namespace + ':' + tag, hash ) if namespace != '' else ( tag, hash ) for ( namespace, tag, hash ) in c.execute( 'SELECT namespace, tag, hash FROM hashes, ( tags, ( namespaces, pending_mappings USING ( namespace_id ) ) USING ( tag_id ) ) USING ( hash_id ) WHERE service_id = ? ORDER BY namespace DESC, tag ASC;', ( service_id, ) ) ] + petitions = [ ( namespace + ':' + tag, hash, reason ) if namespace != '' else ( tag, hash, reason ) for ( namespace, tag, hash, reason ) in c.execute( 'SELECT namespace, tag, hash, reason FROM hashes, ( reasons, ( tags, ( namespaces, mapping_petitions USING ( namespace_id ) ) USING ( tag_id ) ) USING ( reason_id ) ) USING ( hash_id ) WHERE service_id = ? 
ORDER BY namespace DESC, tag ASC;', ( service_id, ) ) ] + + elif service_identifier.GetType() == HC.FILE_REPOSITORY: + + uploads = [ hash for ( hash, ) in c.execute( 'SELECT hash FROM hashes, file_transfers USING ( hash_id ) WHERE service_id_to = ?;', ( service_id, ) ) ] + petitions = c.execute( 'SELECT hash, reason FROM reasons, ( hashes, file_petitions USING ( hash_id ) ) USING ( reason_id ) WHERE service_id = ?;', ( service_id, ) ).fetchall() + + + return ( uploads, petitions ) + + + def _GetReason( self, c, reason_id ): + + result = c.execute( 'SELECT reason FROM reasons WHERE reason_id = ?;', ( reason_id, ) ).fetchone() + + if result is None: raise Exception( 'Reason error in database' ) + + ( reason, ) = result + + return reason + + + def _GetReasonId( self, c, reason ): + + result = c.execute( 'SELECT reason_id FROM reasons WHERE reason=?;', ( reason, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO reasons ( reason ) VALUES ( ? );', ( reason, ) ) + + reason_id = c.lastrowid + + else: ( reason_id, ) = result + + return reason_id + + + def _GetResolution( self, c, hash ): return c.execute( 'SELECT width, height FROM files_info, hashes USING ( hash_id ) WHERE service_id = ? AND hash = ?;', ( self._local_file_service_id, sqlite3.Binary( hash ) ) ).fetchone() + + def _GetService( self, c, parameter ): + + try: + + if type( parameter ) == int: service_id = parameter + elif type( parameter ) == HC.ClientServiceIdentifier: service_id = self._GetServiceId( c, parameter ) + elif type( parameter ) == ClientConstantsMessages.Contact: + + contact_id = self._GetContactId( c, parameter ) + + ( service_id, ) = c.execute( 'SELECT service_id FROM message_depots WHERE contact_id = ?;', ( contact_id, ) ).fetchone() + + + except: raise Exception( 'Service error in database.' ) + + result = c.execute( 'SELECT service_key, type, name FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if result is None: raise Exception( 'Service error in database.' 
) + + ( service_key, service_type, name ) = result + + service_identifier = HC.ClientServiceIdentifier( service_key, service_type, name ) + + if service_type in HC.REPOSITORIES: + + ( host, port, last_error, access_key, account, first_begin, next_begin ) = c.execute( 'SELECT host, port, last_error, access_key, account, first_begin, next_begin FROM repositories, ( accounts, addresses USING ( service_id ) ) USING ( service_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone() + + credentials = CC.Credentials( host, port, access_key ) + + if service_type == HC.RATING_LIKE_REPOSITORY: + + ( like, dislike ) = c.execute( 'SELECT like, dislike FROM ratings_like WHERE service_id = ?;', ( service_id, ) ).fetchone() + + service = CC.ServiceRemoteRestrictedRepositoryRatingLike( service_identifier, credentials, last_error, account, first_begin, next_begin, like, dislike ) + + elif service_type == HC.RATING_NUMERICAL_REPOSITORY: + + ( lower, upper ) = c.execute( 'SELECT lower, upper FROM ratings_numerical WHERE service_id = ?;', ( service_id, ) ).fetchone() + + service = CC.ServiceRemoteRestrictedRepositoryRatingNumerical( service_identifier, credentials, last_error, account, first_begin, next_begin, lower, upper ) + + else: service = CC.ServiceRemoteRestrictedRepository( service_identifier, credentials, last_error, account, first_begin, next_begin ) + + elif service_type == HC.MESSAGE_DEPOT: + + ( host, port, last_error, access_key, account, contact_id, last_check, check_period, private_key, receive_anon ) = c.execute( 'SELECT host, port, last_error, access_key, account, contact_id, last_check, check_period, private_key, receive_anon FROM message_depots, ( accounts, addresses USING ( service_id ) ) USING ( service_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone() + + credentials = CC.Credentials( host, port, access_key ) + + contact = self._GetContact( c, contact_id ) + + service = CC.ServiceRemoteRestrictedDepotMessage( service_identifier, credentials, last_error, account, last_check, check_period, contact, private_key, receive_anon ) + + elif service_type in HC.RESTRICTED_SERVICES: + + ( host, port, last_error, access_key, account ) = c.execute( 'SELECT host, port, last_error, access_key, account FROM accounts, addresses USING ( service_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone() + + credentials = CC.Credentials( host, port, access_key ) + + service = CC.ServiceRemoteRestricted( service_identifier, credentials, last_error, account ) + + elif service_type in HC.REMOTE_SERVICES: + + ( host, port, last_error ) = c.execute( 'SELECT host, port, last_error FROM addresses WHERE service_id = ?;', ( service_id, ) ).fetchone() + + credentials = CC.Credentials( host, port ) + + service = CC.ServiceRemoteRestricted( service_identifier, credentials, last_error ) + + elif service_type == HC.LOCAL_RATING_LIKE: + + ( like, dislike ) = c.execute( 'SELECT like, dislike FROM ratings_like WHERE service_id = ?;', ( service_id, ) ).fetchone() + + service = CC.ServiceLocalRatingLike( service_identifier, like, dislike ) + + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + ( lower, upper ) = c.execute( 'SELECT lower, upper FROM ratings_numerical WHERE service_id = ?;', ( service_id, ) ).fetchone() + + service = CC.ServiceLocalRatingNumerical( service_identifier, lower, upper ) + + else: service = CC.Service( service_identifier ) + + return service + + + def _GetServices( self, c, limited_types = HC.ALL_SERVICES ): + + service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT 
service_id FROM services WHERE type IN ' + HC.SplayListForDB( limited_types ) + ';' ) ] + + services = [ self._GetService( c, service_id ) for service_id in service_ids ] + + return services + + + def _GetServiceId( self, c, parameter ): + + if type( parameter ) in ( str, unicode ): + + result = c.execute( 'SELECT service_id FROM services WHERE name = ?;', ( parameter, ) ).fetchone() + + if result is None: raise Exception( 'Service id error in database' ) + + ( service_id, ) = result + + elif type( parameter ) == HC.ClientServiceIdentifier: + + service_type = parameter.GetType() + + if service_type == HC.NULL_SERVICE: return None + elif service_type == HC.LOCAL_FILE: return self._local_file_service_id + else: + + service_key = parameter.GetServiceKey() + + result = c.execute( 'SELECT service_id FROM services WHERE service_key = ?;', ( sqlite3.Binary( service_key ), ) ).fetchone() + + if result is None: raise Exception( 'Service id error in database' ) + + ( service_id, ) = result + + + + return service_id + + + def _GetServiceIds( self, c, service_type ): return [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM services WHERE type = ?;', ( service_type, ) ) ] + + def _GetServiceIdentifier( self, c, service_id ): + + result = c.execute( 'SELECT service_key, type, name FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if result is None: raise Exception( 'Service type, name error in database' ) + + ( service_key, service_type, name ) = result + + return HC.ClientServiceIdentifier( service_key, service_type, name ) + + + def _GetServiceIdentifiers( self, c, limited_types = HC.ALL_SERVICES ): return { HC.ClientServiceIdentifier( service_key, service_type, name ) for ( service_key, service_type, name ) in c.execute( 'SELECT service_key, type, name FROM services WHERE type IN ' + HC.SplayListForDB( limited_types ) + ';' ) } + + def _GetServiceInfo( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + service_type = service_identifier.GetType() + + if service_type == HC.LOCAL_FILE: info_types = { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_TOTAL_SIZE, HC.SERVICE_INFO_NUM_DELETED_FILES } + elif service_type == HC.FILE_REPOSITORY: info_types = { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_TOTAL_SIZE, HC.SERVICE_INFO_NUM_DELETED_FILES, HC.SERVICE_INFO_NUM_THUMBNAILS, HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL } + elif service_type == HC.LOCAL_TAG: info_types = { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS, HC.SERVICE_INFO_NUM_MAPPINGS } + elif service_type == HC.TAG_REPOSITORY: info_types = { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS, HC.SERVICE_INFO_NUM_MAPPINGS, HC.SERVICE_INFO_NUM_DELETED_MAPPINGS } + elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): info_types = { HC.SERVICE_INFO_NUM_FILES } + else: info_types = set() + + service_info = self._GetServiceInfoSpecific( c, service_id, service_type, info_types ) + + return service_info + + + def _GetServiceInfoSpecific( self, c, service_id, service_type, info_types ): + + save_it = True + + results = { info_type : info for ( info_type, info ) in c.execute( 'SELECT info_type, info FROM service_info WHERE service_id = ? 
AND info_type IN ' + HC.SplayListForDB( info_types ) + ';', ( service_id, ) ) } + + if len( results ) != len( info_types ): + + info_types_hit = results.keys() + + info_types_missed = info_types.difference( info_types_hit ) + + if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + common_tag_info_types = { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS } + + if common_tag_info_types <= info_types_missed: + + ( num_files, num_namespaces, num_tags ) = c.execute( 'SELECT COUNT( DISTINCT hash_id ), COUNT( DISTINCT namespace_id ), COUNT( DISTINCT tag_id ) FROM mappings WHERE service_id = ?;', ( service_id, ) ).fetchone() + + results[ HC.SERVICE_INFO_NUM_FILES ] = num_files + results[ HC.SERVICE_INFO_NUM_NAMESPACES ] = num_namespaces + results[ HC.SERVICE_INFO_NUM_TAGS ] = num_tags + + c.execute( 'INSERT INTO service_info ( service_id, info_type, info ) VALUES ( ?, ?, ? );', ( service_id, HC.SERVICE_INFO_NUM_FILES, num_files ) ) + c.execute( 'INSERT INTO service_info ( service_id, info_type, info ) VALUES ( ?, ?, ? );', ( service_id, HC.SERVICE_INFO_NUM_NAMESPACES, num_namespaces ) ) + c.execute( 'INSERT INTO service_info ( service_id, info_type, info ) VALUES ( ?, ?, ? );', ( service_id, HC.SERVICE_INFO_NUM_TAGS, num_tags ) ) + + info_types_missed.difference_update( common_tag_info_types ) + + + + for info_type in info_types_missed: + + if service_type in ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ): + + if info_type == HC.SERVICE_INFO_NUM_FILES: result = c.execute( 'SELECT COUNT( * ) FROM files_info WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_TOTAL_SIZE: result = c.execute( 'SELECT SUM( size ) FROM files_info WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_DELETED_FILES: result = c.execute( 'SELECT COUNT( * ) FROM deleted_files WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_THUMBNAILS: result = c.execute( 'SELECT COUNT( * ) FROM files_info WHERE service_id = ? 
AND mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ';', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL: + + thumbnails_i_have = { path.decode( 'hex' ) for path in dircache.listdir( HC.CLIENT_THUMBNAILS_DIR ) if not path.endswith( '_resized' ) } + + hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ] + + thumbnails_i_should_have = self._GetHashes( c, hash_ids ) + + thumbnails_i_have.intersection_update( thumbnails_i_should_have ) + + result = ( len( thumbnails_i_have ), ) + + elif info_type == HC.SERVICE_INFO_NUM_INBOX: result = c.execute( 'SELECT COUNT( * ) FROM file_inbox, files_info USING ( hash_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone() + + elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + if info_type == HC.SERVICE_INFO_NUM_FILES: result = c.execute( 'SELECT COUNT( DISTINCT hash_id ) FROM mappings WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_NAMESPACES: result = c.execute( 'SELECT COUNT( DISTINCT namespace_id ) FROM mappings WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_TAGS: result = c.execute( 'SELECT COUNT( DISTINCT tag_id ) FROM mappings WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_MAPPINGS: result = c.execute( 'SELECT COUNT( * ) FROM mappings WHERE service_id = ?;', ( service_id, ) ).fetchone() + elif info_type == HC.SERVICE_INFO_NUM_DELETED_MAPPINGS: result = c.execute( 'SELECT COUNT( * ) FROM deleted_mappings WHERE service_id = ?;', ( service_id, ) ).fetchone() + + elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + if info_type == HC.SERVICE_INFO_NUM_FILES: result = c.execute( 'SELECT COUNT( * ) FROM local_ratings WHERE service_id = ?;', ( service_id, ) ).fetchone() + + elif service_type == HC.NULL_SERVICE: + + if info_type == HC.SERVICE_INFO_NUM_FILES: + + result = ( len( c.execute( 'SELECT hash_id FROM mappings UNION SELECT hash_id FROM files_info;' ).fetchall() ), ) + + save_it = False + + + + if result is None: info = 0 + else: ( info, ) = result + + if info is None: info = 0 + + if save_it: c.execute( 'INSERT INTO service_info ( service_id, info_type, info ) VALUES ( ?, ?, ? 
);', ( service_id, info_type, info ) ) + + results[ info_type ] = info + + + + return results + + + def _GetServiceType( self, c, service_id ): + + result = c.execute( 'SELECT type FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if result is None: raise Exception( 'Service id error in database' ) + + ( service_type, ) = result + + return service_type + + + def _GetFileSystemPredicates( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + service_type = service_identifier.GetType() + + predicates = [] + + if service_type == HC.NULL_SERVICE: + + service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_FILES } ) + + num_everything = service_info[ HC.SERVICE_INFO_NUM_FILES ] + + predicates.append( ( u'system:everything', num_everything ) ) + + predicates.extend( [ ( predicate, None ) for predicate in [ u'system:untagged', u'system:numtags', u'system:hash' ] ] ) + + # num local, would be great + # num inbox, would be great + + # we can't guarantee knowing files_info, so only have untagged and numtags + + elif service_type == HC.TAG_REPOSITORY: + + service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_FILES } ) + + num_everything = service_info[ HC.SERVICE_INFO_NUM_FILES ] + + predicates.append( ( u'system:everything', num_everything ) ) + + predicates.extend( [ ( predicate, None ) for predicate in [ u'system:untagged', u'system:numtags', u'system:hash' ] ] ) + + # num local, would be great + # num inbox, would be great + + elif service_type == HC.LOCAL_TAG: + + service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_FILES } ) + + num_everything = service_info[ HC.SERVICE_INFO_NUM_FILES ] + + predicates.append( ( u'system:everything', num_everything ) ) + + predicates.extend( [ ( predicate, None ) for predicate in [ u'system:untagged', u'system:numtags', u'system:hash' ] ] ) + + # num local, would be great + # num inbox, would be great + + elif service_type == HC.LOCAL_FILE: + + service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_INBOX } ) + + num_everything = service_info[ HC.SERVICE_INFO_NUM_FILES ] + num_inbox = service_info[ HC.SERVICE_INFO_NUM_INBOX ] + + num_archive = num_everything - num_inbox + + predicates.append( ( u'system:everything', num_everything ) ) + + if num_inbox > 0: + + predicates.append( ( u'system:inbox', num_inbox ) ) + predicates.append( ( u'system:archive', num_archive ) ) + + + predicates.extend( [ ( predicate, None ) for predicate in [ u'system:untagged', u'system:numtags', u'system:limit', u'system:size', u'system:age', u'system:hash', u'system:width', u'system:height', u'system:ratio', u'system:duration', u'system:mime', u'system:rating', u'system:similar_to' ] ] ) + + for service_identifier in self._GetServiceIdentifiers( c, ( HC.FILE_REPOSITORY, ) ): predicates.append( ( u'system:not_uploaded_to:' + service_identifier.GetName(), None ) ) + + elif service_type == HC.FILE_REPOSITORY: + + service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_INBOX } ) + + num_everything = service_info[ HC.SERVICE_INFO_NUM_FILES ] + num_inbox = service_info[ HC.SERVICE_INFO_NUM_INBOX ] + + if self._options[ 'exclude_deleted_files' ]: + + ( num_everything_deleted, ) = c.execute( 'SELECT COUNT( * ) FROM files_info, deleted_files USING ( hash_id ) WHERE 
files_info.service_id = ? AND deleted_files.service_id = ?;', ( service_id, self._local_file_service_id ) ).fetchone() + + num_everything -= num_everything_deleted + + + ( num_local, ) = c.execute( 'SELECT COUNT( * ) FROM files_info AS remote_files_info, files_info USING ( hash_id ) WHERE remote_files_info.service_id = ? AND files_info.service_id = ?;', ( service_id, self._local_file_service_id ) ).fetchone() + + num_not_local = num_everything - num_local + num_archive = num_local - num_inbox + + predicates.append( ( u'system:everything', num_everything ) ) + + if num_inbox > 0: + + predicates.append( ( u'system:inbox', num_inbox ) ) + predicates.append( ( u'system:archive', num_archive ) ) + + + predicates.append( ( u'system:local', num_local ) ) + predicates.append( ( u'system:not local', num_not_local ) ) + + predicates.extend( [ ( predicate, None ) for predicate in [ u'system:untagged', u'system:numtags', u'system:limit', u'system:size', u'system:age', u'system:hash', u'system:width', u'system:height', u'system:ratio', u'system:duration', u'system:mime', u'system:rating', u'system:similar_to' ] ] ) + + + return predicates + + + def _GetMessageSystemPredicates( self, c, identity ): + + name = identity.GetName() + + is_anon = name == 'Anonymous' + + additional_predicate = '' + + if name != 'Anonymous': + + service = self._GetService( c, identity ) + + if not service.ReceivesAnon(): additional_predicate = 'contact_id_from != 1 AND ' + + + contact_id = self._GetContactId( c, name ) + + unread_status_id = self._GetStatusId( c, 'sent' ) + + #service_info = self._GetServiceInfoSpecific( c, service_id, service_type, { HC.SERVICE_INFO_NUM_CONVERSATIONS, HC.SERVICE_INFO_NUM_INBOX, HC.SERVICE_INFO_NUM_UNREAD, HC.SERVICE_INFO_NUM_DRAFTS } ) + + ( num_conversations, ) = c.execute( 'SELECT COUNT( DISTINCT conversation_id ) FROM messages, message_destination_map USING ( message_id ) WHERE ' + additional_predicate + '( contact_id_from = ? OR contact_id_to = ? );', ( contact_id, contact_id ) ).fetchone() + ( num_inbox, ) = c.execute( 'SELECT COUNT( DISTINCT conversation_id ) FROM message_destination_map, ( messages, message_inbox USING ( message_id ) ) USING ( message_id ) WHERE ' + additional_predicate + 'contact_id_to = ?;', ( contact_id, ) ).fetchone() + ( num_drafts, ) = c.execute( 'SELECT COUNT( DISTINCT conversation_id ) FROM messages, message_drafts USING ( message_id ) WHERE contact_id_from = ?;', ( contact_id, ) ).fetchone() + ( num_unread, ) = c.execute( 'SELECT COUNT( DISTINCT conversation_id ) FROM messages, message_destination_map USING ( message_id ) WHERE ' + additional_predicate + 'contact_id_to = ? 
AND status_id = ?;', ( contact_id, unread_status_id ) ).fetchone() + + predicates = [] + + # anon has no inbox, no received mail; only sent mail + + predicates.append( ( u'system:everything', num_conversations ) ) + if not is_anon: + predicates.append( ( u'system:inbox', num_inbox ) ) + predicates.append( ( u'system:archive', num_conversations - num_inbox ) ) + predicates.append( ( u'system:unread', num_unread ) ) + predicates.append( ( u'system:drafts', num_drafts ) ) + if not is_anon: + predicates.append( ( u'system:started_by', None ) ) + predicates.append( ( u'system:from', None ) ) + predicates.append( ( u'system:to', None ) ) + predicates.append( ( u'system:age', None ) ) + predicates.append( ( u'system:numattachments', None ) ) + # we can add more later + + return predicates + + + def _GetShutdownTimestamps( self, c ): + + shutdown_timestamps = collections.defaultdict( lambda: 0 ) + + shutdown_timestamps.update( c.execute( 'SELECT shutdown_type, timestamp FROM shutdown_timestamps;' ).fetchall() ) + + return shutdown_timestamps + + + def _GetTagServicePrecedence( self, c ): + + service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence ASC;' ) ] + + # the first service_id is the most important + + return [ self._GetServiceIdentifier( c, service_id ) for service_id in service_ids ] + + + def _GetThumbnailHashesIShouldHave( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ] + + hashes = set( self._GetHashes( c, hash_ids ) ) + + return hashes + + + def _GetURLStatus( self, c, url ): + + result = c.execute( 'SELECT hash_id FROM urls WHERE url = ?;', ( url, ) ).fetchone() + + if result is not None: + + ( hash_id, ) = result + + if self._options[ 'exclude_deleted_files' ]: + + result = c.execute( 'SELECT 1 FROM deleted_files WHERE hash_id = ?;', ( hash_id, ) ).fetchone() + + if result is not None: return ( 'deleted', None ) + + + result = c.execute( 'SELECT 1 FROM files_info WHERE service_id = ? AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone() + + if result is not None: + + hash = self._GetHash( c, hash_id ) + + return ( 'redundant', hash ) + + + + return ( 'new', None ) + + + def _ImportFile( self, c, file, advanced_import_options = {}, service_identifiers_to_tags = {}, generate_media_result = False, override_deleted = False ): + + result = 'successful' + + can_add = True + + archive = 'auto_archive' in advanced_import_options + + exclude_deleted_files = 'exclude_deleted_files' in advanced_import_options + + file = HydrusImageHandling.ConvertToPngIfBmp( file ) + + size = len( file ) + + if size == 0: can_add = False + + if 'min_size' in advanced_import_options: + + min_size = advanced_import_options[ 'min_size' ] + + if size < min_size: raise Exception( 'File too small' ) + + + hash = hashlib.sha256( file ).digest() + + hash_id = self._GetHashId( c, hash ) + + already_in_db = c.execute( 'SELECT 1 FROM files_info WHERE service_id = ? 
AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone() is not None + + if already_in_db: + + result = 'redundant' + + if archive: + + c.execute( 'DELETE FROM file_inbox WHERE hash_id = ?;', ( hash_id, ) ) + + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, set( ( hash, ) ) ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, set( ( hash, ) ) ) ] ) + + + can_add = False + + else: + + if not override_deleted: + + if exclude_deleted_files and c.execute( 'SELECT 1 FROM deleted_files WHERE service_id = ? AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone() is not None: + + result = 'deleted' + + can_add = False + + + + + if can_add: + + mime = HC.GetMimeFromString( file[:256] ) + + if mime in HC.IMAGES: + + image_container = HydrusImageHandling.RenderImageFromFile( file, hash ) + + ( width, height ) = image_container.GetSize() + + image_container = HydrusImageHandling.RenderImageFromFile( file, hash ) + + ( width, height ) = image_container.GetSize() + + if image_container.IsAnimated(): + + duration = image_container.GetTotalDuration() + num_frames = image_container.GetNumFrames() + + else: + + duration = None + num_frames = None + + + num_words = None + + elif mime == HC.APPLICATION_FLASH: + + ( ( width, height ), duration, num_frames ) = HydrusFlashHandling.GetFlashProperties( file ) + + num_words = None + + elif mime == HC.VIDEO_FLV: + + ( ( width, height ), duration, num_frames ) = HydrusVideoHandling.GetFLVProperties( file ) + + num_words = None + + + if 'min_resolution' in advanced_import_options: + + ( min_x, min_y ) = advanced_import_options[ 'min_resolution' ] + + if width < min_x or height < min_y: raise Exception( 'Resolution too small' ) + + + + if can_add: + + timestamp = int( time.time() ) + + dest_path = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + if not os.path.exists( dest_path ): + + with open( dest_path, 'wb' ) as f: f.write( file ) + + + if mime in HC.MIMES_WITH_THUMBNAILS: + + thumbnail = HydrusImageHandling.GenerateThumbnailFileFromFile( file, HC.UNSCALED_THUMBNAIL_DIMENSIONS ) + + thumbnail_path_to = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( thumbnail_path_to, 'wb' ) as f: f.write( thumbnail ) + + thumbnail_resized = HydrusImageHandling.GenerateThumbnailFileFromFile( thumbnail, self._options[ 'thumbnail_dimensions' ] ) + + thumbnail_resized_path_to = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + '_resized' + + with open( thumbnail_resized_path_to, 'wb' ) as f: f.write( thumbnail_resized ) + + phash = HydrusImageHandling.GeneratePerceptualHash( thumbnail ) + + # replace is important here! + c.execute( 'INSERT OR REPLACE INTO perceptual_hashes VALUES ( ?, ? );', ( hash_id, sqlite3.Binary( phash ) ) ) + + + files_info_rows = [ ( self._local_file_service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) ] + + self._AddFiles( c, files_info_rows ) + + md5 = hashlib.md5( file ).digest() + + sha1 = hashlib.sha1( file ).digest() + + c.execute( 'INSERT OR IGNORE INTO local_hashes ( hash_id, md5, sha1 ) VALUES ( ?, ?, ? );', ( hash_id, sqlite3.Binary( md5 ), sqlite3.Binary( sha1 ) ) ) + + if not archive: self._InboxFiles( c, ( hash_id, ) ) + + + if len( service_identifiers_to_tags ) > 0 and c.execute( 'SELECT 1 FROM files_info WHERE service_id = ? 
AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone() is not None: + + for ( service_identifier, tags ) in service_identifiers_to_tags.items(): + + if service_identifier == CC.LOCAL_TAG_SERVICE_IDENTIFIER: edit_log = [ ( CC.CONTENT_UPDATE_ADD, tag ) for tag in tags ] + else: edit_log = [ ( CC.CONTENT_UPDATE_PENDING, tag ) for tag in tags ] + + content_updates = [ CC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), edit_log ) ] + + self._ProcessContentUpdates( c, content_updates ) + + + + if generate_media_result: + + if ( can_add or already_in_db ): + + search_context = CC.FileSearchContext() + + ( media_result, ) = self._GetMediaResults( c, search_context, set( ( hash_id, ) ) ) + + return ( result, hash, media_result ) + + else: return ( result, hash, None ) + + else: return ( result, hash ) + + + def _ImportFilePage( self, c, page_key, file, advanced_import_options = {}, service_identifiers_to_tags = {}, url = None ): + + try: + + ( result, hash, media_result ) = self._ImportFile( c, file, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, generate_media_result = True ) + + if url is not None: + + hash_id = self._GetHashId( c, hash ) + + c.execute( 'INSERT OR IGNORE INTO urls ( url, hash_id ) VALUES ( ?, ? );', ( url, hash_id ) ) + + + if media_result is not None: self.pub( 'add_media_result', page_key, media_result ) + + if result == 'successful': + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] ) + + self.pub( 'import_done', page_key, result ) + + except Exception as e: + + HC.pubsub.pub( 'import_done', page_key, 'failed', exception = e ) + + raise + + + + def _InboxFiles( self, c, hash_ids ): + + c.executemany( 'INSERT OR IGNORE INTO file_inbox VALUES ( ? );', [ ( hash_id, ) for hash_id in hash_ids ] ) + + num_added = c.rowcount + + if num_added > 0: + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + updates = c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id;' ).fetchall() + + c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', [ ( count, service_id, HC.SERVICE_INFO_NUM_INBOX ) for ( service_id, count ) in updates ] ) + + + + def _PetitionFiles( self, c, service_identifier, hashes, reason ): + + service_id = self._GetServiceId( c, service_identifier ) + + hash_ids = self._GetHashIds( c, hashes ) + + reason_id = self._GetReasonId( c, reason ) + + c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) + + c.executemany( 'INSERT OR IGNORE INTO file_petitions ( service_id, hash_id, reason_id ) VALUES ( ?, ?, ? 
);', [ ( service_id, hash_id, reason_id ) for hash_id in hash_ids ] ) + + self.pub( 'notify_new_pending' ) + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_PETITION, service_identifier, hashes ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_PETITION, service_identifier, hashes ) ] ) + + + def _ProcessContentUpdates( self, c, content_updates ): + + for content_update in content_updates: + + service_identifier = content_update.GetServiceIdentifier() + + service_type = service_identifier.GetType() + + service_id = self._GetServiceId( c, service_identifier ) + + action = content_update.GetAction() + + if service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ): + + hashes = content_update.GetHashes() + + hash_ids = self._GetHashIds( c, hashes ) + + if action == CC.CONTENT_UPDATE_ARCHIVE: self._ArchiveFiles( c, hash_ids ) + elif action == CC.CONTENT_UPDATE_DELETE: self._DeleteFiles( c, service_id, hash_ids ) + + elif service_type == HC.LOCAL_TAG: + + hashes = content_update.GetHashes() + + hash_ids = self._GetHashIds( c, hashes ) + + info = content_update.GetInfo() + + if action == CC.CONTENT_UPDATE_EDIT_LOG: + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + hash_ids_set = set( hash_ids ) + + edit_log = info + + mappings_ids = [] + deleted_mappings_ids = [] + + for ( action, info ) in edit_log: + + if action == CC.CONTENT_UPDATE_ADD: + + tag = info + + if tag == '': continue + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + mappings_ids.append( ( namespace_id, tag_id, hash_ids ) ) + + elif action == CC.CONTENT_UPDATE_DELETE: + + tag = info + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + deleted_mappings_ids.append( ( namespace_id, tag_id, hash_ids ) ) + + + + self._UpdateMappings( c, service_id, mappings_ids, deleted_mappings_ids ) + + self.pub( 'notify_new_pending' ) + + + elif service_type == HC.TAG_REPOSITORY: + + hashes = content_update.GetHashes() + + hash_ids = self._GetHashIds( c, hashes ) + + info = content_update.GetInfo() + + if action == CC.CONTENT_UPDATE_DELETE: + + tag = info + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + self._UpdateMappings( c, service_id, [], [ ( namespace_id, tag_id, hash_ids ) ] ) + + elif action == CC.CONTENT_UPDATE_EDIT_LOG: + + ( precedence, ) = c.execute( 'SELECT precedence FROM tag_service_precedence WHERE service_id = ?;', ( service_id, ) ).fetchone() + + higher_precedence_service_ids = [ id for ( id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence WHERE precedence < ?;', ( precedence, ) ) ] + + splayed_higher_precedence_service_ids = HC.SplayListForDB( higher_precedence_service_ids ) + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + hash_ids_set = set( hash_ids ) + + edit_log = info + + for ( action, info ) in edit_log: + + if action == CC.CONTENT_UPDATE_PENDING: + + tag = info + + if tag == '': continue + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + already_in_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) } + + hash_ids_i_can_add = hash_ids_set - already_in_hash_ids + + c.executemany( 'INSERT OR IGNORE INTO pending_mappings VALUES ( ?, ?, ?, ? 
);', [ ( service_id, namespace_id, tag_id, hash_id ) for hash_id in hash_ids_i_can_add ] ) + + self._UpdateAutocompleteTagCacheFromPendingTags( c, service_id, namespace_id, tag_id, hash_ids_i_can_add, 1 ) + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_mappings WHERE service_id IN ' + splayed_higher_precedence_service_ids + ' AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids = hash_ids_i_can_add - invalid_hash_ids + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids.difference_update( invalid_hash_ids ) + + c.executemany( 'INSERT OR IGNORE INTO active_pending_mappings VALUES ( ?, ?, ? );', [ ( namespace_id, tag_id, hash_id ) for hash_id in valid_hash_ids ] ) + + self._UpdateAutocompleteTagCacheFromActivePendingTags( c, namespace_id, tag_id, valid_hash_ids, 1 ) + + elif action == CC.CONTENT_UPDATE_RESCIND_PENDING: + + tag = info + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + actual_hash_ids_i_can_delete = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) ] + + c.execute( 'DELETE FROM pending_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( actual_hash_ids_i_can_delete ) + ';', ( service_id, namespace_id, tag_id ) ) + + self._UpdateAutocompleteTagCacheFromPendingTags( c, service_id, namespace_id, tag_id, actual_hash_ids_i_can_delete, -1 ) + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id IN ' + splayed_higher_precedence_service_ids + ' AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids = set( hash_ids ) - invalid_hash_ids + + actual_hash_ids_i_can_delete = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( namespace_id, tag_id ) ) ] + + c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( actual_hash_ids_i_can_delete ) + ';', ( namespace_id, tag_id ) ) + + self._UpdateAutocompleteTagCacheFromActivePendingTags( c, namespace_id, tag_id, actual_hash_ids_i_can_delete, -1 ) + + elif action == CC.CONTENT_UPDATE_PETITION: + + ( tag, reason ) = info + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + reason_id = self._GetReasonId( c, reason ) + + c.executemany( 'INSERT OR IGNORE INTO mapping_petitions VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, namespace_id, tag_id, hash_id, reason_id ) for hash_id in hash_ids ] ) + + elif action == CC.CONTENT_UPDATE_RESCIND_PETITION: + + tag = info + + ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( c, tag ) + + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND namespace_id = ? AND tag_id = ? 
AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) + + + + self.pub( 'notify_new_pending' ) + + + elif service_type in HC.RATINGS_SERVICES: + + action = content_update.GetAction() + + hashes = content_update.GetHashes() + + hash_ids = self._GetHashIds( c, hashes ) + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + info = content_update.GetInfo() + + if action == CC.CONTENT_UPDATE_RATING: + + rating = info + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + ratings_added = 0 + + c.execute( 'DELETE FROM local_ratings WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + ratings_added -= c.rowcount + + if rating is not None: + + c.execute( 'DELETE FROM ratings_filter WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + c.executemany( 'INSERT INTO local_ratings ( service_id, hash_id, rating ) VALUES ( ?, ?, ? );', [ ( service_id, hash_id, rating ) for hash_id in hash_ids ] ) + + ratings_added += c.rowcount + + + c.execute( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', ( ratings_added, service_id, HC.SERVICE_INFO_NUM_FILES ) ) + + # and then do a thing here where it looks up remote services links and then pends/rescinds pends appropriately + + + elif action == CC.CONTENT_UPDATE_RATINGS_FILTER: + + ( min, max ) = info + + c.execute( 'DELETE FROM ratings_filter WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + c.executemany( 'INSERT INTO ratings_filter ( service_id, hash_id, min, max ) VALUES ( ?, ?, ?, ? );', [ ( service_id, hash_id, min, max ) for hash_id in hash_ids ] ) + + + + + self.pub( 'content_updates_data', content_updates ) + self.pub( 'content_updates_gui', content_updates ) + + + def _RecalcActiveMappings( self, c ): + + service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence DESC;' ) ] + + c.execute( 'DELETE FROM active_mappings;' ) + c.execute( 'DELETE FROM active_pending_mappings;' ) + + first_round = True + + for service_id in service_ids: + + c.execute( 'INSERT OR IGNORE INTO active_mappings SELECT namespace_id, tag_id, hash_id FROM mappings WHERE service_id = ?;', ( service_id, ) ) + c.execute( 'INSERT OR IGNORE INTO active_pending_mappings SELECT namespace_id, tag_id, hash_id FROM pending_mappings WHERE service_id = ?;', ( service_id, ) ) + + # is this incredibly inefficient? + # if this is O( n-squared ) or whatever, just rewrite it as two queries using indices + if not first_round: + + c.execute( 'DELETE FROM active_mappings WHERE namespace_id || "," || tag_id || "," || hash_id IN ( SELECT namespace_id || "," || tag_id || "," || hash_id FROM deleted_mappings WHERE service_id = ? );', ( service_id, ) ) + c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id || "," || tag_id || "," || hash_id IN ( SELECT namespace_id || "," || tag_id || "," || hash_id FROM deleted_mappings WHERE service_id = ? 
);', ( service_id, ) ) + + + first_round = False + + + c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL;' ) + + + def _RecalcActivePendingMappings( self, c ): + + service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence DESC;' ) ] + + c.execute( 'DELETE FROM active_pending_mappings;' ) + + first_round = True + + for service_id in service_ids: + + c.execute( 'INSERT OR IGNORE INTO active_pending_mappings SELECT namespace_id, tag_id, hash_id FROM pending_mappings WHERE service_id = ?;', ( service_id, ) ) + + # is this incredibly inefficient? + # if this is O( n-squared ) or whatever, just rewrite it as two queries using indices + if not first_round: c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id || "," || tag_id || "," || hash_id IN ( SELECT namespace_id || "," || tag_id || "," || hash_id FROM deleted_mappings WHERE service_id = ? );', ( service_id, ) ) + + first_round = False + + + c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL;' ) + + + def _ResetService( self, c, service_identifier ): + + service_id = self._GetServiceId( c, service_identifier ) + + service_key = os.urandom( 32 ) + + service_type = service_identifier.GetType() + + service_name = service_identifier.GetName() + + new_service_identifier = HC.ClientServiceIdentifier( service_key, service_type, service_name ) + + kwargs = {} + + # we don't reset local services yet, so no need to check if address exists + ( host, port ) = c.execute( 'SELECT host, port FROM addresses WHERE service_id = ?;', ( service_id, ) ).fetchone() + + result = c.execute( 'SELECT access_key FROM accounts WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if result is None: access_key = None + else: ( access_key, ) = result + + credentials = CC.Credentials( host, port, access_key ) + + extra_info = None # we don't reset message depots yet, so no need to preserve + + c.execute( 'DELETE FROM services WHERE service_id = ?;', ( service_id, ) ) + + if service_type == HC.TAG_REPOSITORY: self._RecalcActiveMappings( c ) + + self._AddService( c, new_service_identifier, credentials, extra_info ) + + self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_RESET, service_identifier, new_service_identifier ) ) + self.pub( 'notify_new_pending' ) + self.pub( 'permissions_are_stale' ) + self.pub( 'log_message', 'database', 'reset ' + service_name ) + + + def _Set4chanPass( self, c, token, pin, timeout ): + + c.execute( 'DELETE FROM fourchan_pass;' ) + + c.execute( 'INSERT INTO fourchan_pass ( token, pin, timeout ) VALUES ( ?, ?, ? );', ( token, pin, timeout ) ) + + + def _SetTagServicePrecedence( self, c, service_identifiers ): + + del self._tag_service_precedence[:] + + self._tag_service_precedence.extend( service_identifiers ) + + service_ids = [ self._GetServiceId( c, service_identifier ) for service_identifier in service_identifiers ] + + c.execute( 'DELETE FROM tag_service_precedence;' ) + + c.executemany( 'INSERT INTO tag_service_precedence ( service_id, precedence ) VALUES ( ?, ? 
);', [ ( service_id, precedence ) for ( precedence, service_id ) in enumerate( service_ids ) ] ) + + self._RecalcActiveMappings( c ) + + + def _UpdateAutocompleteTagCacheFromActiveCurrentTags( self, c, namespace_id, tag_id, hash_ids, direction ): + + info = c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY service_id;' ).fetchall() + + c.executemany( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id = ? AND tag_service_id IS NULL AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, service_id, namespace_id, tag_id ) for ( service_id, count ) in info ] ) + + c.execute( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id IS NULL AND tag_service_id IS NULL AND namespace_id = ? AND tag_id = ?;', ( len( hash_ids ) * direction, namespace_id, tag_id ) ) + + + def _UpdateAutocompleteTagCacheFromCurrentTags( self, c, tag_service_id, namespace_id, tag_id, hash_ids, direction ): + + info = c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY service_id;' ).fetchall() + + c.executemany( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( file_service_id, count ) in info ] ) + + c.execute( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id IS NULL AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', ( len( hash_ids ) * direction, tag_service_id, namespace_id, tag_id ) ) + + + def _UpdateAutocompleteTagCacheFromFiles( self, c, file_service_id, hash_ids, direction ): + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + current_tags = c.execute( 'SELECT service_id, namespace_id, tag_id, COUNT( * ) FROM mappings WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id, namespace_id, tag_id;' ).fetchall() + pending_tags = c.execute( 'SELECT service_id, namespace_id, tag_id, COUNT( * ) FROM pending_mappings WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id, namespace_id, tag_id;' ).fetchall() + + c.executemany( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( tag_service_id, namespace_id, tag_id, count ) in current_tags ] ) + c.executemany( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( tag_service_id, namespace_id, tag_id, count ) in current_tags ] ) + + active_tags = c.execute( 'SELECT namespace_id, tag_id, COUNT( * ) FROM active_mappings WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY namespace_id, tag_id;' ).fetchall() + active_pending_tags = c.execute( 'SELECT namespace_id, tag_id, COUNT( * ) FROM active_pending_mappings WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY namespace_id, tag_id;' ).fetchall() + + c.executemany( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id = ? AND tag_service_id IS NULL AND namespace_id = ? 
AND tag_id = ?;', [ ( count * direction, file_service_id, namespace_id, tag_id ) for ( namespace_id, tag_id, count ) in active_tags ] ) + c.executemany( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id = ? AND tag_service_id IS NULL AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, namespace_id, tag_id ) for ( namespace_id, tag_id, count ) in active_pending_tags ] ) + + + def _UpdateAutocompleteTagCacheFromActivePendingTags( self, c, namespace_id, tag_id, hash_ids, direction ): + + info = c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY service_id;' ).fetchall() + + c.executemany( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id = ? AND tag_service_id IS NULL AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, service_id, namespace_id, tag_id ) for ( service_id, count ) in info ] ) + + c.execute( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id IS NULL AND tag_service_id IS NULL AND namespace_id = ? AND tag_id = ?;', ( len( hash_ids ) * direction, namespace_id, tag_id ) ) + + + def _UpdateAutocompleteTagCacheFromPendingTags( self, c, tag_service_id, namespace_id, tag_id, hash_ids, direction ): + + info = c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY service_id;' ).fetchall() + + c.executemany( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( file_service_id, count ) in info ] ) + + c.execute( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id IS NULL AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', ( len( hash_ids ) * direction, tag_service_id, namespace_id, tag_id ) ) + + + def _UpdateMappings( self, c, service_id, mappings_ids, deleted_mappings_ids ): + + namespace_ids_being_added = { namespace_id for ( namespace_id, tag_id, hash_ids ) in mappings_ids } + tag_ids_being_added = { tag_id for ( namespace_id, tag_id, hash_ids ) in mappings_ids } + + hash_ids_lists = [ hash_ids for ( namespace_id, tag_id, hash_ids ) in mappings_ids ] + hash_ids_being_added = { hash_id for hash_id in itertools.chain( *hash_ids_lists ) } + + existing_namespace_ids = { namespace_id for namespace_id in namespace_ids_being_added if c.execute( 'SELECT 1 WHERE EXISTS ( SELECT namespace_id FROM mappings WHERE namespace_id = ? AND service_id = ? );', ( namespace_id, service_id ) ).fetchone() is not None } + existing_tag_ids = { tag_id for tag_id in tag_ids_being_added if c.execute( 'SELECT 1 WHERE EXISTS ( SELECT tag_id FROM mappings WHERE tag_id = ? AND service_id = ? );', ( tag_id, service_id ) ).fetchone() is not None } + existing_hash_ids = { hash_id for hash_id in hash_ids_being_added if c.execute( 'SELECT 1 WHERE EXISTS ( SELECT hash_id FROM mappings WHERE hash_id = ? AND service_id = ? );', ( hash_id, service_id ) ).fetchone() is not None } + + #existing_namespace_ids = { id for id in c.execute( 'SELECT namespace_id FROM mappings WHERE service_id = ? AND namespace_id IN ' + HC.SplayListForDB( namespace_ids_being_added ) + ';', ( service_id, ) ) } + #existing_tag_ids = { id for id in c.execute( 'SELECT tag_id FROM mappings WHERE service_id = ? 
AND tag_id IN ' + HC.SplayListForDB( tag_ids_being_added ) + ';', ( service_id, ) ) } + #existing_hash_ids = { id for id in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids_being_added ) + ';', ( service_id, ) ) } + + #existing_namespace_ids = { id for ( id, ) in c.execute( 'SELECT namespace_id FROM mappings WHERE service_id = ?;', ( service_id, ) ) } + #existing_tag_ids = { id for ( id, ) in c.execute( 'SELECT tag_id FROM mappings WHERE service_id = ?;', ( service_id, ) ) } + #existing_hash_ids = { id for ( id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( service_id, ) ) } + + for ( namespace_id, tag_id, hash_ids ) in mappings_ids: + + hash_ids = set( hash_ids ) + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, namespace_id, tag_id ) ) } + + hash_ids.difference_update( invalid_hash_ids ) + + if len( hash_ids ) > 0: + + num_mappings = len( hash_ids ) + + namespace_added = namespace_id not in existing_namespace_ids + tag_added = tag_id not in existing_tag_ids + num_new_files = len( hash_ids - existing_hash_ids ) + + existing_namespace_ids.add( namespace_id ) + existing_tag_ids.add( tag_id ) + existing_hash_ids.update( hash_ids ) + + c.executemany( 'INSERT OR IGNORE INTO mappings VALUES ( ?, ?, ?, ? );', [ ( service_id, namespace_id, tag_id, hash_id ) for hash_id in hash_ids ] ) + + self._UpdateAutocompleteTagCacheFromCurrentTags( c, service_id, namespace_id, tag_id, hash_ids, 1 ) + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + c.execute( 'DELETE FROM deleted_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) + + num_deleted_mappings_revoked = c.rowcount + + actual_hash_ids_i_can_delete = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) ] + + c.execute( 'DELETE FROM pending_mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( actual_hash_ids_i_can_delete ) + ';', ( service_id, namespace_id, tag_id ) ) + + self._UpdateAutocompleteTagCacheFromPendingTags( c, service_id, namespace_id, tag_id, actual_hash_ids_i_can_delete, -1 ) + + service_info_updates = [] + + service_info_updates.append( ( num_mappings, service_id, HC.SERVICE_INFO_NUM_MAPPINGS ) ) + if num_deleted_mappings_revoked > 0: service_info_updates.append( ( num_deleted_mappings_revoked, service_id, HC.SERVICE_INFO_NUM_DELETED_MAPPINGS ) ) + if namespace_added: service_info_updates.append( ( 1, service_id, HC.SERVICE_INFO_NUM_NAMESPACES ) ) + if tag_added: service_info_updates.append( ( 1, service_id, HC.SERVICE_INFO_NUM_TAGS ) ) + if num_new_files > 0: service_info_updates.append( ( num_new_files, service_id, HC.SERVICE_INFO_NUM_FILES ) ) + + c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates ) + + + + for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids: + + c.executemany( 'INSERT OR IGNORE INTO deleted_mappings ( service_id, namespace_id, tag_id, hash_id ) VALUES ( ?, ?, ?, ? 
);', [ ( service_id, namespace_id, tag_id, hash_id ) for hash_id in hash_ids ] ) + + service_info_updates = [] + + num_deleted_mappings = len( hash_ids ) + + service_info_updates.append( ( num_deleted_mappings, service_id, HC.SERVICE_INFO_NUM_DELETED_MAPPINGS ) ) + + removeable_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, namespace_id, tag_id ) ) } + + if len( removeable_hash_ids ) > 0: + + splayed_removeable_hash_ids = HC.SplayListForDB( hash_ids ) + + c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + splayed_removeable_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) + + number_existing_mappings_actually_removed = c.rowcount + + self._UpdateAutocompleteTagCacheFromCurrentTags( c, service_id, namespace_id, tag_id, removeable_hash_ids, -1 ) + + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + splayed_removeable_hash_ids + ';', ( service_id, namespace_id, tag_id ) ) + + ( result, ) = c.execute( 'SELECT EXISTS ( SELECT 1 FROM mappings WHERE service_id = ? AND namespace_id = ? );', ( service_id, namespace_id, ) ).fetchone() + + namespace_removed = not bool( result ) + + ( result, ) = c.execute( 'SELECT EXISTS ( SELECT 1 FROM mappings WHERE service_id = ? AND tag_id = ? );', ( service_id, tag_id, ) ).fetchone() + + tag_removed = not bool( result ) + + remaining_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ?;', ( service_id, ) ) } + + num_files_removed = len( removeable_hash_ids - remaining_hash_ids ) + + service_info_updates.append( ( -number_existing_mappings_actually_removed, service_id, HC.SERVICE_INFO_NUM_MAPPINGS ) ) + if namespace_removed: service_info_updates.append( ( -1, service_id, HC.SERVICE_INFO_NUM_NAMESPACES ) ) + if tag_removed: service_info_updates.append( ( -1, service_id, HC.SERVICE_INFO_NUM_TAGS ) ) + if num_files_removed > 0: service_info_updates.append( ( -num_files_removed, service_id, HC.SERVICE_INFO_NUM_FILES ) ) + + + c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates ) + + + # now update the active mappings + + ( precedence, ) = c.execute( 'SELECT precedence FROM tag_service_precedence WHERE service_id = ?;', ( service_id, ) ).fetchone() + + higher_precedence_service_ids = [ id for ( id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence WHERE precedence < ?;', ( precedence, ) ) ] + + splayed_higher_precedence_service_ids = HC.SplayListForDB( higher_precedence_service_ids ) + + for ( namespace_id, tag_id, hash_ids ) in mappings_ids: + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_mappings WHERE service_id IN ' + splayed_higher_precedence_service_ids + ' AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids = set( hash_ids ) - invalid_hash_ids + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM active_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids.difference_update( invalid_hash_ids ) + + c.executemany( 'INSERT OR IGNORE INTO active_mappings VALUES ( ?, ?, ? 
);', [ ( namespace_id, tag_id, hash_id ) for hash_id in valid_hash_ids ] ) + + self._UpdateAutocompleteTagCacheFromActiveCurrentTags( c, namespace_id, tag_id, valid_hash_ids, 1 ) + + # now for removing any active pending + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM pending_mappings WHERE service_id IN ' + splayed_higher_precedence_service_ids + ' AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids = set( hash_ids ) - invalid_hash_ids + + actual_hash_ids_i_can_delete = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM active_pending_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( namespace_id, tag_id ) ) ] + + c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( actual_hash_ids_i_can_delete ) + ';', ( namespace_id, tag_id ) ) + + self._UpdateAutocompleteTagCacheFromActivePendingTags( c, namespace_id, tag_id, actual_hash_ids_i_can_delete, -1 ) + + + for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids: + + invalid_hash_ids = { hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id IN ' + splayed_higher_precedence_service_ids + ' AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( namespace_id, tag_id ) ) } + + valid_hash_ids = set( hash_ids ) - invalid_hash_ids + + actual_hash_ids_i_can_delete = [ id for ( id, ) in c.execute( 'SELECT hash_id FROM active_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( namespace_id, tag_id ) ) ] + + c.execute( 'DELETE FROM active_mappings WHERE namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( actual_hash_ids_i_can_delete ) + ';', ( namespace_id, tag_id ) ) + + self._UpdateAutocompleteTagCacheFromActiveCurrentTags( c, namespace_id, tag_id, actual_hash_ids_i_can_delete, -1 ) + + + + def _UpdateServerServices( self, c, server_admin_service_identifier, edit_log ): + + server_admin_service_id = self._GetServiceId( c, server_admin_service_identifier ) + + server_admin = self._GetService( c, server_admin_service_id ) + + server_admin_credentials = server_admin.GetCredentials() + + access_key = server_admin_credentials.GetAccessKey() + + ( host, server_admin_port ) = server_admin_credentials.GetAddress() + + recalc_active_mappings = False + + for ( action, data ) in edit_log: + + if action == HC.ADD: + + server_service_identifier = data + + service_key = os.urandom( 32 ) + + service_type = server_service_identifier.GetType() + + service_port = server_service_identifier.GetPort() + + service_name = HC.service_string_lookup[ service_type ] + ' at ' + host + ':' + str( service_port ) + + client_service_identifier = HC.ClientServiceIdentifier( service_key, service_type, service_name ) + + credentials = CC.Credentials( host, service_port, access_key ) + + if service_type == HC.MESSAGE_DEPOT: extra_info = ( 'identity@' + service_name, 180, HydrusMessageHandling.GenerateNewPrivateKey() ) + else: extra_info = None + + self._AddService( c, client_service_identifier, credentials, extra_info ) + + elif action == HC.EDIT: + + ( server_service_identifier, new_port ) = data + + current_port = server_service_identifier.GetPort() + + c.execute( 'UPDATE addresses SET port = ? WHERE host = ? 
AND port = ?;', ( new_port, host, current_port ) ) + + elif action == HC.DELETE: + + server_service_identifier = data + + service_type = server_service_identifier.GetType() + service_port = server_service_identifier.GetPort() + + service_info = c.execute( 'SELECT service_key, name FROM services, addresses USING ( service_id ) WHERE type = ? AND host = ? AND port = ?;', ( service_type, host, service_port ) ).fetchall() + + for ( service_key, name ) in service_info: + + client_service_identifier = HC.ClientServiceIdentifier( service_key, service_type, name ) + + client_service_id = self._GetServiceId( c, client_service_identifier ) + + c.execute( 'DELETE FROM services WHERE service_id = ?;', ( client_service_id, ) ) + + self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_RESET, client_service_identifier ) ) + + + if len( names ) > 0: recalc_active_mappings = True + + + + if recalc_active_mappings: self._RecalcActiveMappings( c ) + + self.pub( 'notify_new_pending' ) + self.pub( 'notify_new_services' ) + + + def _UpdateServices( self, c, edit_log ): + + recalc_active_mappings = False + + for ( action, details ) in edit_log: + + if action == 'add': + + ( service_identifier, credentials, extra_info ) = details + + self._AddService( c, service_identifier, credentials, extra_info ) + + elif action == 'delete': + + service_identifier = details + + service_id = self._GetServiceId( c, service_identifier ) + + c.execute( 'DELETE FROM services WHERE service_id = ?;', ( service_id, ) ) + + self.pub( 'service_update_db', CC.ServiceUpdate( CC.SERVICE_UPDATE_RESET, service_identifier ) ) + + service_type = service_identifier.GetType() + + if service_type == HC.TAG_REPOSITORY: recalc_active_mappings = True + + elif action == 'edit': + + ( old_service_identifier, ( new_service_identifier, credentials, extra_info ) ) = details + + service_type = old_service_identifier.GetType() + + service_id = self._GetServiceId( c, old_service_identifier ) + + name = new_service_identifier.GetName() + + c.execute( 'UPDATE services SET name = ? WHERE service_id = ?;', ( name, service_id ) ) + + if service_type in HC.REMOTE_SERVICES: + + ( host, port ) = credentials.GetAddress() + + c.execute( 'UPDATE addresses SET host = ?, port = ?, last_error = ? WHERE service_id = ?;', ( host, port, 0, service_id ) ) + + if service_type in HC.RESTRICTED_SERVICES: + + ( account, ) = c.execute( 'SELECT account FROM accounts WHERE service_id = ?;', ( service_id, ) ).fetchone() + + account.MakeStale() + + if credentials.HasAccessKey(): access_key = credentials.GetAccessKey() + else: access_key = '' + + c.execute( 'UPDATE accounts SET access_key = ?, account = ? WHERE service_id = ?;', ( sqlite3.Binary( access_key ), account, service_id ) ) + + + + if service_type == HC.MESSAGE_DEPOT: + + ( identity_name, check_period, private_key, receive_anon ) = extra_info + + contact_id = self._GetContactId( c, service_id ) + + result = c.execute( 'SELECT 1 FROM contacts WHERE name = ? AND contact_id != ?;', ( identity_name, contact_id ) ).fetchone() + + while result is not None: + + identity_name += str( random.randint( 0, 9 ) ) + + result = c.execute( 'SELECT 1 FROM contacts WHERE name = ?;', ( identity_name, ) ).fetchone() + + + c.execute( 'UPDATE contacts SET name = ?, host = ?, port = ? WHERE contact_id = ?;', ( identity_name, host, port, contact_id ) ) + + c.execute( 'UPDATE message_depots SET check_period = ?, private_key = ?, receive_anon = ? 
WHERE service_id = ?;', ( check_period, private_key, receive_anon, service_id ) ) + + elif service_type in ( HC.RATING_LIKE_REPOSITORY, HC.LOCAL_RATING_LIKE ): + + ( like, dislike ) = extra_info + + c.execute( 'UPDATE ratings_like SET like = ?, dislike = ? WHERE service_id = ?;', ( like, dislike, service_id ) ) + + elif service_type in ( HC.RATING_LIKE_REPOSITORY, HC.LOCAL_RATING_NUMERICAL ): + + ( lower, upper ) = extra_info + + c.execute( 'UPDATE ratings_numerical SET lower = ?, upper = ? WHERE service_id = ?;', ( lower, upper, service_id ) ) + + + + + if recalc_active_mappings: self._RecalcActiveMappings( c ) + + self.pub( 'notify_new_pending' ) + self.pub( 'notify_new_services' ) + + + def _UploadPending( self, c, service_identifier, job_key, cancel_event = threading.Event() ): + + try: + + service_id = self._GetServiceId( c, service_identifier ) + + service_type = service_identifier.GetType() + + service_name = service_identifier.GetName() + + repository = self._GetService( c, service_id ) + + account = repository.GetAccount() + + if service_type == HC.TAG_REPOSITORY: + + HC.pubsub.pub( 'progress_update', job_key, 0, 7, u'gathering pending mappings' ) + + mappings_dict = {} + mappings_hash_ids = set() + + if account.HasPermission( HC.POST_DATA ): + + mappings_dict = HC.BuildKeyToListDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in c.execute( 'SELECT namespace_id, tag_id, hash_id FROM pending_mappings WHERE service_id = ?;', ( service_id, ) ) ] ) + + + mappings = [ ( self._GetNamespaceTag( c, namespace_id, tag_id ), hash_ids ) for ( ( namespace_id, tag_id ), hash_ids ) in mappings_dict.items() ] + + mappings_hash_ids = HC.IntelligentMassUnion( mappings_dict.values() ) + + mappings_hash_ids_to_hashes = self._GetHashIdsToHashes( c, mappings_hash_ids ) + + HC.pubsub.pub( 'progress_update', job_key, 1, 7, u'gathering petitioned mappings' ) + + petitions_dict = {} + petitions_hash_ids = set() + + if account.HasPermission( HC.POST_PETITIONS ): + + petitions_dict = HC.BuildKeyToListDict( [ ( ( reason_id, namespace_id, tag_id ), hash_id ) for ( reason_id, namespace_id, tag_id, hash_id ) in c.execute( 'SELECT reason_id, namespace_id, tag_id, hash_id FROM mapping_petitions WHERE service_id = ?;', ( service_id, ) ) ] ) + + + petitions = [ ( self._GetReason( c, reason_id ), self._GetNamespaceTag( c, namespace_id, tag_id ), hash_ids ) for ( ( reason_id, namespace_id, tag_id ), hash_ids ) in petitions_dict.items() ] + + petitions_hash_ids = HC.IntelligentMassUnion( petitions_dict.values() ) + + petitions_hash_ids_to_hashes = self._GetHashIdsToHashes( c, petitions_hash_ids ) + + if len( mappings ) > 0 or len( petitions ) > 0: + + HC.pubsub.pub( 'progress_update', job_key, 2, 7, u'connecting to repository' ) + + connection = repository.GetConnection() + + HC.pubsub.pub( 'progress_update', job_key, 3, 7, u'posting new mappings' ) + + if len( mappings ) > 0: + + try: + + mappings_object = HC.ClientMappings( mappings, mappings_hash_ids_to_hashes ) + + connection.Post( 'mappings', mappings = mappings_object ) + + except Exception as e: raise Exception( 'Encountered an error while uploading public_mappings:' + os.linesep + unicode( e ) ) + + + HC.pubsub.pub( 'progress_update', job_key, 4, 7, u'posting new petitions' ) + + if len( petitions ) > 0: + + try: + + petitions_object = HC.ClientMappingPetitions( petitions, petitions_hash_ids_to_hashes ) + + connection.Post( 'petitions', petitions = petitions_object ) + + except Exception as e: raise Exception( 'Encountered an 
error while uploading petitions:' + os.linesep + unicode( e ) ) + + + mappings_ids = [ ( namespace_id, tag_id, hash_ids ) for ( ( namespace_id, tag_id ), hash_ids ) in mappings_dict.items() ] + deleted_mappings_ids = [ ( namespace_id, tag_id, hash_ids ) for ( ( reason_id, namespace_id, tag_id ), hash_ids ) in petitions_dict.items() ] + + HC.pubsub.pub( 'progress_update', job_key, 5, 7, u'saving changes to local database' ) + + self._UpdateMappings( c, service_id, mappings_ids, deleted_mappings_ids ) + + num_mappings = sum( [ len( hash_ids ) for ( namespace_id, tag_id, hash_ids ) in mappings_ids ] ) + num_deleted_mappings = sum( [ len( hash_ids ) for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids ] ) + + self.pub( 'log_message', 'upload mappings', 'uploaded ' + str( num_mappings ) + ' mappings to and deleted ' + str( num_deleted_mappings ) + ' mappings from ' + service_identifier.GetName() ) + + content_updates = [] + + content_updates += [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, hash_ids ), info = self._GetNamespaceTag( c, namespace_id, tag_id ) ) for ( namespace_id, tag_id, hash_ids ) in mappings_ids ] + content_updates += [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ), info = self._GetNamespaceTag( c, namespace_id, tag_id ) ) for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids ] + + HC.pubsub.pub( 'progress_update', job_key, 6, 7, u'saving changes to gui' ) + + self.pub( 'content_updates_data', content_updates ) + self.pub( 'content_updates_gui', content_updates ) + + + HC.pubsub.pub( 'progress_update', job_key, 7, 7, u'done!' ) + + elif service_type == HC.FILE_REPOSITORY: + + uploads = [] + + petitions = [] + + HC.pubsub.pub( 'progress_update', job_key, 0, 1, u'gathering pending and petitioned file info' ) + + if account.HasPermission( HC.POST_DATA ): uploads = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_transfers WHERE service_id_to = ?;', ( service_id, ) ) ] + + if account.HasPermission( HC.POST_PETITIONS ): petitions = HC.BuildKeyToListDict( c.execute( 'SELECT reason, hash FROM reasons, ( hashes, file_petitions USING ( hash_id ) ) USING ( reason_id ) WHERE service_id = ?;', ( service_id, ) ) ).items() + + num_uploads = len( uploads ) + num_petitions = len( petitions ) + + if num_uploads > 0 or num_petitions > 0: + + HC.pubsub.pub( 'progress_update', job_key, 0, num_uploads + 4, u'connecting to repository' ) + + connection = repository.GetConnection() + + if num_uploads > 0: + + error_messages = set() + + good_hash_ids = [] + + for ( index, hash_id ) in enumerate( uploads ): + + HC.pubsub.pub( 'progress_update', job_key, index, num_uploads + 4, u'Uploading file ' + HC.ConvertIntToPrettyString( index + 1 ) + ' of ' + HC.ConvertIntToPrettyString( num_uploads ) ) + + if cancel_event.isSet(): break + + try: + + ( hash, ) = self._GetHashes( c, ( hash_id, ) ) + + file = self._GetFile( hash ) + + connection.Post( 'file', file = file ) + + good_hash_ids.append( hash_id ) + + except Exception as e: error_messages.add( unicode( e ) ) + + + splayed_good_hash_ids = HC.SplayListForDB( good_hash_ids ) + + HC.pubsub.pub( 'progress_update', job_key, num_uploads, num_uploads + 4, u'saving changes to local database' ) + + files_info_rows = c.execute( 'SELECT ?, hash_id, size, mime, ?, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? 
AND hash_id IN ' + splayed_good_hash_ids + ';', ( service_id, int( time.time() ), self._local_file_service_id ) ).fetchall() + + self._AddFiles( c, files_info_rows ) + + if len( error_messages ) > 0: raise Exception( 'Errors were encountered while trying to upload files to ' + service_name + ':' + os.linesep + os.linesep.join( error_messages ) ) + + HC.pubsub.pub( 'progress_update', job_key, num_uploads + 2, num_uploads + 4, u'saving changes to gui' ) + + if len( good_hash_ids ) > 0: + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, good_hash_ids ) ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, service_identifier, self._GetHashes( c, good_hash_ids ) ) ] ) + + + if num_petitions > 0: + + try: + + HC.pubsub.pub( 'progress_update', job_key, num_uploads + 3, num_uploads + 4, u'uploading petitions' ) + + petitions_object = HC.ClientFilePetitions( petitions ) + + connection.Post( 'petitions', petitions = petitions_object ) + + hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_petitions WHERE service_id = ?;', ( service_id, ) ) ] + + self._DeleteFiles( c, service_id, hash_ids ) + + self.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ) ) ] ) + self.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, service_identifier, self._GetHashes( c, hash_ids ) ) ] ) + + except Exception as e: raise Exception( 'Encountered an error while trying to uploads petitions to '+ service_name + ':' + os.linesep + unicode( e ) ) + + + self.pub( 'log_message', 'upload files', 'uploaded ' + str( num_uploads ) + ' files to and deleted ' + str( num_petitions ) + ' files from ' + service_identifier.GetName() ) + + + HC.pubsub.pub( 'progress_update', job_key, num_uploads + 4, num_uploads + 4, u'done!' ) + + + self.pub( 'notify_new_pending' ) + + except Exception as e: + + time.sleep( 2 ) + + HC.pubsub.pub( 'progress_update', job_key, 0, 1, 'error: ' + unicode( e ) ) + + time.sleep( 3 ) + + HC.pubsub.pub( 'progress_update', job_key, 1, 1, 'quitting' ) + + raise + + + +class DB( ServiceDB ): + + def __init__( self ): + + self._db_path = HC.DB_DIR + os.path.sep + 'client.db' + + self._jobs = Queue.PriorityQueue() + self._pubsubs = [] + + self._InitDB() + + temp_dir = HC.TEMP_DIR + + if os.path.exists( temp_dir ): shutil.rmtree( temp_dir, ignore_errors = True ) + + os.mkdir( temp_dir ) + + ( db, c ) = self._GetDBCursor() + + self._UpdateDB( c ) + + # ####### put a temp db update here! ###### + + # ###### ~~~~~~~~~~~~~~~~~~~~~~~~~~~ ###### + + ( self._local_file_service_id, ) = c.execute( 'SELECT service_id FROM services WHERE type = ?;', ( HC.LOCAL_FILE, ) ).fetchone() + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + self._tag_service_precedence = self._GetTagServicePrecedence( c ) + + if not self._CheckPassword(): raise HC.PermissionException( 'No password!' 
) + + threading.Thread( target = self.MainLoop, name = 'Database Main Loop' ).start() + + + def _InitPostGUI( self ): + + port = HC.DEFAULT_LOCAL_FILE_PORT + + local_file_server_service_identifier = HC.ServerServiceIdentifier( HC.LOCAL_FILE, port ) + + self._server = HydrusServer.HydrusHTTPServer( local_file_server_service_identifier ) + + server_thread = threading.Thread( target=self._server.serve_forever ) + server_thread.start() + + connection = httplib.HTTPConnection( '127.0.0.1:' + str( port ) ) + + try: + + connection.connect() + connection.close() + + except: print( 'Could not bind the client to port ' + str( port ) ) + + HC.DAEMONWorker( 'DownloadFiles', self.DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) ) + HC.DAEMONWorker( 'DownloadThumbnails', self.DAEMONDownloadThumbnails, ( 'notify_new_permissions', 'notify_new_thumbnails' ) ) + HC.DAEMONWorker( 'SynchroniseAccounts', self.DAEMONSynchroniseAccounts, ( 'notify_new_services', 'permissions_are_stale' ) ) + HC.DAEMONWorker( 'SynchroniseMessages', self.DAEMONSynchroniseMessages, ( 'notify_new_permissions', 'notify_check_messages' ), period = 60 ) + HC.DAEMONWorker( 'SynchroniseRepositories', self.DAEMONSynchroniseRepositories, ( 'notify_new_permissions', ) ) + HC.DAEMONQueue( 'FlushRepositoryUpdates', self.DAEMONFlushServiceUpdates, 'service_update_db', period = 2 ) + + + def _CheckPassword( self ): + + if self._options[ 'password' ] is not None: + + while True: + + with wx.PasswordEntryDialog( None, 'Enter your password', 'Enter password' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + if hashlib.sha256( dlg.GetValue() ).digest() == self._options[ 'password' ]: return True + else: continue + + else: return False + + + + else: return True + + + def _GetDBCursor( self ): + + db = sqlite3.connect( self._db_path, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES ) + + db.create_function( 'hydrus_hamming', 2, HydrusImageHandling.GetHammingDistance ) + + c = db.cursor() + + c.execute( 'PRAGMA cache_size = 10000;' ) + c.execute( 'PRAGMA foreign_keys = ON;' ) + c.execute( 'PRAGMA recursive_triggers = ON;' ) + + return ( db, c ) + + + def _GetBoorus( self, c ): + + boorus = [ booru for ( booru, ) in c.execute( 'SELECT booru FROM boorus;' ) ] + + return boorus + + + def _GetImageboards( self, c ): + + all_imageboards = [] + + all_sites = c.execute( 'SELECT site_id, name FROM imageboard_sites;' ).fetchall() + + for ( site_id, name ) in all_sites: + + imageboards = [ imageboard for ( imageboard, ) in c.execute( 'SELECT imageboard FROM imageboards WHERE site_id = ? ORDER BY name;', ( site_id, ) ) ] + + all_imageboards.append( ( name, imageboards ) ) + + + return all_imageboards + + + def _GetSiteId( self, c, name ): + + result = c.execute( 'SELECT site_id FROM imageboard_sites WHERE name = ?;', ( name, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO imageboard_sites ( name ) VALUES ( ? 
);', ( name, ) ) + + site_id = c.lastrowid + + else: ( site_id, ) = result + + return site_id + + + def _InitDB( self ): + + if not os.path.exists( self._db_path ): + + if not os.path.exists( HC.CLIENT_FILES_DIR ): os.mkdir( HC.CLIENT_FILES_DIR ) + if not os.path.exists( HC.CLIENT_THUMBNAILS_DIR ): os.mkdir( HC.CLIENT_THUMBNAILS_DIR ) + + ( db, c ) = self._GetDBCursor() + + c.execute( 'PRAGMA auto_vacuum = 0;' ) # none + c.execute( 'PRAGMA journal_mode=WAL;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + c.execute( 'CREATE TABLE services ( service_id INTEGER PRIMARY KEY, service_key BLOB_BYTES, type INTEGER, name TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX services_service_key_index ON services ( service_key );' ) + + c.execute( 'CREATE TABLE fourchan_pass ( token TEXT, pin TEXT, timeout INTEGER );' ) + + c.execute( 'CREATE TABLE accounts ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, access_key BLOB_BYTES, account TEXT_YAML );' ) + + c.execute( 'CREATE TABLE active_mappings ( namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX active_mappings_tag_id_index ON active_mappings ( tag_id );' ) + c.execute( 'CREATE INDEX active_mappings_hash_id_index ON active_mappings ( hash_id );' ) + + c.execute( 'CREATE TABLE active_pending_mappings ( namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX active_pending_mappings_tag_id_index ON active_pending_mappings ( tag_id );' ) + c.execute( 'CREATE INDEX active_pending_mappings_hash_id_index ON active_pending_mappings ( hash_id );' ) + + c.execute( 'CREATE TABLE addresses ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, host TEXT, port INTEGER, last_error INTEGER );' ) + + c.execute( 'CREATE TABLE autocomplete_tags_cache ( file_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, tag_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, current_count INTEGER, pending_count INTEGER, PRIMARY KEY ( file_service_id, tag_service_id, namespace_id, tag_id ) );' ) + c.execute( 'CREATE INDEX autocomplete_tags_cache_tag_service_id_namespace_id_tag_id_index ON autocomplete_tags_cache ( tag_service_id, namespace_id, tag_id );' ) + + c.execute( 'CREATE TABLE boorus ( name TEXT PRIMARY KEY, booru TEXT_YAML );' ) + + c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' ) + + c.execute( 'CREATE VIRTUAL TABLE conversation_subjects USING fts4( subject );' ) + + c.execute( 'CREATE TABLE deleted_files ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + + c.execute( 'CREATE TABLE deleted_mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX deleted_mappings_hash_id_index ON deleted_mappings ( hash_id );' ) + + c.execute( 'CREATE TABLE existing_tags ( namespace_id INTEGER, tag_id INTEGER, PRIMARY KEY( namespace_id, tag_id ) );' ) + c.execute( 'CREATE INDEX existing_tags_tag_id_index ON existing_tags ( tag_id );' ) 
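+
+ # a note on the tag tables here and below: existing_tags, mappings, pending_mappings and the rest all store tags normalised as ( namespace_id, tag_id ) pairs rather than as raw strings
+ # a sketch of that mapping, assuming the usual namespace:subtag split on the first colon:
+ #   'creator:some artist' -> namespace 'creator', tag 'some artist'
+ #   'solo' -> the empty namespace ( inserted as namespace_id 1 in the defaults below ), tag 'solo'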
+ + c.execute( 'CREATE TABLE file_inbox ( hash_id INTEGER PRIMARY KEY );' ) + + c.execute( 'CREATE TABLE files_info ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, size INTEGER, mime INTEGER, timestamp INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX files_info_hash_id ON files_info ( hash_id );' ) + + c.execute( 'CREATE TABLE file_transfers ( service_id_from INTEGER, service_id_to INTEGER REFERENCES services( service_id ) ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id_from, service_id_to, hash_id ), FOREIGN KEY( service_id_from, hash_id ) REFERENCES files_info ON DELETE CASCADE );' ) + c.execute( 'CREATE INDEX file_transfers_service_id_to ON file_transfers ( service_id_to );' ) + c.execute( 'CREATE INDEX file_transfers_hash_id ON file_transfers ( hash_id );' ) + + c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ), FOREIGN KEY( service_id, hash_id ) REFERENCES files_info ON DELETE CASCADE );' ) + c.execute( 'CREATE INDEX file_petitions_hash_id_index ON file_petitions ( hash_id );' ) + + c.execute( 'CREATE TABLE hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES );' ) + c.execute( 'CREATE UNIQUE INDEX hashes_hash_index ON hashes ( hash );' ) + + c.execute( 'CREATE TABLE imageboard_sites ( site_id INTEGER PRIMARY KEY, name TEXT );', ) + + c.execute( 'CREATE TABLE imageboards ( site_id INTEGER, name TEXT, imageboard TEXT_YAML, PRIMARY KEY ( site_id, name ) );', ) + + c.execute( 'CREATE TABLE local_hashes ( hash_id INTEGER PRIMARY KEY, md5 BLOB_BYTES, sha1 BLOB_BYTES );' ) + c.execute( 'CREATE INDEX local_hashes_md5_index ON local_hashes ( md5 );' ) + c.execute( 'CREATE INDEX local_hashes_sha1_index ON local_hashes ( sha1 );' ) + + c.execute( 'CREATE TABLE local_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, rating REAL, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX local_ratings_hash_id_index ON local_ratings ( hash_id );' ) + c.execute( 'CREATE INDEX local_ratings_rating_index ON local_ratings ( rating );' ) + + c.execute( 'CREATE TABLE mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX mappings_hash_id_index ON mappings ( hash_id );' ) + c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' ) + c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' ) + + c.execute( 'CREATE TABLE mapping_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, reason_id ) );' ) + c.execute( 'CREATE INDEX mapping_petitions_hash_id_index ON mapping_petitions ( hash_id );' ) + + c.execute( 'CREATE TABLE message_attachments ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE, hash_id INTEGER );' ) + + c.execute( 'CREATE TABLE message_depots ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, contact_id INTEGER, last_check INTEGER, check_period INTEGER, private_key TEXT, receive_anon INTEGER_BOOLEAN );' ) + c.execute( 'CREATE UNIQUE INDEX message_depots_contact_id_index ON 
message_depots ( contact_id );' ) + + c.execute( 'CREATE TABLE message_destination_map ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_to INTEGER, status_id INTEGER, PRIMARY KEY ( message_id, contact_id_to ) );' ) + c.execute( 'CREATE INDEX message_destination_map_contact_id_to_index ON message_destination_map ( contact_id_to );' ) + c.execute( 'CREATE INDEX message_destination_map_status_id_index ON message_destination_map ( status_id );' ) + + c.execute( 'CREATE TABLE message_downloads ( service_id INTEGER REFERENCES services ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE );' ) + c.execute( 'CREATE INDEX message_downloads_service_id_index ON message_downloads ( service_id );' ) + + c.execute( 'CREATE TABLE message_drafts ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, recipients_visible INTEGER_BOOLEAN );' ) + + c.execute( 'CREATE TABLE message_inbox ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE );' ) + + c.execute( 'CREATE TABLE message_keys ( message_id INTEGER PRIMARY KEY, message_key BLOB_BYTES );' ) + c.execute( 'CREATE INDEX message_keys_message_key_index ON message_keys ( message_key );' ) + + c.execute( 'CREATE VIRTUAL TABLE message_bodies USING fts4( body );' ) + + c.execute( 'CREATE TABLE incoming_message_statuses ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' ) + + c.execute( 'CREATE TABLE messages ( conversation_id INTEGER REFERENCES message_keys ( message_id ) ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_from INTEGER, timestamp INTEGER, PRIMARY KEY( conversation_id, message_id ) );' ) + c.execute( 'CREATE UNIQUE INDEX messages_message_id_index ON messages ( message_id );' ) + c.execute( 'CREATE INDEX messages_contact_id_from_index ON messages ( contact_id_from );' ) + c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' ) + + c.execute( 'CREATE TABLE namespaces ( namespace_id INTEGER PRIMARY KEY, namespace TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX namespaces_namespace_index ON namespaces ( namespace );' ) + + c.execute( 'CREATE TABLE news ( service_id INTEGER REFERENCES services ON DELETE CASCADE, post TEXT, timestamp INTEGER );' ) + + c.execute( 'CREATE TABLE options ( options TEXT_YAML );', ) + + c.execute( 'CREATE TABLE pending_mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX pending_mappings_hash_id_index ON pending_mappings ( hash_id );' ) + c.execute( 'CREATE INDEX pending_mappings_service_id_tag_id_index ON pending_mappings ( service_id, tag_id );' ) + c.execute( 'CREATE INDEX pending_mappings_service_id_hash_id_index ON pending_mappings ( service_id, hash_id );' ) + + c.execute( 'CREATE TABLE perceptual_hashes ( hash_id INTEGER PRIMARY KEY, phash BLOB_BYTES );' ) + + c.execute( 'CREATE TABLE ratings_filter ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, min REAL, max REAL, PRIMARY KEY( service_id, hash_id ) );' ) + + c.execute( 'CREATE TABLE ratings_numerical ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, lower INTEGER, upper INTEGER );' ) + + c.execute( 'CREATE TABLE ratings_like ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, like TEXT, dislike 
TEXT );' ) + + c.execute( 'CREATE TABLE reasons ( reason_id INTEGER PRIMARY KEY, reason TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX reasons_reason_index ON reasons ( reason );' ) + + c.execute( 'CREATE TABLE remote_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, count INTEGER, rating REAL, score REAL, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX remote_ratings_hash_id_index ON remote_ratings ( hash_id );' ) + c.execute( 'CREATE INDEX remote_ratings_rating_index ON remote_ratings ( rating );' ) + c.execute( 'CREATE INDEX remote_ratings_score_index ON remote_ratings ( score );' ) + + c.execute( 'CREATE TABLE repositories ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, first_begin INTEGER, next_begin INTEGER );' ) + + c.execute( 'CREATE TABLE service_info ( service_id INTEGER REFERENCES services ON DELETE CASCADE, info_type INTEGER, info INTEGER, PRIMARY KEY ( service_id, info_type ) );' ) + + c.execute( 'CREATE TABLE shutdown_timestamps ( shutdown_type INTEGER PRIMARY KEY, timestamp INTEGER );' ) + + c.execute( 'CREATE TABLE statuses ( status_id INTEGER PRIMARY KEY, status TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX statuses_status_index ON statuses ( status );' ) + + c.execute( 'CREATE TABLE tag_service_precedence ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, precedence INTEGER );' ) + + c.execute( 'CREATE TABLE tags ( tag_id INTEGER PRIMARY KEY, tag TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX tags_tag_index ON tags ( tag );' ) + + c.execute( 'CREATE TABLE urls ( url TEXT PRIMARY KEY, hash_id INTEGER );' ) + c.execute( 'CREATE INDEX urls_hash_id ON urls ( hash_id );' ) + + c.execute( 'CREATE TABLE version ( version INTEGER );' ) + + # inserts + + account = CC.GetUnknownAccount() + account.MakeStale() + + c.execute( 'INSERT INTO services ( service_key, type, name ) VALUES ( ?, ?, ? );', ( sqlite3.Binary( 'local files' ), HC.LOCAL_FILE, 'local files' ) ) + c.execute( 'INSERT INTO services ( service_key, type, name ) VALUES ( ?, ?, ? );', ( sqlite3.Binary( 'local tags' ), HC.LOCAL_TAG, 'local tags' ) ) + + local_tag_service_id = c.lastrowid + + c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) VALUES ( ?, ? );', ( local_tag_service_id, 0 ) ) + + c.executemany( 'INSERT INTO boorus VALUES ( ?, ? );', [ ( booru.GetName(), booru ) for booru in CC.DEFAULT_BOORUS ] ) + + for ( site_name, imageboards ) in CC.DEFAULT_IMAGEBOARDS: + + site_id = self._GetSiteId( c, site_name ) + + c.executemany( 'INSERT INTO imageboards VALUES ( ?, ?, ? );', [ ( site_id, imageboard.GetName(), imageboard ) for imageboard in imageboards ] ) + + + c.execute( 'INSERT INTO namespaces ( namespace_id, namespace ) VALUES ( ?, ? 
);', ( 1, '' ) ) + + CLIENT_DEFAULT_OPTIONS = {} + + CLIENT_DEFAULT_OPTIONS[ 'default_sort' ] = 0 + CLIENT_DEFAULT_OPTIONS[ 'default_collect' ] = 0 + CLIENT_DEFAULT_OPTIONS[ 'export_path' ] = 'export' + CLIENT_DEFAULT_OPTIONS[ 'hpos' ] = 400 + CLIENT_DEFAULT_OPTIONS[ 'vpos' ] = 700 + CLIENT_DEFAULT_OPTIONS[ 'exclude_deleted_files' ] = False + CLIENT_DEFAULT_OPTIONS[ 'thumbnail_cache_size' ] = 100 * 1048576 + CLIENT_DEFAULT_OPTIONS[ 'preview_cache_size' ] = 25 * 1048576 + CLIENT_DEFAULT_OPTIONS[ 'fullscreen_cache_size' ] = 200 * 1048576 + CLIENT_DEFAULT_OPTIONS[ 'thumbnail_dimensions' ] = [ 150, 125 ] + CLIENT_DEFAULT_OPTIONS[ 'password' ] = None + CLIENT_DEFAULT_OPTIONS[ 'num_autocomplete_chars' ] = 1 + CLIENT_DEFAULT_OPTIONS[ 'gui_capitalisation' ] = False + + system_predicates = {} + + system_predicates[ 'age' ] = ( 0, 0, 0, 7 ) + system_predicates[ 'duration' ] = ( 3, 0, 0 ) + system_predicates[ 'height' ] = ( 1, 1200 ) + system_predicates[ 'limit' ] = 600 + system_predicates[ 'mime' ] = ( 0, 0 ) + system_predicates[ 'num_tags' ] = ( 0, 4 ) + system_predicates[ 'local_rating_numerical' ] = ( 0, 3 ) + system_predicates[ 'local_rating_like' ] = 0 + system_predicates[ 'ratio' ] = ( 0, 16, 9 ) + system_predicates[ 'size' ] = ( 0, 200, 3 ) + system_predicates[ 'width' ] = ( 1, 1920 ) + + CLIENT_DEFAULT_OPTIONS[ 'file_system_predicates' ] = system_predicates + + default_namespace_colours = {} + + default_namespace_colours[ 'system' ] = ( 153, 101, 21 ) + default_namespace_colours[ 'creator' ] = ( 170, 0, 0 ) + default_namespace_colours[ 'character' ] = ( 0, 170, 0 ) + default_namespace_colours[ 'series' ] = ( 170, 0, 170 ) + default_namespace_colours[ None ] = ( 114, 160, 193 ) + default_namespace_colours[ '' ] = ( 0, 111, 250 ) + + CLIENT_DEFAULT_OPTIONS[ 'namespace_colours' ] = default_namespace_colours + + default_sort_by_choices = [] + + default_sort_by_choices.append( ( 'namespaces', [ 'series', 'creator', 'title', 'volume', 'chapter', 'page' ] ) ) + default_sort_by_choices.append( ( 'namespaces', [ 'creator', 'series', 'title', 'volume', 'chapter', 'page' ] ) ) + + CLIENT_DEFAULT_OPTIONS[ 'sort_by' ] = default_sort_by_choices + CLIENT_DEFAULT_OPTIONS[ 'show_all_tags_in_autocomplete' ] = True + + shortcuts = {} + + shortcuts[ wx.ACCEL_NORMAL ] = {} + shortcuts[ wx.ACCEL_CTRL ] = {} + shortcuts[ wx.ACCEL_ALT ] = {} + shortcuts[ wx.ACCEL_SHIFT ] = {} + + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F3 ] = 'manage_tags' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F4 ] = 'manage_ratings' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F5 ] = 'refresh' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F7 ] = 'archive' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F11 ] = 'ratings_filter' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F12 ] = 'filter' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F9 ] = 'new_page' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'B' ) ] = 'frame_back' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'N' ) ] = 'frame_next' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'T' ) ] = 'new_page' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'W' ) ] = 'close_page' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'R' ) ] = 'show_hide_splitters' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'S' ) ] = 'set_search_focus' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'I' ) ] = 'synchronised_wait_switch' + + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_UP ] = 'previous' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_LEFT ] = 'previous' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_UP ] = 'previous' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_LEFT ] = 'previous' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_PAGEUP ] = 'previous' + shortcuts[ 
wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_PAGEUP ] = 'previous' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_DOWN ] = 'next' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_RIGHT ] = 'next' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_DOWN ] = 'next' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_RIGHT ] = 'next' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_PAGEDOWN ] = 'next' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_PAGEDOWN ] = 'next' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_HOME ] = 'first' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_HOME ] = 'first' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_END ] = 'last' + shortcuts[ wx.ACCEL_CTRL ][ wx.WXK_NUMPAD_END ] = 'last' + + CLIENT_DEFAULT_OPTIONS[ 'shortcuts' ] = shortcuts + + CLIENT_DEFAULT_OPTIONS[ 'default_tag_repository' ] = CC.LOCAL_TAG_SERVICE_IDENTIFIER + CLIENT_DEFAULT_OPTIONS[ 'default_tag_sort' ] = CC.SORT_BY_LEXICOGRAPHIC_ASC + + c.execute( 'INSERT INTO options ( options ) VALUES ( ? );', ( CLIENT_DEFAULT_OPTIONS, ) ) + + c.execute( 'INSERT INTO contacts ( contact_id, contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ?, ? );', ( 1, None, None, 'Anonymous', 'internet', 0 ) ) + + with open( HC.STATIC_DIR + os.sep + 'contact - hydrus admin.yaml', 'rb' ) as f: hydrus_admin = yaml.safe_load( f.read() ) + + ( public_key, name, host, port ) = hydrus_admin.GetInfo() + + contact_key = hydrus_admin.GetContactKey() + + c.execute( 'INSERT OR IGNORE INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? );', ( sqlite3.Binary( contact_key ), public_key, name, host, port ) ) + + c.execute( 'INSERT INTO version ( version ) VALUES ( ? );', ( HC.SOFTWARE_VERSION, ) ) + + c.execute( 'COMMIT' ) + + + + def _SaveOptions( self, c ): + + ( old_options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + ( old_width, old_height ) = old_options[ 'thumbnail_dimensions' ] + + ( new_width, new_height ) = self._options[ 'thumbnail_dimensions' ] + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + resize_thumbs = new_width != old_width or new_height != old_height + + if resize_thumbs: + + thumbnail_paths = [ path for path in dircache.listdir( HC.CLIENT_THUMBNAILS_DIR ) if path.endswith( '_resized' ) ] + + for path in thumbnail_paths: os.remove( HC.CLIENT_THUMBNAILS_DIR + os.path.sep + path ) + + self.pub( 'thumbnail_resize' ) + + + self.pub( 'refresh_menu_bar' ) + self.pub( 'options_updated' ) + + + def _SetPassword( self, c, password ): + + if password is not None: self._options[ 'password' ] = hashlib.sha256( password ).digest() + else: self._options[ 'password' ] = None + + self._SaveOptions( c ) + + + def _UpdateBoorus( self, c, edit_log ): + + for ( action, data ) in edit_log: + + if action == 'add': + + name = data + + booru = CC.Booru( name, 'search_url', '+', 1, 'thumbnail', '', 'original image', {} ) + + c.execute( 'INSERT INTO boorus ( name, booru ) VALUES ( ?, ? );', ( name, booru ) ) + + elif action == 'delete': + + name = data + + c.execute( 'DELETE FROM boorus WHERE name = ?;', ( name, ) ) + + elif action == 'edit': + + ( name, booru ) = data + + c.execute( 'UPDATE boorus SET booru = ? 
WHERE name = ?;', ( booru, name ) ) + + + + + def _UpdateImageboards( self, c, site_edit_log ): + + for ( site_action, site_data ) in site_edit_log: + + if site_action == 'add': + + site_name = site_data + + self._GetSiteId( c, site_name ) + + elif site_action == 'delete': + + site_name = site_data + + site_id = self._GetSiteId( c, site_name ) + + c.execute( 'DELETE FROM imageboard_sites WHERE site_id = ?;', ( site_id, ) ) + c.execute( 'DELETE FROM imageboards WHERE site_id = ?;', ( site_id, ) ) + + elif site_action == 'edit': + + ( site_name, edit_log ) = site_data + + site_id = self._GetSiteId( c, site_name ) + + for ( action, data ) in edit_log: + + if action == 'add': + + name = data + + imageboard = CC.Imageboard( name, '', 60, [], {} ) + + c.execute( 'INSERT INTO imageboards ( site_id, name, imageboard ) VALUES ( ?, ?, ? );', ( site_id, name, imageboard ) ) + + elif action == 'delete': + + name = data + + c.execute( 'DELETE FROM imageboards WHERE site_id = ? AND name = ?;', ( site_id, name ) ) + + elif action == 'edit': + + imageboard = data + + name = imageboard.GetName() + + c.execute( 'UPDATE imageboards SET imageboard = ? WHERE site_id = ? AND name = ?;', ( imageboard, site_id, name ) ) + + + + + + + def _UpdateDB( self, c ): + + ( version, ) = c.execute( 'SELECT version FROM version;' ).fetchone() + + if version < HC.SOFTWARE_VERSION: + + c.execute( 'BEGIN IMMEDIATE' ) + + try: + + self._UpdateDBOld( c, version ) + + if version < 51: + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + shortcuts = self._options[ 'shortcuts' ] + + shortcuts[ wx.ACCEL_CTRL ][ ord( 'B' ) ] = 'frame_back' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'N' ) ] = 'frame_next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F11 ] = 'ratings_filter' + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + c.execute( 'CREATE TABLE ratings_filter ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, min REAL, max REAL, PRIMARY KEY( service_id, hash_id ) );' ) + + + if version < 52: + + wx.GetApp().SetSplashText( 'making new indices' ) + + c.execute( 'DROP INDEX mappings_namespace_id_index;' ) + c.execute( 'DROP INDEX mappings_tag_id_index;' ) + + c.execute( 'CREATE INDEX mappings_service_id_tag_id_index ON mappings ( service_id, tag_id );' ) + c.execute( 'CREATE INDEX mappings_service_id_hash_id_index ON mappings ( service_id, hash_id );' ) + + wx.GetApp().SetSplashText( 'making some more new indices' ) + + c.execute( 'DROP INDEX pending_mappings_namespace_id_index;' ) + c.execute( 'DROP INDEX pending_mappings_tag_id_index;' ) + + c.execute( 'CREATE INDEX pending_mappings_service_id_tag_id_index ON pending_mappings ( service_id, tag_id );' ) + c.execute( 'CREATE INDEX pending_mappings_service_id_hash_id_index ON pending_mappings ( service_id, hash_id );' ) + + c.execute( 'CREATE TABLE shutdown_timestamps ( shutdown_type INTEGER PRIMARY KEY, timestamp INTEGER );' ) + + + if version < 54: + + c.execute( 'DROP INDEX services_type_name_index;' ) + + c.execute( 'ALTER TABLE services ADD COLUMN service_key BLOB_BYTES;' ) + c.execute( 'CREATE UNIQUE INDEX services_service_key_index ON services ( service_key );' ) + + service_info = c.execute( 'SELECT service_id, type FROM services;' ).fetchall() + + updates = [] + + for ( service_id, service_type ) in service_info: + + if service_type == HC.LOCAL_FILE: service_key = 'local files' + elif service_type == HC.LOCAL_TAG: service_key = 'local tags' + else: service_key = os.urandom( 32 ) + + updates.append( ( 
sqlite3.Binary( service_key ), service_id ) ) + + + c.executemany( 'UPDATE services SET service_key = ? WHERE service_id = ?;', updates ) + + c.execute( 'UPDATE files_info SET num_frames = num_frames / 1000 WHERE mime = ?;', ( HC.VIDEO_FLV, ) ) + + + if version < 55: + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + self._options[ 'default_tag_repository' ] = CC.LOCAL_TAG_SERVICE_IDENTIFIER + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 56: + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + self._options[ 'default_tag_sort' ] = CC.SORT_BY_LEXICOGRAPHIC_ASC + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 57: + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + shortcuts = self._options[ 'shortcuts' ] + + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_UP ] = 'previous' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_LEFT ] = 'previous' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_UP ] = 'previous' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_LEFT ] = 'previous' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_PAGEUP ] = 'previous' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_PAGEUP ] = 'previous' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_DOWN ] = 'next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_RIGHT ] = 'next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_DOWN ] = 'next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_RIGHT ] = 'next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_PAGEDOWN ] = 'next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_PAGEDOWN ] = 'next' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_HOME ] = 'first' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_HOME ] = 'first' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_END ] = 'last' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_NUMPAD_END ] = 'last' + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + unknown_account = CC.GetUnknownAccount() + + unknown_account.MakeStale() + + c.execute( 'UPDATE accounts SET account = ?;', ( unknown_account, ) ) + + c.execute( 'UPDATE version SET version = ?;', ( HC.SOFTWARE_VERSION, ) ) + + c.execute( 'COMMIT' ) + + wx.MessageBox( 'The client has updated successfully!' 
) + + except: + + c.execute( 'ROLLBACK' ) + + print( traceback.format_exc() ) + + raise Exception( 'Tried to update the client db, but something went wrong:' + os.linesep + traceback.format_exc() ) + + + + self._UpdateDBOldPost( c, version ) + + + def _UpdateDBOld( self, c, version ): + + # upgrade to version 4 was too complicated, needs entire rebuild + + if version < 13: + + c.execute( 'ALTER TABLE public_tag_repository ADD COLUMN first_begin INTEGER;' ) + c.execute( 'ALTER TABLE file_repositories ADD COLUMN first_begin INTEGER;' ) + + c.execute( 'UPDATE public_tag_repository SET first_begin = 0, next_begin = 0, last_error = 0;' ) + c.execute( 'DELETE FROM public_mappings;' ) + c.execute( 'DELETE FROM deleted_public_mappings;' ) + c.execute( 'DELETE FROM public_tag_repository_news;' ) + c.execute( 'DELETE FROM pending_public_mapping_petitions;' ) + + c.execute( 'UPDATE file_repositories SET first_begin = 0, next_begin = 0, last_error = 0;' ) + c.execute( 'DELETE FROM remote_files;' ) + c.execute( 'DELETE FROM deleted_remote_files;' ) + c.execute( 'DELETE FROM file_repository_news;' ) + c.execute( 'DELETE FROM pending_file_petitions;' ) + c.execute( 'DELETE FROM file_downloads;' ) + + + if version < 16: + + c.execute( 'CREATE TABLE accounts ( service_id INTEGER, access_key BLOB_BYTES, account TEXT_YAML );' ) + + c.execute( 'CREATE TABLE addresses ( service_id INTEGER, host TEXT, port INTEGER, last_error INTEGER );' ) + + c.execute( 'CREATE TABLE services ( service_id INTEGER PRIMARY KEY, type INTEGER, name TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX services_type_name_index ON services ( type, name );' ) + + c.execute( 'CREATE TABLE repositories ( service_id INTEGER PRIMARY KEY, first_begin INTEGER, next_begin INTEGER );' ) + + c.execute( 'CREATE TABLE news ( service_id INTEGER, post TEXT, timestamp INTEGER );' ) + + # mappings db + + c.execute( 'PRAGMA mappings_db.auto_vacuum = 1;' ) # full + + c.execute( 'CREATE TABLE mappings_db.deleted_mappings ( service_id INTEGER, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX mappings_db.deleted_mappings_hash_id_index ON deleted_mappings ( hash_id );' ) + + c.execute( 'CREATE TABLE mappings_db.mapping_petitions ( service_id INTEGER, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id, reason_id ) );' ) + c.execute( 'CREATE INDEX mappings_db.mapping_petitions_hash_id_index ON mapping_petitions ( hash_id );' ) + + c.execute( 'CREATE TABLE mappings_db.pending_mappings ( service_id INTEGER, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX mappings_db.pending_mappings_namespace_id_index ON pending_mappings ( namespace_id );' ) + c.execute( 'CREATE INDEX mappings_db.pending_mappings_tag_id_index ON pending_mappings ( tag_id );' ) + c.execute( 'CREATE INDEX mappings_db.pending_mappings_hash_id_index ON pending_mappings ( hash_id );' ) + + c.execute( 'CREATE TABLE mappings_db.mappings ( service_id INTEGER, namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX mappings_db.mappings_namespace_id_index ON mappings ( namespace_id );' ) + c.execute( 'CREATE INDEX mappings_db.mappings_tag_id_index ON mappings ( tag_id );' ) + c.execute( 'CREATE INDEX mappings_db.mappings_hash_id_index ON mappings ( hash_id 
);' ) + + # active mappings db + + c.execute( 'PRAGMA active_mappings_db.auto_vacuum = 1;' ) # full + + c.execute( 'CREATE TABLE active_mappings_db.active_mappings ( namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX active_mappings_db.active_mappings_tag_id_index ON active_mappings ( tag_id );' ) + c.execute( 'CREATE INDEX active_mappings_db.active_mappings_hash_id_index ON active_mappings ( hash_id );' ) + + # files info db + + c.execute( 'PRAGMA files_info_db.auto_vacuum = 1;' ) # full + + c.execute( 'CREATE TABLE files_info_db.deleted_files ( service_id INTEGER, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + + c.execute( 'CREATE TABLE files_info_db.files_info ( service_id INTEGER, hash_id INTEGER, size INTEGER, mime INTEGER, timestamp INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX files_info_db.files_info_hash_id ON files_info ( hash_id );' ) + + c.execute( 'CREATE TABLE files_info_db.file_transfers ( service_id_from INTEGER, service_id_to INTEGER, hash_id INTEGER, PRIMARY KEY( service_id_from, service_id_to, hash_id ) );' ) + c.execute( 'CREATE INDEX files_info_db.file_transfers_service_id_to ON file_transfers ( service_id_to );' ) + c.execute( 'CREATE INDEX files_info_db.file_transfers_hash_id ON file_transfers ( hash_id );' ) + + c.execute( 'CREATE TABLE files_info_db.file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ) );' ) + c.execute( 'CREATE INDEX files_info_db.file_petitions_hash_id_index ON file_petitions ( hash_id );' ) + + c.execute( 'CREATE TABLE files_info_db.inbox ( hash_id INTEGER PRIMARY KEY );' ) + + # thumbs dbs + + c.execute( 'CREATE TABLE thumbnails_db.thumbnails ( service_id INTEGER, hash_id INTEGER, thumbnail BLOB_BYTES, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE TABLE thumbnails_resized_db.thumbnails_resized ( service_id INTEGER, hash_id INTEGER, thumbnail BLOB_BYTES, PRIMARY KEY( service_id, hash_id ) );' ) + + # copy over + + c.execute( 'INSERT INTO services SELECT file_repository_id, ?, name FROM file_repositories;', ( HC.FILE_REPOSITORY, ) ) + c.execute( 'INSERT INTO addresses SELECT file_repository_id, host, port, last_error FROM file_repositories;' ) + c.execute( 'INSERT INTO accounts SELECT file_repository_id, access_key, account FROM file_repositories;' ) + c.execute( 'INSERT INTO repositories SELECT file_repository_id, first_begin, next_begin FROM file_repositories;' ) + + c.execute( 'INSERT INTO services ( type, name ) VALUES ( ?, ? );', ( HC.LOCAL_FILE, 'local' ) ) + + local_service_id = c.lastrowid + + c.execute( 'INSERT INTO services ( type, name ) VALUES ( ?, ? 
);', ( HC.TAG_REPOSITORY, 'public tag repository' ) ) + + public_tag_service_id = c.lastrowid + + c.execute( 'INSERT INTO addresses SELECT ?, host, port, last_error FROM public_tag_repository;', ( public_tag_service_id, ) ) + c.execute( 'INSERT INTO accounts SELECT ?, access_key, account FROM public_tag_repository;', ( public_tag_service_id, ) ) + c.execute( 'INSERT INTO repositories SELECT ?, first_begin, next_begin FROM public_tag_repository;', ( public_tag_service_id, ) ) + + c.execute( 'INSERT INTO news SELECT file_repository_id, news, timestamp FROM file_repository_news;' ) + c.execute( 'INSERT INTO news SELECT ?, news, timestamp FROM public_tag_repository_news;', ( public_tag_service_id, ) ) + + c.execute( 'INSERT INTO deleted_mappings SELECT ?, namespace_id, tag_id, hash_id FROM deleted_public_mappings;', ( public_tag_service_id, ) ) + c.execute( 'INSERT INTO mapping_petitions SELECT ?, namespace_id, tag_id, hash_id, reason_id FROM pending_public_mapping_petitions;', ( public_tag_service_id, ) ) + c.execute( 'INSERT INTO pending_mappings SELECT ?, namespace_id, tag_id, hash_id FROM pending_public_mappings;', ( public_tag_service_id, ) ) + c.execute( 'INSERT INTO mappings SELECT ?, namespace_id, tag_id, hash_id FROM public_mappings;', ( public_tag_service_id, ) ) + + c.execute( 'INSERT INTO active_mappings SELECT namespace_id, tag_id, hash_id FROM mappings WHERE service_id = ?;', ( public_tag_service_id, ) ) + + c.execute( 'INSERT INTO deleted_files SELECT ?, hash_id FROM deleted_local_files;', ( local_service_id, ) ) + c.execute( 'INSERT INTO deleted_files SELECT file_repository_id, hash_id FROM deleted_remote_files;' ) + c.execute( 'INSERT INTO files_info SELECT ?, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM local_files;', ( local_service_id, ) ) + c.execute( 'INSERT INTO files_info SELECT file_repository_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM remote_files;' ) + c.execute( 'INSERT INTO file_transfers SELECT file_repository_id, ?, hash_id FROM file_downloads;', ( local_service_id, ) ) + c.execute( 'INSERT INTO file_transfers SELECT ?, file_repository_id, hash_id FROM pending_files;', ( local_service_id, ) ) + c.execute( 'INSERT INTO file_petitions SELECT file_repository_id, reason_id, hash_id FROM pending_file_petitions;' ) + c.execute( 'INSERT INTO inbox SELECT hash_id FROM local_files WHERE inbox = ?;', ( True, ) ) + + c.execute( 'INSERT INTO thumbnails SELECT file_repository_id, hash_id, thumbnail FROM remote_thumbnails;' ) + c.execute( 'INSERT INTO thumbnails SELECT ?, hash_id, thumbnail FROM local_thumbnails;', ( local_service_id, ) ) + c.execute( 'INSERT INTO thumbnails_resized SELECT file_repository_id, hash_id, thumbnail_resized FROM remote_thumbnails_resized;' ) + c.execute( 'INSERT INTO thumbnails_resized SELECT ?, hash_id, thumbnail_resized FROM local_thumbnails_resized;', ( local_service_id, ) ) + + c.execute( 'DROP TABLE file_repositories;' ) + c.execute( 'DROP TABLE public_tag_repository;' ) + + c.execute( 'DROP TABLE file_repository_news;' ) + c.execute( 'DROP TABLE public_tag_repository_news;' ) + + c.execute( 'DROP TABLE deleted_public_mappings;' ) + c.execute( 'DROP TABLE pending_public_mapping_petitions;' ) + c.execute( 'DROP TABLE pending_public_mappings;' ) + c.execute( 'DROP TABLE public_mappings;' ) + + c.execute( 'DROP TABLE main.deleted_local_files;' ) + c.execute( 'DROP TABLE main.deleted_remote_files;' ) + c.execute( 'DROP TABLE main.file_downloads;' ) + c.execute( 
'DROP TABLE main.local_files;' ) + c.execute( 'DROP TABLE main.pending_file_petitions;' ) + c.execute( 'DROP TABLE main.pending_files;' ) + c.execute( 'DROP TABLE main.remote_files;' ) + + c.execute( 'DROP TABLE remote_thumbnails;' ) + c.execute( 'DROP TABLE local_thumbnails;' ) + c.execute( 'DROP TABLE remote_thumbnails_resized;' ) + c.execute( 'DROP TABLE local_thumbnails_resized;' ) + + + if version < 19: + + c.execute( 'CREATE TABLE service_info ( service_id INTEGER, info_type INTEGER, info INTEGER, PRIMARY KEY ( service_id, info_type ) );', ) + + c.execute( 'CREATE TABLE tag_service_precedence ( service_id INTEGER PRIMARY KEY, precedence INTEGER );' ) + + c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) SELECT service_id, service_id FROM services WHERE type = ?;', ( HC.TAG_REPOSITORY, ) ) + + + if version < 21: + + c.execute( 'CREATE TABLE files_info_db.perceptual_hashes ( service_id INTEGER, hash_id INTEGER, phash BLOB_BYTES, PRIMARY KEY( service_id, hash_id ) );' ) + + + if version < 22: + + c.execute( 'DELETE FROM perceptual_hashes;' ) + + # there is some type-casting problem here that I can't figure out, so have to do it the long way + # c.execute( 'INSERT INTO perceptual_hashes SELECT service_id, hash_id, CAST hydrus_phash( thumbnail ) FROM thumbnails;' ) + + thumbnail_ids = c.execute( 'SELECT service_id, hash_id FROM thumbnails;' ).fetchall() + + for ( service_id, hash_id ) in thumbnail_ids: + + ( thumbnail, ) = c.execute( 'SELECT thumbnail FROM thumbnails WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() + + phash = HydrusImageHandling.GeneratePerceptualHash( thumbnail ) + + c.execute( 'INSERT INTO perceptual_hashes VALUES ( ?, ?, ? );', ( service_id, hash_id, sqlite3.Binary( phash ) ) ) + + + + if version < 24: + + c.execute( 'CREATE TABLE imageboard_sites ( site_id INTEGER PRIMARY KEY, name TEXT );', ) + + c.execute( 'CREATE TABLE imageboards ( site_id INTEGER, name TEXT, imageboard TEXT_YAML, PRIMARY KEY ( site_id, name ) );', ) + + for ( site_name, imageboards ) in CC.DEFAULT_IMAGEBOARDS: + + site_id = self._GetSiteId( c, site_name ) + + c.executemany( 'INSERT INTO imageboards VALUES ( ?, ?, ? 
);', [ ( site_id, imageboard.GetName(), imageboard ) for imageboard in imageboards ] ) + + + + if version < 26: + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + self._options[ 'num_autocomplete_chars' ] = 1 + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 27: + + c.execute( 'CREATE TABLE files_info_db.urls ( url TEXT PRIMARY KEY, hash_id INTEGER );' ) + c.execute( 'CREATE INDEX files_info_db.urls_hash_id ON urls ( hash_id );' ) + + + if version < 29: + + files_db_path = HC.DB_DIR + os.path.sep + 'client_files.db' + + c.execute( 'COMMIT' ) + + # can't do it inside a transaction + c.execute( 'ATTACH database "' + files_db_path + '" as files_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + os.mkdir( HC.CLIENT_FILES_DIR ) + + ( local_service_id, ) = c.execute( 'SELECT service_id FROM services WHERE type = ?;', ( HC.LOCAL_FILE, ) ).fetchone() + + all_local_files = c.execute( 'SELECT hash_id, hash FROM files_info, hashes USING ( hash_id ) WHERE service_id = ?;', ( local_service_id, ) ).fetchall() + + for i in range( 0, len( all_local_files ), 100 ): + + wx.GetApp().SetSplashText( 'updating db to v29 ' + str( i ) + '/' + str( len( all_local_files ) ) ) + + local_files_subset = all_local_files[ i : i + 100 ] + + for ( hash_id, hash ) in local_files_subset: + + ( file, ) = c.execute( 'SELECT file FROM files WHERE hash_id = ?', ( hash_id, ) ).fetchone() + + path_to = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path_to, 'wb' ) as f: f.write( file ) + + + c.execute( 'DELETE FROM files WHERE hash_id IN ' + HC.SplayListForDB( [ hash_id for ( hash_id, hash ) in local_files_subset ] ) + ';' ) + + c.execute( 'COMMIT' ) + + # slow truncate happens here! + + c.execute( 'BEGIN IMMEDIATE' ) + + + c.execute( 'COMMIT' ) + + # can't do it inside a transaction + c.execute( 'DETACH DATABASE files_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + os.remove( files_db_path ) + + + if version < 30: + + thumbnails_db_path = HC.DB_DIR + os.path.sep + 'client_thumbnails.db' + thumbnails_resized_db_path = HC.DB_DIR + os.path.sep + 'client_thumbnails_resized.db' + + c.execute( 'COMMIT' ) + + # can't do it inside a transaction + c.execute( 'ATTACH database "' + thumbnails_db_path + '" as thumbnails_db;' ) + + os.mkdir( HC.CLIENT_THUMBNAILS_DIR ) + + all_thumbnails = c.execute( 'SELECT DISTINCT hash_id, hash FROM thumbnails, hashes USING ( hash_id );' ).fetchall() + + all_service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM services;' ) ] + + for i in range( 0, len( all_thumbnails ), 500 ): + + wx.GetApp().SetSplashText( 'updating db to v30 ' + str( i ) + '/' + str( len( all_thumbnails ) ) ) + + thumbnails_subset = all_thumbnails[ i : i + 500 ] + + for ( hash_id, hash ) in thumbnails_subset: + + ( thumbnail, ) = c.execute( 'SELECT thumbnail FROM thumbnails WHERE service_id IN ' + HC.SplayListForDB( all_service_ids ) + ' AND hash_id = ?', ( hash_id, ) ).fetchone() + + path_to = HC.CLIENT_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path_to, 'wb' ) as f: f.write( thumbnail ) + + + + # can't do it inside a transaction + c.execute( 'DETACH DATABASE thumbnails_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + os.remove( thumbnails_db_path ) + os.remove( thumbnails_resized_db_path ) + + all_p_hashes = c.execute( 'SELECT DISTINCT hash_id, phash FROM perceptual_hashes;' ).fetchall() + + c.execute( 'DROP TABLE perceptual_hashes;' ) + + c.execute( 'CREATE TABLE 
files_info_db.perceptual_hashes ( hash_id INTEGER PRIMARY KEY, phash BLOB_BYTES );' ) + + c.executemany( 'INSERT OR IGNORE INTO perceptual_hashes ( hash_id, phash ) VALUES ( ?, ? );', [ ( hash_id, sqlite3.Binary( phash ) ) for ( hash_id, phash ) in all_p_hashes ] ) + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + default_namespace_colours = {} + + default_namespace_colours[ 'system' ] = ( 153, 101, 21 ) + default_namespace_colours[ 'creator' ] = ( 170, 0, 0 ) + default_namespace_colours[ 'character' ] = ( 0, 170, 0 ) + default_namespace_colours[ 'series' ] = ( 170, 0, 170 ) + default_namespace_colours[ None ] = ( 114, 160, 193 ) + default_namespace_colours[ '' ] = ( 0, 111, 250 ) + + self._options[ 'namespace_colours' ] = default_namespace_colours + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 31: + + c.execute( 'CREATE TABLE boorus ( name TEXT PRIMARY KEY, booru TEXT_YAML );', ) + + c.executemany( 'INSERT INTO boorus VALUES ( ?, ? );', [ ( booru.GetName(), booru ) for booru in CC.DEFAULT_BOORUS ] ) + + + if version < 33: + + try: c.execute( 'SELECT name, booru FROM boorus;' ).fetchall() + except: + + c.execute( 'CREATE TABLE boorus ( name TEXT PRIMARY KEY, booru TEXT_YAML );', ) + + c.executemany( 'INSERT INTO boorus VALUES ( ?, ? );', [ ( booru.GetName(), booru ) for booru in CC.DEFAULT_BOORUS ] ) + + + c.execute( 'CREATE TABLE local_hashes ( hash_id INTEGER PRIMARY KEY, md5 BLOB_BYTES, sha1 BLOB_BYTES );' ) + c.execute( 'CREATE INDEX local_hashes_md5_index ON local_hashes ( md5 );' ) + c.execute( 'CREATE INDEX local_hashes_sha1_index ON local_hashes ( sha1 );' ) + + ( local_service_id, ) = c.execute( 'SELECT service_id FROM services WHERE type = ?;', ( HC.LOCAL_FILE, ) ).fetchone() + + hashes = c.execute( 'SELECT hash_id, hash FROM hashes, files_info USING ( hash_id ) WHERE service_id = ?;', ( local_service_id, ) ).fetchall() + + for i in range( 0, len( hashes ), 100 ): + + wx.GetApp().SetSplashText( 'updating db to v33 ' + str( i ) + '/' + str( len( hashes ) ) ) + + hashes_subset = hashes[ i : i + 100 ] + + inserts = [] + + for ( hash_id, hash ) in hashes_subset: + + path = HC.CLIENT_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path, 'rb' ) as f: file = f.read() + + md5 = hashlib.md5( file ).digest() + + sha1 = hashlib.sha1( file ).digest() + + inserts.append( ( hash_id, sqlite3.Binary( md5 ), sqlite3.Binary( sha1 ) ) ) + + + c.executemany( 'INSERT INTO local_hashes ( hash_id, md5, sha1 ) VALUES ( ?, ?, ? );', inserts ) + + + + if version < 35: + + c.execute( 'CREATE TABLE active_pending_mappings ( namespace_id INTEGER, tag_id INTEGER, hash_id INTEGER, PRIMARY KEY( namespace_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX active_pending_mappings_tag_id_index ON active_pending_mappings ( tag_id );' ) + c.execute( 'CREATE INDEX active_pending_mappings_hash_id_index ON active_pending_mappings ( hash_id );' ) + + service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM tag_service_precedence ORDER BY precedence DESC;' ) ] + + first_round = True + + for service_id in service_ids: + + c.execute( 'INSERT OR IGNORE INTO active_pending_mappings SELECT namespace_id, tag_id, hash_id FROM pending_mappings WHERE service_id = ?;', ( service_id, ) ) + + # is this incredibly inefficient? 
+ # if this is O( n-squared ) or whatever, just rewrite it as two queries using indices + if not first_round: c.execute( 'DELETE FROM active_pending_mappings WHERE namespace_id || "," || tag_id || "," || hash_id IN ( SELECT namespace_id || "," || tag_id || "," || hash_id FROM deleted_mappings WHERE service_id = ? );', ( service_id, ) ) + + first_round = False + + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + default_sort_by_choices = [] + + default_sort_by_choices.append( ( 'namespaces', [ 'series', 'creator', 'title', 'volume', 'chapter', 'page' ] ) ) + default_sort_by_choices.append( ( 'namespaces', [ 'creator', 'series', 'title', 'volume', 'chapter', 'page' ] ) ) + + self._options[ 'sort_by' ] = default_sort_by_choices + + self._options[ 'default_sort' ] = 0 + self._options[ 'default_collect' ] = 0 + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 36: + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + self._options[ 'gui_capitalisation' ] = False + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 37: + + # reconfig inbox -> file_inbox + + c.execute( 'CREATE TABLE file_inbox ( hash_id INTEGER PRIMARY KEY );' ) + + c.execute( 'INSERT INTO file_inbox SELECT hash_id FROM inbox;' ) + + c.execute( 'DROP TRIGGER inbox_insert_trigger;' ) + c.execute( 'DROP TRIGGER inbox_delete_trigger;' ) + + c.execute( 'DROP TABLE inbox;' ) + + inserts = [] + inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id IN ( SELECT service_id FROM files_info WHERE hash_id = new.hash_id ) AND info_type = ' + str( HC.SERVICE_INFO_NUM_INBOX ) + ';' ) + c.execute( 'CREATE TRIGGER file_inbox_insert_trigger AFTER INSERT ON file_inbox BEGIN ' + ' '.join( inserts ) + ' END;' ) + deletes = [] + deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id IN ( SELECT service_id FROM files_info WHERE hash_id = old.hash_id ) AND info_type = ' + str( HC.SERVICE_INFO_NUM_INBOX ) + ';' ) + c.execute( 'CREATE TRIGGER file_inbox_delete_trigger DELETE ON file_inbox BEGIN ' + ' '.join( deletes ) + ' END;' ) + + # now set up new messaging stuff + + c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' ) + + c.execute( 'CREATE VIRTUAL TABLE conversation_subjects USING fts4( subject );' ) + + c.execute( 'CREATE TABLE message_attachments ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE, hash_id INTEGER );' ) + + c.execute( 'CREATE TABLE message_depots ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, contact_id INTEGER, last_check INTEGER, check_period INTEGER, private_key TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX message_depots_contact_id_index ON message_depots ( contact_id );' ) + + c.execute( 'CREATE TABLE message_destination_map ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_to INTEGER, status_id INTEGER, PRIMARY KEY ( message_id, contact_id_to ) );' ) + c.execute( 'CREATE INDEX message_destination_map_contact_id_to_index ON message_destination_map ( contact_id_to );' ) + c.execute( 'CREATE INDEX message_destination_map_status_id_index ON message_destination_map ( status_id );' ) + + c.execute( 'CREATE TABLE 
message_downloads ( service_id INTEGER REFERENCES services ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE );' ) + c.execute( 'CREATE INDEX message_downloads_service_id_index ON message_downloads ( service_id );' ) + + c.execute( 'CREATE TABLE message_drafts ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, recipients_visible INTEGER_BOOLEAN );' ) + + c.execute( 'CREATE TABLE message_inbox ( message_id INTEGER PRIMARY KEY REFERENCES message_keys ON DELETE CASCADE );' ) + + c.execute( 'CREATE TABLE message_keys ( message_id INTEGER PRIMARY KEY, message_key BLOB_BYTES );' ) + c.execute( 'CREATE INDEX message_keys_message_key_index ON message_keys ( message_key );' ) + + c.execute( 'CREATE VIRTUAL TABLE message_bodies USING fts4( body );' ) + + c.execute( 'CREATE TABLE messages ( conversation_id INTEGER REFERENCES message_keys ( message_id ) ON DELETE CASCADE, message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_id_from INTEGER, timestamp INTEGER, PRIMARY KEY( conversation_id, message_id ) );' ) + c.execute( 'CREATE UNIQUE INDEX messages_message_id_index ON messages ( message_id );' ) + c.execute( 'CREATE INDEX messages_contact_id_from_index ON messages ( contact_id_from );' ) + c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' ) + + c.execute( 'CREATE TABLE statuses ( status_id INTEGER PRIMARY KEY, status TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX statuses_status_index ON statuses ( status );' ) + + c.execute( 'INSERT INTO contacts ( contact_id, contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ?, ? );', ( 1, None, None, 'Anonymous', 'internet', 0 ) ) + # fill the contact key and public key info in for hydrus admin + + + if version < 38: + + c.execute( 'COMMIT' ) + c.execute( 'PRAGMA journal_mode=WAL;' ) # possibly didn't work last time, cause of sqlite dll issue + c.execute( 'BEGIN IMMEDIATE' ) + + contacts_contents = c.execute( 'SELECT * FROM contacts;' ).fetchall() + + c.execute( 'DROP TABLE contacts;' ) + + c.execute( 'CREATE TABLE contacts ( contact_id INTEGER PRIMARY KEY, contact_key BLOB_BYTES, public_key TEXT, name TEXT, host TEXT, port INTEGER );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_name_index ON contacts ( name );' ) + + c.executemany( 'INSERT INTO contacts VALUES ( ?, ?, ?, ?, ?, ? );', contacts_contents ) + + c.execute( 'CREATE TABLE message_statuses_to_apply ( message_id INTEGER, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' ) + + + if version < 39: + + # I accidentally added some buffer public keys in v38, so this is to str() them + updates = [ ( str( public_key ), contact_id ) for ( contact_id, public_key ) in c.execute( 'SELECT contact_id, public_key FROM contacts;' ).fetchall() ] + + c.executemany( 'UPDATE contacts SET public_key = ? WHERE contact_id = ?;', updates ) + + with open( HC.STATIC_DIR + os.sep + 'contact - hydrus admin.yaml', 'rb' ) as f: hydrus_admin = yaml.safe_load( f.read() ) + + ( public_key, name, host, port ) = hydrus_admin.GetInfo() + + contact_key = hydrus_admin.GetContactKey() + + c.execute( 'INSERT OR IGNORE INTO contacts ( contact_key, public_key, name, host, port ) VALUES ( ?, ?, ?, ?, ? 
);', ( sqlite3.Binary( contact_key ), public_key, name, host, port ) ) + + + if version < 41: + + # better name and has foreign key assoc + + c.execute( 'CREATE TABLE incoming_message_statuses ( message_id INTEGER REFERENCES message_keys ON DELETE CASCADE, contact_key BLOB_BYTES, status_id INTEGER, PRIMARY KEY ( message_id, contact_key ) );' ) + + incoming_status_inserts = c.execute( 'SELECT * FROM message_statuses_to_apply;' ).fetchall() + + c.executemany( 'INSERT INTO incoming_message_statuses VALUES ( ?, ?, ? );', incoming_status_inserts ) + + c.execute( 'DROP TABLE message_statuses_to_apply;' ) + + # delete all drafts cause of plaintext->xml conversion + + message_ids = [ message_id for ( message_id, ) in c.execute( 'SELECT message_id FROM message_drafts;' ) ] + + c.execute( 'DELETE FROM message_keys WHERE message_id IN ' + HC.SplayListForDB( message_ids ) + ';' ) + c.execute( 'DELETE FROM message_bodies WHERE docid IN ' + HC.SplayListForDB( message_ids ) + ';' ) + c.execute( 'DELETE FROM conversation_subjects WHERE docid IN ' + HC.SplayListForDB( message_ids ) + ';' ) + + c.execute( 'ALTER TABLE message_depots ADD COLUMN receive_anon INTEGER_BOOLEAN' ) + c.execute( 'UPDATE message_depots SET receive_anon = ?;', ( True, ) ) + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + system_predicates = {} + + system_predicates[ 'age' ] = ( 0, 0, 0, 7 ) + system_predicates[ 'duration' ] = ( 3, 0, 0 ) + system_predicates[ 'height' ] = ( 1, 1200 ) + system_predicates[ 'limit' ] = 600 + system_predicates[ 'mime' ] = ( 0, 0 ) + system_predicates[ 'num_tags' ] = ( 0, 4 ) + system_predicates[ 'ratio' ] = ( 0, 16, 9 ) + system_predicates[ 'size' ] = ( 0, 200, 3 ) + system_predicates[ 'width' ] = ( 1, 1920 ) + + self._options[ 'file_system_predicates' ] = system_predicates + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 42: + + self._RecalcActiveMappings( c ) + + c.execute( 'CREATE TABLE autocomplete_tags_cache ( file_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, tag_service_id INTEGER REFERENCES services ( service_id ) ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, current_count INTEGER, pending_count INTEGER, PRIMARY KEY ( file_service_id, tag_service_id, namespace_id, tag_id ) );' ) + c.execute( 'CREATE INDEX autocomplete_tags_cache_tag_service_id_namespace_id_tag_id_index ON autocomplete_tags_cache ( tag_service_id, namespace_id, tag_id );' ) + + c.execute( 'DROP TRIGGER files_info_insert_trigger;' ) + c.execute( 'DROP TRIGGER files_info_delete_trigger;' ) + + c.execute( 'DROP TRIGGER mappings_insert_trigger;' ) + c.execute( 'DROP TRIGGER mappings_delete_trigger;' ) + + inserts = [] + inserts.append( 'DELETE FROM deleted_files WHERE service_id = new.service_id AND hash_id = new.hash_id;' ) + inserts.append( 'DELETE FROM file_transfers WHERE service_id_to = new.service_id AND hash_id = new.hash_id;' ) + inserts.append( 'UPDATE service_info SET info = info + new.size WHERE service_id = new.service_id AND info_type = ' + str( HC.SERVICE_INFO_TOTAL_SIZE ) + ';' ) + inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_FILES ) + ';' ) + inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND new.mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND info_type = ' + str( HC.SERVICE_INFO_NUM_THUMBNAILS ) + ';' ) + inserts.append( 'DELETE FROM service_info WHERE 
service_id = new.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_INBOX ) + ';' ) + inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';' ) + inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = new.service_id;' ) + c.execute( 'CREATE TRIGGER files_info_insert_trigger AFTER INSERT ON files_info BEGIN ' + ' '.join( inserts ) + ' END;' ) + deletes = [] + deletes.append( 'DELETE FROM file_petitions WHERE service_id = old.service_id AND hash_id = old.hash_id;' ) + deletes.append( 'UPDATE service_info SET info = info - old.size WHERE service_id = old.service_id AND info_type = ' + str( HC.SERVICE_INFO_TOTAL_SIZE ) + ';' ) + deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_FILES ) + ';' ) + deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND old.mime IN ' + HC.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND info_type = ' + str( HC.SERVICE_INFO_NUM_THUMBNAILS ) + ';' ) + deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_INBOX ) + ';' ) + deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';' ) + deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = old.service_id;' ) + c.execute( 'CREATE TRIGGER files_info_delete_trigger DELETE ON files_info BEGIN ' + ' '.join( deletes ) + ' END;' ) + + inserts = [] + inserts.append( 'DELETE FROM deleted_mappings WHERE service_id = new.service_id AND hash_id = new.hash_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' ) + inserts.append( 'DELETE FROM pending_mappings WHERE service_id = new.service_id AND hash_id = new.hash_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' ) + inserts.append( 'UPDATE service_info SET info = info + 1 WHERE service_id = new.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_MAPPINGS ) + ';' ) + inserts.append( 'DELETE FROM service_info WHERE service_id = new.service_id AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS ) ) + ';' ) + inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = new.service_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' ) + inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' ) + c.execute( 'CREATE TRIGGER mappings_insert_trigger AFTER INSERT ON mappings BEGIN ' + ' '.join( inserts ) + ' END;' ) + deletes = [] + deletes.append( 'DELETE FROM mapping_petitions WHERE service_id = old.service_id AND hash_id = old.hash_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' ) + deletes.append( 'UPDATE service_info SET info = info - 1 WHERE service_id = old.service_id AND info_type = ' + str( HC.SERVICE_INFO_NUM_MAPPINGS ) + ';' ) + deletes.append( 'DELETE FROM service_info WHERE service_id = old.service_id AND info_type IN ' + HC.SplayListForDB( ( HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS ) ) + ';' ) + deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = old.service_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' ) + deletes.append( 'DELETE FROM 
autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' ) + c.execute( 'CREATE TRIGGER mappings_delete_trigger DELETE ON mappings BEGIN ' + ' '.join( deletes ) + ' END;' ) + + inserts = [] + inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = new.service_id AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' ) + inserts.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = new.namespace_id AND tag_id = new.tag_id;' ) + c.execute( 'CREATE TRIGGER pending_mappings_insert_trigger AFTER INSERT ON pending_mappings BEGIN ' + ' '.join( inserts ) + ' END;' ) + deletes = [] + deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = old.service_id AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' ) + deletes.append( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id IS NULL AND namespace_id = old.namespace_id AND tag_id = old.tag_id;' ) + c.execute( 'CREATE TRIGGER pending_mappings_delete_trigger DELETE ON pending_mappings BEGIN ' + ' '.join( deletes ) + ' END;' ) + + # All of 4chan's post urls are now https. There is a 301 redirect from the http, but let's update anyway. + + all_imageboards = c.execute( 'SELECT site_id, name, imageboard FROM imageboards;' ).fetchall() + + for ( site_id, name, imageboard ) in all_imageboards: + + imageboard._post_url = imageboard._post_url.replace( 'http', 'https' ) + + + c.executemany( 'UPDATE imageboards SET imageboard = ? WHERE site_id = ? AND name = ?;', [ ( imageboard, site_id, name ) for ( site_id, name, imageboard ) in all_imageboards ] ) + + + if version < 43: + + name = 'konachan' + search_url = 'http://konachan.com/post?page=%index%&tags=%tags%' + search_separator = '+' + gallery_advance_num = 1 + thumb_classname = 'thumb' + image_id = None + image_data = 'View larger version' + tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + + booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) ) + + + if version < 44: + + name = 'e621' + + result = c.execute( 'SELECT booru FROM boorus WHERE name = ?;', ( name, ) ).fetchone() + + if result is not None: + + ( booru, ) = result + + ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData() + + thumb_classname = 'thumb blacklist' # from thumb_blacklisted + + booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + c.execute( 'UPDATE boorus SET booru = ? WHERE name = ?;', ( booru, booru.GetName() ) ) + + + name = 'rule34@booru.org' + + result = c.execute( 'SELECT booru FROM boorus WHERE name = ?;', ( name, ) ).fetchone() + + if result is not None: + + ( booru, ) = result + + ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData() + + gallery_advance_num = 50 + + booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + c.execute( 'UPDATE boorus SET booru = ? 
WHERE name = ?;', ( booru, booru.GetName() ) ) + + + c.execute( 'DROP TRIGGER files_info_insert_trigger;' ) + c.execute( 'DROP TRIGGER files_info_delete_trigger;' ) + c.execute( 'DROP TRIGGER deleted_files_insert_trigger;' ) + c.execute( 'DROP TRIGGER deleted_files_delete_trigger;' ) + c.execute( 'DROP TRIGGER file_inbox_insert_trigger;' ) + c.execute( 'DROP TRIGGER file_inbox_delete_trigger;' ) + c.execute( 'DROP TRIGGER mappings_insert_trigger;' ) + c.execute( 'DROP TRIGGER mappings_delete_trigger;' ) + c.execute( 'DROP TRIGGER deleted_mappings_insert_trigger;' ) + c.execute( 'DROP TRIGGER deleted_mappings_delete_trigger;' ) + c.execute( 'DROP TRIGGER pending_mappings_insert_trigger;' ) + c.execute( 'DROP TRIGGER pending_mappings_delete_trigger;' ) + + c.execute( 'UPDATE services SET name = ? WHERE name = ?;', ( 'local files renamed', 'local files' ) ) + c.execute( 'UPDATE services SET name = ? WHERE type = ?;', ( 'local files', HC.LOCAL_FILE ) ) + + c.execute( 'INSERT INTO services ( type, name ) VALUES ( ?, ? );', ( HC.LOCAL_TAG, 'local tags' ) ) + + local_tag_service_id = c.lastrowid + + c.execute( 'INSERT INTO tag_service_precedence ( service_id, precedence ) SELECT ?, CASE WHEN MIN( precedence ) NOT NULL THEN MIN( precedence ) - 1 ELSE 0 END FROM tag_service_precedence;', ( local_tag_service_id, ) ) + + + if version < 46: + + name = 'rule34@paheal' + search_url = 'http://rule34.paheal.net/post/list/%tags%/%index%' + search_separator = '%20' + gallery_advance_num = 1 + thumb_classname = 'thumb' + image_id = 'main_image' + image_data = None + tag_classnames_to_namespaces = { 'tag_name' : '' } + + booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + c.execute( 'INSERT INTO boorus VALUES ( ?, ? );', ( booru.GetName(), booru ) ) + + name = 'tbib' + search_url = 'http://tbib.org/index.php?page=post&s=list&tags=%tags%&pid=%index%' + search_separator = '+' + gallery_advance_num = 25 + thumb_classname = 'thumb' + image_id = None + image_data = 'Original image' + tag_classnames_to_namespaces = { 'tag-type-general' : '', 'tag-type-character' : 'character', 'tag-type-copyright' : 'series', 'tag-type-artist' : 'creator' } + + booru = CC.Booru( name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + c.execute( 'INSERT INTO boorus VALUES ( ?, ? 
);', ( booru.GetName(), booru ) ) + + + if version < 48: + + c.execute( 'CREATE TABLE local_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, rating REAL, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX local_ratings_hash_id_index ON local_ratings ( hash_id );' ) + c.execute( 'CREATE INDEX local_ratings_rating_index ON local_ratings ( rating );' ) + + c.execute( 'CREATE TABLE remote_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, count INTEGER, rating REAL, score REAL, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX remote_ratings_hash_id_index ON remote_ratings ( hash_id );' ) + c.execute( 'CREATE INDEX remote_ratings_rating_index ON remote_ratings ( rating );' ) + c.execute( 'CREATE INDEX remote_ratings_score_index ON remote_ratings ( score );' ) + + c.execute( 'CREATE TABLE ratings_numerical ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, lower INTEGER, upper INTEGER );' ) + + c.execute( 'CREATE TABLE ratings_like ( service_id INTEGER PRIMARY KEY REFERENCES services ON DELETE CASCADE, like TEXT, dislike TEXT );' ) + + + if version < 49: + + result = c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( '', ) ).fetchone() + + if result is not None: + + ( tag_id, ) = result + + c.execute( 'DELETE FROM mappings WHERE tag_id = ?;', ( tag_id, ) ) + c.execute( 'DELETE FROM pending_mappings WHERE tag_id = ?;', ( tag_id, ) ) + c.execute( 'DELETE FROM active_mappings WHERE tag_id = ?;', ( tag_id, ) ) + c.execute( 'DELETE FROM active_pending_mappings WHERE tag_id = ?;', ( tag_id, ) ) + + + wx.GetApp().SetSplashText( 'making new cache, may take a minute' ) + + c.execute( 'CREATE TABLE existing_tags ( namespace_id INTEGER, tag_id INTEGER, PRIMARY KEY( namespace_id, tag_id ) );' ) + c.execute( 'CREATE INDEX existing_tags_tag_id_index ON existing_tags ( tag_id );' ) + + all_tag_ids = set() + + all_tag_ids.update( c.execute( 'SELECT namespace_id, tag_id FROM mappings;' ).fetchall() ) + all_tag_ids.update( c.execute( 'SELECT namespace_id, tag_id FROM pending_mappings;' ).fetchall() ) + + c.executemany( 'INSERT INTO existing_tags ( namespace_id, tag_id ) VALUES ( ?, ? 
);', all_tag_ids ) + + ( self._options, ) = c.execute( 'SELECT options FROM options;' ).fetchone() + + self._options[ 'show_all_tags_in_autocomplete' ] = True + + self._options[ 'file_system_predicates' ][ 'local_rating_numerical' ] = ( 0, 3 ) + self._options[ 'file_system_predicates' ][ 'local_rating_like' ] = 0 + + shortcuts = {} + + shortcuts[ wx.ACCEL_NORMAL ] = {} + shortcuts[ wx.ACCEL_CTRL ] = {} + shortcuts[ wx.ACCEL_ALT ] = {} + shortcuts[ wx.ACCEL_SHIFT ] = {} + + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F3 ] = 'manage_tags' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F4 ] = 'manage_ratings' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F5 ] = 'refresh' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F7 ] = 'archive' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F12 ] = 'filter' + shortcuts[ wx.ACCEL_NORMAL ][ wx.WXK_F9 ] = 'new_page' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'T' ) ] = 'new_page' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'W' ) ] = 'close_page' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'R' ) ] = 'show_hide_splitters' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'S' ) ] = 'set_search_focus' + shortcuts[ wx.ACCEL_CTRL ][ ord( 'I' ) ] = 'synchronised_wait_switch' + + self._options[ 'shortcuts' ] = shortcuts + + c.execute( 'UPDATE options SET options = ?;', ( self._options, ) ) + + + if version < 50: + + c.execute( 'CREATE TABLE fourchan_pass ( token TEXT, pin TEXT, timeout INTEGER );' ) + + + + def _UpdateDBOldPost( self, c, version ): + + if version == 34: # == is important here + + try: + + main_db_path = HC.DB_DIR + os.path.sep + 'client_main.db' + mappings_db_path = HC.DB_DIR + os.path.sep + 'client_mappings.db' + active_mappings_db_path = HC.DB_DIR + os.path.sep + 'client_active_mappings.db' + files_info_db_path = HC.DB_DIR + os.path.sep + 'client_files_info.db' + + if os.path.exists( main_db_path ): + + # can't do it inside transaction + + wx.GetApp().SetSplashText( 'consolidating db - preparing' ) + + c.execute( 'ATTACH database "' + main_db_path + '" as main_db;' ) + c.execute( 'ATTACH database "' + files_info_db_path + '" as files_info_db;' ) + c.execute( 'ATTACH database "' + mappings_db_path + '" as mappings_db;' ) + c.execute( 'ATTACH database "' + active_mappings_db_path + '" as active_mappings_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + c.execute( 'REPLACE INTO main.services SELECT * FROM main_db.services;' ) + + all_service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM main.services;' ) ] + + c.execute( 'DELETE FROM main_db.accounts WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.addresses WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.news WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.repositories WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.service_info WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.tag_service_precedence WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'DELETE FROM mappings_db.deleted_mappings WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM mappings_db.mappings WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM mappings_db.mapping_petitions WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 
'DELETE FROM mappings_db.pending_mappings WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'DELETE FROM files_info_db.deleted_files WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM files_info_db.files_info WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM files_info_db.file_transfers WHERE service_id_to NOT IN ' + HC.SplayListForDB( all_service_ids ) + ' OR service_id_from NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM files_info_db.file_petitions WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'DELETE FROM main.options;' ) + + wx.GetApp().SetSplashText( 'consolidating db - 1/4' ) + + c.execute( 'REPLACE INTO main.accounts SELECT * FROM main_db.accounts;' ) + c.execute( 'REPLACE INTO main.addresses SELECT * FROM main_db.addresses;' ) + c.execute( 'REPLACE INTO main.boorus SELECT * FROM main_db.boorus;' ) + c.execute( 'REPLACE INTO main.hashes SELECT * FROM main_db.hashes;' ) + c.execute( 'REPLACE INTO main.imageboard_sites SELECT * FROM main_db.imageboard_sites;' ) + c.execute( 'REPLACE INTO main.imageboards SELECT * FROM main_db.imageboards;' ) + c.execute( 'REPLACE INTO main.local_hashes SELECT * FROM main_db.local_hashes;' ) + c.execute( 'REPLACE INTO main.namespaces SELECT * FROM main_db.namespaces;' ) + c.execute( 'REPLACE INTO main.news SELECT * FROM main_db.news;' ) + c.execute( 'REPLACE INTO main.options SELECT * FROM main_db.options;' ) + c.execute( 'REPLACE INTO main.reasons SELECT * FROM main_db.reasons;' ) + c.execute( 'REPLACE INTO main.repositories SELECT * FROM main_db.repositories;' ) + # don't do service info, so it gets recalced naturally + c.execute( 'REPLACE INTO main.tag_service_precedence SELECT * FROM main_db.tag_service_precedence;' ) + c.execute( 'REPLACE INTO main.tags SELECT * FROM main_db.tags;' ) + # don't do version, lol + + wx.GetApp().SetSplashText( 'consolidating db - 2/4' ) + + c.execute( 'REPLACE INTO main.deleted_mappings SELECT * FROM mappings_db.deleted_mappings;' ) + c.execute( 'REPLACE INTO main.mappings SELECT * FROM mappings_db.mappings;' ) + c.execute( 'REPLACE INTO main.mapping_petitions SELECT * FROM mappings_db.mapping_petitions;' ) + c.execute( 'REPLACE INTO main.pending_mappings SELECT * FROM mappings_db.pending_mappings;' ) + + wx.GetApp().SetSplashText( 'consolidating db - 3/4' ) + + c.execute( 'REPLACE INTO main.active_mappings SELECT * FROM active_mappings_db.active_mappings;' ) + + wx.GetApp().SetSplashText( 'consolidating db - 4/4' ) + + c.execute( 'REPLACE INTO main.deleted_files SELECT * FROM files_info_db.deleted_files;' ) + c.execute( 'REPLACE INTO main.files_info SELECT * FROM files_info_db.files_info;' ) + c.execute( 'REPLACE INTO main.file_transfers SELECT * FROM files_info_db.file_transfers;' ) + c.execute( 'REPLACE INTO main.file_petitions SELECT * FROM files_info_db.file_petitions;' ) + c.execute( 'REPLACE INTO main.inbox SELECT * FROM files_info_db.inbox;' ) + c.execute( 'REPLACE INTO main.perceptual_hashes SELECT * FROM files_info_db.perceptual_hashes;' ) + c.execute( 'REPLACE INTO main.urls SELECT * FROM files_info_db.urls;' ) + + c.execute( 'COMMIT' ) + + wx.GetApp().SetSplashText( 'consolidating db - cleaning up' ) + + c.execute( 'DETACH database main_db;' ) + c.execute( 'DETACH database files_info_db;' ) + c.execute( 'DETACH database mappings_db;' ) + c.execute( 'DETACH database active_mappings_db;' ) + + 
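# the consolidation is committed and the attached databases are detached, so the old split db files can now be deleted +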
os.remove( main_db_path ) + os.remove( mappings_db_path ) + os.remove( active_mappings_db_path ) + os.remove( files_info_db_path ) + + + except: + + print( traceback.format_exc() ) + + try: c.execute( 'ROLLBACK' ) + except: pass + + raise Exception( 'Tried to update the client db, but something went wrong:' + os.linesep + traceback.format_exc() ) + + + + + def _Vacuum( self ): + + ( db, c ) = self._GetDBCursor() + + c.execute( 'VACUUM' ) + + c.execute( 'REPLACE INTO shutdown_timestamps ( shutdown_type, timestamp ) VALUES ( ?, ? );', ( CC.SHUTDOWN_TIMESTAMP_VACUUM, int( time.time() ) ) ) + + self.pub( 'log_message', 'database', 'vacuumed successfully' ) + + + def pub( self, topic, *args, **kwargs ): self._pubsubs.append( ( topic, args, kwargs ) ) + + def DAEMONDownloadFiles( self ): + + all_downloads = self.Read( 'all_downloads', HC.LOW_PRIORITY ) + + num_downloads = sum( [ len( hashes ) for ( service_identifier, hashes ) in all_downloads.items() ] ) + + for ( service_identifier, hashes ) in all_downloads.items(): + + try: + + try: file_repository = self.Read( 'service', HC.LOW_PRIORITY, service_identifier ) + except: continue + + HC.pubsub.pub( 'downloads_status', HC.ConvertIntToPrettyString( num_downloads ) + ' file downloads' ) + + if file_repository.CanDownload(): + + connection = file_repository.GetConnection() + + for hash in hashes: + + if HC.shutdown: return + + file = connection.Get( 'file', hash = hash.encode( 'hex' ) ) + + num_downloads -= 1 + + wx.GetApp().WaitUntilGoodTimeToUseGUIThread() + + HC.pubsub.pub( 'downloads_status', HC.ConvertIntToPrettyString( num_downloads ) + ' file downloads' ) + + self.Write( 'import_file', HC.LOW_PRIORITY, file ) + + HC.pubsub.pub( 'content_updates_data', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] ) + HC.pubsub.pub( 'content_updates_gui', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ADD, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( hash, ) ) ] ) + + self.pub( 'log_message', 'download files daemon', 'downloaded ' + hash.encode( 'hex' ) + ' from ' + file_repository.GetServiceIdentifier().GetName() ) + + time.sleep( 0.25 ) + + + + except: pass # if bad download, the repo gets dinged an error. 
no need to do anything here + + + if num_downloads == 0: HC.pubsub.pub( 'downloads_status', 'no file downloads' ) + elif num_downloads > 0: HC.pubsub.pub( 'downloads_status', HC.ConvertIntToPrettyString( num_downloads ) + ' inactive file downloads' ) + + + def DAEMONDownloadThumbnails( self ): + + service_identifiers = self.Read( 'service_identifiers', HC.LOW_PRIORITY, ( HC.FILE_REPOSITORY, ) ) + + thumbnail_hashes_i_have = { path.decode( 'hex' ) for path in dircache.listdir( HC.CLIENT_THUMBNAILS_DIR ) if not path.endswith( '_resized' ) } + + for service_identifier in service_identifiers: + + thumbnail_hashes_i_should_have = self.Read( 'thumbnail_hashes_i_should_have', HC.LOW_PRIORITY, service_identifier ) + + thumbnail_hashes_i_need = list( thumbnail_hashes_i_should_have - thumbnail_hashes_i_have ) + + if len( thumbnail_hashes_i_need ) > 0: + + try: file_repository = self.Read( 'service', HC.LOW_PRIORITY, service_identifier ) + except: continue + + if file_repository.CanDownload(): + + try: + + connection = file_repository.GetConnection() + + num_per_round = 50 + + for i in range( 0, len( thumbnail_hashes_i_need ), num_per_round ): + + if HC.shutdown: return + + thumbnails = [] + + for hash in thumbnail_hashes_i_need[ i : i + num_per_round ]: thumbnails.append( ( hash, connection.Get( 'thumbnail', hash = hash.encode( 'hex' ) ) ) ) + + wx.GetApp().WaitUntilGoodTimeToUseGUIThread() + + self.Write( 'thumbnails', HC.LOW_PRIORITY, thumbnails ) + + self.pub( 'add_thumbnail_count', service_identifier, len( thumbnails ) ) + + thumbnail_hashes_i_have.update( { hash for ( hash, thumbnail ) in thumbnails } ) + + self.pub( 'log_message', 'download thumbnails daemon', 'downloaded ' + str( len( thumbnails ) ) + ' thumbnails from ' + service_identifier.GetName() ) + + time.sleep( 0.25 ) + + + except: pass # if bad download, the repo gets dinged an error. no need to do anything here + + + + + + def DAEMONFlushServiceUpdates( self, update_log ): self.Write( 'service_updates', HC.HIGH_PRIORITY, update_log ) + + def DAEMONSynchroniseAccounts( self ): + + services = self.Read( 'services', HC.LOW_PRIORITY, HC.RESTRICTED_SERVICES ) + + for service in services: + + account = service.GetAccount() + service_identifier = service.GetServiceIdentifier() + credentials = service.GetCredentials() + + if not account.IsBanned() and account.IsStale() and credentials.HasAccessKey() and not service.HasRecentError(): + + try: + + connection = service.GetConnection() + + connection.Get( 'account' ) + + HC.pubsub.pub( 'log_message', 'synchronise accounts daemon', 'successfully refreshed account for ' + service_identifier.GetName() ) + + except Exception as e: + + name = service_identifier.GetName() + + error_message = 'failed to refresh account for ' + name + ':' + os.linesep + os.linesep + unicode( e ) + + HC.pubsub.pub( 'log_error', 'synchronise accounts daemon', error_message ) + + print( error_message ) + + + + + HC.pubsub.pub( 'notify_new_permissions' ) + + + def DAEMONSynchroniseMessages( self ): + + service_identifiers = self.Read( 'service_identifiers', HC.LOW_PRIORITY, ( HC.MESSAGE_DEPOT, ) ) + + for service_identifier in service_identifiers: + + try: + + name = service_identifier.GetName() + + service_type = service_identifier.GetType() + + try: service = self.Read( 'service', HC.LOW_PRIORITY, service_identifier ) + except: continue + + if service.CanCheck(): + + contact = service.GetContact() + + connection = service.GetConnection() + + private_key = service.GetPrivateKey() + + # is the account associated? 
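+ # if the depot does not yet know our public key, the code below derives it from the private key, posts it to the service, and re-reads the service so the contact reflects the association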
+ + if not contact.HasPublicKey(): + + try: + + public_key = HydrusMessageHandling.GetPublicKey( private_key ) + + connection.Post( 'contact', public_key = public_key ) + + self.Write( 'contact_associated', HC.HIGH_PRIORITY, service_identifier ) + + service = self.Read( 'service', HC.LOW_PRIORITY, service_identifier ) + + contact = service.GetContact() + + HC.pubsub.pub( 'log_message', 'synchronise messages daemon', 'associated public key with account at ' + service_identifier.GetName() ) + + except: + + continue + + + + # see if there are any new message_keys to download or statuses + + last_check = service.GetLastCheck() + + ( message_keys, statuses ) = connection.Get( 'messageinfosince', since = last_check ) + + decrypted_statuses = [] + + for status in statuses: + + try: decrypted_statuses.append( HydrusMessageHandling.UnpackageDeliveredStatus( status, private_key ) ) + except: pass + + + new_last_check = int( time.time() ) - 5 + + self.Write( 'message_info_since', HC.LOW_PRIORITY, service_identifier, message_keys, decrypted_statuses, new_last_check ) + + if len( message_keys ) > 0: HC.pubsub.pub( 'log_message', 'synchronise messages daemon', 'checked ' + service_identifier.GetName() + ' up to ' + HC.ConvertTimestampToPrettyTime( new_last_check ) + ', finding ' + str( len( message_keys ) ) + ' new messages' ) + + + self.WaitUntilGoodTimeToUseDBThread() + + # try to download any messages that still need downloading + + if service.CanDownload(): + + serverside_message_keys = self.Read( 'message_keys_to_download', HC.LOW_PRIORITY, service_identifier ) + + if len( serverside_message_keys ) > 0: + + connection = service.GetConnection() + + private_key = service.GetPrivateKey() + + num_processed = 0 + + for serverside_message_key in serverside_message_keys: + + self.WaitUntilGoodTimeToUseDBThread() + + try: + + encrypted_message = connection.Get( 'message', message_key = serverside_message_key.encode( 'hex' ) ) + + message = HydrusMessageHandling.UnpackageDeliveredMessage( encrypted_message, private_key ) + + self.Write( 'message', HC.LOW_PRIORITY, message, serverside_message_key = serverside_message_key ) + + num_processed += 1 + + except Exception as e: + + if issubclass( e, httplib.HTTPException ): break # it was an http error; try again later + + + + if num_processed > 0: + + HC.pubsub.pub( 'log_message', 'synchronise messages daemon', 'downloaded and parsed ' + str( num_processed ) + ' messages from ' + service_identifier.GetName() ) + + + + + except Exception as e: + + error_message = 'failed to check ' + name + ':' + os.linesep + os.linesep + unicode( e ) + + HC.pubsub.pub( 'log_error', 'synchronise messages daemon', error_message ) + + print( error_message ) + + + + self.Write( 'flush_message_statuses', HC.LOW_PRIORITY ) + + # send messages to recipients and update my status to sent/failed + + messages_to_send = self.Read( 'messages_to_send', HC.LOW_PRIORITY ) + + for ( message_key, contacts_to ) in messages_to_send: + + message = self.Read( 'transport_message', HC.LOW_PRIORITY, message_key ) + + contact_from = message.GetContactFrom() + + from_anon = contact_from is None or contact_from.GetName() == 'Anonymous' + + if not from_anon: + + my_public_key = contact_from.GetPublicKey() + my_contact_key = contact_from.GetContactKey() + + my_message_depot = self.Read( 'service', HC.LOW_PRIORITY, contact_from ) + + from_connection = my_message_depot.GetConnection() + + + service_status_updates = [] + local_status_updates = [] + + for contact_to in contacts_to: + + public_key = 
contact_to.GetPublicKey() + contact_key = contact_to.GetContactKey() + + encrypted_message = HydrusMessageHandling.PackageMessageForDelivery( message, public_key ) + + try: + + to_connection = contact_to.GetConnection() + + to_connection.Post( 'message', message = encrypted_message, contact_key = contact_key ) + + status = 'sent' + + except: + + print( traceback.format_exc() ) + + HC.pubsub.pub( 'message', 'Sending a message failed: ' + os.linesep + traceback.format_exc() ) + + status = 'failed' + + + status_key = hashlib.sha256( contact_key + message_key ).digest() + + if not from_anon: service_status_updates.append( ( status_key, HydrusMessageHandling.PackageStatusForDelivery( ( message_key, contact_key, status ), my_public_key ) ) ) + + local_status_updates.append( ( contact_key, status ) ) + + + if not from_anon: from_connection.Post( 'message_statuses', contact_key = my_contact_key, statuses = service_status_updates ) + + self.Write( 'message_statuses', HC.LOW_PRIORITY, message_key, local_status_updates ) + + + self.Read( 'status_num_inbox', HC.LOW_PRIORITY ) + + + def DAEMONSynchroniseRepositories( self ): + + service_identifiers = self.Read( 'service_identifiers', HC.LOW_PRIORITY, HC.REPOSITORIES ) + + for service_identifier in service_identifiers: + + if HC.shutdown: raise Exception( 'Application shutting down!' ) + + try: + + name = service_identifier.GetName() + + service_type = service_identifier.GetType() + + try: service = self.Read( 'service', HC.LOW_PRIORITY, service_identifier ) + except: continue + + if service.CanUpdate(): + + connection = service.GetConnection() + + while service.CanUpdate(): + + if HC.shutdown: raise Exception( 'Application shutting down!' ) + + first_begin = service.GetFirstBegin() + + next_begin = service.GetNextBegin() + + if first_begin == 0: update_index_string = 'initial update' + else: update_index_string = 'update ' + str( ( ( next_begin - first_begin ) / HC.UPDATE_DURATION ) + 1 ) + + HC.pubsub.pub( 'service_status', 'Downloading and parsing ' + update_index_string + ' for ' + name ) + + update = connection.Get( 'update', begin = next_begin ) + + if service_type == HC.TAG_REPOSITORY: + + HC.pubsub.pub( 'service_status', 'Generating tags for ' + name ) + + self.Write( 'generate_tag_ids', HC.LOW_PRIORITY, update.GetTags() ) + + + updates = update.SplitIntoSubUpdates() + + num_updates = len( updates ) + + for ( i, sub_update ) in enumerate( updates ): + + wx.GetApp().WaitUntilGoodTimeToUseGUIThread() + + self.WaitUntilGoodTimeToUseDBThread() + + HC.pubsub.pub( 'service_status', 'Processing ' + update_index_string + ' part ' + str( i + 1 ) + '/' + str( num_updates ) + ' for ' + name ) + + self.Write( 'update', HC.LOW_PRIORITY, service_identifier, sub_update ) + + + HC.pubsub.pub( 'log_message', 'synchronise repositories daemon', 'successfully updated ' + service_identifier.GetName() + ' to ' + update_index_string + ' (' + HC.ConvertTimestampToPrettyTime( update.GetEnd() ) + ')' ) + + HC.pubsub.pub( 'notify_new_pending' ) + + now = int( time.time() ) + + for ( news, timestamp ) in update.GetNews(): + + if now - timestamp < 86400 * 7: HC.pubsub.pub( 'message', service_identifier.GetName() + ' at ' + time.ctime( timestamp ) + ':' + os.linesep + os.linesep + news ) + + + try: service = self.Read( 'service', HC.LOW_PRIORITY, service_identifier ) + except: break + + + HC.pubsub.pub( 'service_status', '' ) + + + except Exception as e: + + error_message = 'failed to update ' + name + ':' + os.linesep + os.linesep + unicode( e ) + + HC.pubsub.pub( 
'log_error', 'synchronise repositories daemon', error_message ) + + HC.pubsub.pub( 'service_status', error_message ) + + print( error_message ) + print( traceback.format_exc() ) + + + + + def ProcessRequest( self, request_type, request, request_args ): + + response = HC.ResponseContext( 200 ) + + if request_type == HC.GET: + + if request == 'file': + + hash = request_args[ 'hash' ] + + file = self.Read( 'file', HC.HIGH_PRIORITY, hash ) + + mime = HC.GetMimeFromString( file ) + + response = HC.ResponseContext( 200, mime = mime, body = file, filename = hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ] ) + + elif request == 'thumbnail': + + hash = request_args[ 'hash' ] + + thumbnail = self.Read( 'thumbnail', HC.HIGH_PRIORITY, hash ) + + mime = HC.GetMimeFromString( thumbnail ) + + response = HC.ResponseContext( 200, mime = mime, body = thumbnail, filename = hash.encode( 'hex' ) + '_thumbnail' + HC.mime_ext_lookup[ mime ] ) + + + elif request_type == HC.POST: pass # nothing here yet! + + return response + + + def _MainLoop_JobInternal( self, c, job ): + + action = job.GetAction() + + job_type = job.GetType() + + args = job.GetArgs() + + kwargs = job.GetKWArgs() + + if job_type in ( 'read', 'read_write' ): + + if job_type == 'read': c.execute( 'BEGIN DEFERRED' ) + else: c.execute( 'BEGIN IMMEDIATE' ) + + try: + + result = self._MainLoop_Read( c, action, args, kwargs ) + + c.execute( 'COMMIT' ) + + for ( topic, args, kwargs ) in self._pubsubs: HC.pubsub.pub( topic, *args, **kwargs ) + + if action != 'do_file_query': job.PutResult( result ) + + except Exception as e: + + c.execute( 'ROLLBACK' ) + + print( 'while attempting a read on the database, the hydrus client encountered the following problem:' ) + print( traceback.format_exc() ) + + ( exception_type, value, tb ) = sys.exc_info() + + new_e = type( e )( os.linesep.join( traceback.format_exception( exception_type, value, tb ) ) ) + + job.PutResult( new_e ) + + + elif job_type in ( 'write', 'write_special' ): + + if job_type == 'write': c.execute( 'BEGIN IMMEDIATE' ) + + try: + + self._MainLoop_Write( c, action, args, kwargs ) + + if job_type == 'write': c.execute( 'COMMIT' ) + + for ( topic, args, kwargs ) in self._pubsubs: HC.pubsub.pub( topic, *args, **kwargs ) + + except Exception as e: + + if job_type == 'write': c.execute( 'ROLLBACK' ) + + print( 'while attempting a write on the database, the hydrus client encountered the following problem:' ) + print( traceback.format_exc() ) + + action = job.GetAction() + + ( exception_type, value, tb ) = sys.exc_info() + + new_e = type( e )( os.linesep.join( traceback.format_exception( exception_type, value, tb ) ) ) + + if action not in ( 'import_file', 'import_file_from_page' ): HC.pubsub.pub( 'exception', new_e ) + + + + + def _MainLoop_Read( self, c, action, args, kwargs ): + + if action == '4chan_pass': result = self._Get4chanPass( c, *args, **kwargs ) + elif action == 'all_downloads': result = self._GetAllDownloads( c, *args, **kwargs ) + elif action == 'autocomplete_contacts': result = self._GetAutocompleteContacts( c, *args, **kwargs ) + elif action == 'autocomplete_tags': result = self._GetAutocompleteTags( c, *args, **kwargs ) + elif action == 'boorus': result = self._GetBoorus( c, *args, **kwargs ) + elif action == 'contact_names': result = self._GetContactNames( c, *args, **kwargs ) + elif action == 'do_file_query': result = self._DoFileQuery( c, *args, **kwargs ) + elif action == 'do_message_query': result = self._DoMessageQuery( c, *args, **kwargs ) + elif action == 'file': result = 
self._GetFile( *args, **kwargs ) + elif action == 'file_system_predicates': result = self._GetFileSystemPredicates( c, *args, **kwargs ) + elif action == 'identities_and_contacts': result = self._GetIdentitiesAndContacts( c, *args, **kwargs ) + elif action == 'identities': result = self._GetIdentities( c, *args, **kwargs ) + elif action == 'imageboards': result = self._GetImageboards( c, *args, **kwargs ) + elif action == 'md5_status': result = self._GetMD5Status( c, *args, **kwargs ) + elif action == 'media_results': result = self._GetMediaResultsFromHashes( c, *args, **kwargs ) + elif action == 'message_keys_to_download': result = self._GetMessageKeysToDownload( c, *args, **kwargs ) + elif action == 'message_system_predicates': result = self._GetMessageSystemPredicates( c, *args, **kwargs ) + elif action == 'messages_to_send': result = self._GetMessagesToSend( c, *args, **kwargs ) + elif action == 'news': result = self._GetNews( c, *args, **kwargs ) + elif action == 'nums_pending': result = self._GetNumsPending( c, *args, **kwargs ) + elif action == 'options': result = self._options + elif action == 'pending': result = self._GetPending( c, *args, **kwargs ) + elif action == 'ratings_filter': result = self._GetRatingsFilter( c, *args, **kwargs ) + elif action == 'ratings_media_result': result = self._GetRatingsMediaResult( c, *args, **kwargs ) + elif action == 'resolution': result = self._GetResolution( c, *args, **kwargs ) + elif action == 'service': result = self._GetService( c, *args, **kwargs ) + elif action == 'service_identifiers': result = self._GetServiceIdentifiers( c, *args, **kwargs ) + elif action == 'service_info': result = self._GetServiceInfo( c, *args, **kwargs ) + elif action == 'services': result = self._GetServices( c, *args, **kwargs ) + elif action == 'shutdown_timestamps': result = self._GetShutdownTimestamps( c, *args, **kwargs ) + elif action == 'status_num_inbox': result = self._DoStatusNumInbox( c, *args, **kwargs ) + elif action == 'tag_service_precedence': result = self._tag_service_precedence + elif action == 'thumbnail': result = self._GetThumbnail( *args, **kwargs ) + elif action == 'thumbnail_hashes_i_should_have': result = self._GetThumbnailHashesIShouldHave( c, *args, **kwargs ) + elif action == 'transport_message': result = self._GetTransportMessage( c, *args, **kwargs ) + elif action == 'transport_messages_from_draft': result = self._GetTransportMessagesFromDraft( c, *args, **kwargs ) + elif action == 'url_status': result = self._GetURLStatus( c, *args, **kwargs ) + else: raise Exception( 'db received an unknown read command: ' + action ) + + return result + + + def _MainLoop_Write( self, c, action, args, kwargs ): + + if action == '4chan_pass': self._Set4chanPass( c, *args, **kwargs ) + elif action == 'add_downloads': self._AddDownloads( c, *args, **kwargs ) + elif action == 'add_uploads': self._AddUploads( c, *args, **kwargs ) + elif action == 'archive_conversation': self._ArchiveConversation( c, *args, **kwargs ) + elif action == 'contact_associated': self._AssociateContact( c, *args, **kwargs ) + elif action == 'content_updates': self._ProcessContentUpdates( c, *args, **kwargs ) + elif action == 'copy_files': self._CopyFiles( *args, **kwargs ) + elif action == 'delete_conversation': self._DeleteConversation( c, *args, **kwargs ) + elif action == 'delete_draft': self._DeleteDraft( c, *args, **kwargs ) + elif action == 'delete_orphans': self._DeleteOrphans( c, *args, **kwargs ) + elif action == 'delete_pending': self._DeletePending( c, *args, **kwargs 
) + elif action == 'draft_message': self._DraftMessage( c, *args, **kwargs ) + elif action == 'export_files': self._ExportFiles( *args, **kwargs ) + elif action == 'fatten_autocomplete_cache': self._FattenAutocompleteCache( c, *args, **kwargs ) + elif action == 'flush_message_statuses': self._FlushMessageStatuses( c, *args, **kwargs ) + elif action == 'generate_tag_ids': self._GenerateTagIdsEfficiently( c, *args, **kwargs ) + elif action == 'import_file': self._ImportFile( c, *args, **kwargs ) + elif action == 'import_file_from_page': self._ImportFilePage( c, *args, **kwargs ) + elif action == 'inbox_conversation': self._InboxConversation( c, *args, **kwargs ) + elif action == 'message': self._AddMessage( c, *args, **kwargs ) + elif action == 'message_info_since': self._AddMessageInfoSince( c, *args, **kwargs ) + elif action == 'message_statuses': self._UpdateMessageStatuses( c, *args, **kwargs ) + elif action == 'petition_files': self._PetitionFiles( c, *args, **kwargs ) + elif action == 'reset_service': self._ResetService( c, *args, **kwargs ) + elif action == 'save_options': self._SaveOptions( c, *args, **kwargs ) + elif action == 'service_updates': self._AddServiceUpdates( c, *args, **kwargs ) + elif action == 'set_password': self._SetPassword( c, *args, **kwargs ) + elif action == 'set_tag_service_precedence': self._SetTagServicePrecedence( c, *args, **kwargs ) + elif action == 'thumbnails': self._AddThumbnails( c, *args, **kwargs ) + elif action == 'update': self._AddUpdate( c, *args, **kwargs ) + elif action == 'update_boorus': self._UpdateBoorus( c, *args, **kwargs ) + elif action == 'update_contacts': self._UpdateContacts( c, *args, **kwargs ) + elif action == 'update_imageboards': self._UpdateImageboards( c, *args, **kwargs ) + elif action == 'update_server_services': self._UpdateServerServices( c, *args, **kwargs ) + elif action == 'update_services': self._UpdateServices( c, *args, **kwargs ) + elif action == 'upload_pending': self._UploadPending( c, *args, **kwargs ) + elif action == 'vacuum': self._Vacuum() + else: raise Exception( 'db received an unknown write command: ' + action ) + + + def MainLoop( self ): + + ( db, c ) = self._GetDBCursor() + + while not ( HC.shutdown and self._jobs.empty() ): + + try: + + ( priority, job ) = self._jobs.get( timeout = 1 ) + + self._pubsubs = [] + + try: + + if isinstance( job, HC.JobServer ): + + ( service_identifier, account_identifier, ip, request_type, request, request_args, request_length ) = job.GetInfo() + + # for now, we don't care about most of this here + # the server has already verified the ip and so on + + # do the server first before you do it here! + # just leave process request for now + + else: self._MainLoop_JobInternal( c, job ) + + except: + + self._jobs.put( ( priority, job ) ) # couldn't lock db; put job back on queue + + time.sleep( 5 ) + + + except: pass # no jobs this second; let's see if we should shutdown + + + + def Read( self, action, priority, *args, **kwargs ): + + if action in ( 'service_info', 'system_predicates' ): job_type = 'read_write' + else: job_type = 'read' + + job = HC.JobInternal( action, job_type, *args, **kwargs ) + + if HC.shutdown: raise Exception( 'Application has shutdown!' 
) + + self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first + + if action != 'do_file_query': return job.GetResult() + + + def ReadFile( self, hash ): + + return self._GetFile( hash ) + + + def ReadThumbnail( self, hash, full_size = False ): + + return self._GetThumbnail( hash, full_size ) + + + def WaitUntilGoodTimeToUseDBThread( self ): + + while True: + + if HC.shutdown: raise Exception( 'Client shutting down!' ) + elif self._jobs.empty(): return + else: time.sleep( 0.04 ) + + + + def Write( self, action, priority, *args, **kwargs ): + + if action == 'vacuum': job_type = 'write_special' + else: job_type = 'write' + + job = HC.JobInternal( action, job_type, *args, **kwargs ) + + if HC.shutdown: raise Exception( 'Application has shutdown!' ) + + self._jobs.put( ( priority, job ) ) + + \ No newline at end of file diff --git a/include/ClientGUI.py b/include/ClientGUI.py new file mode 100755 index 00000000..6bb55d77 --- /dev/null +++ b/include/ClientGUI.py @@ -0,0 +1,2109 @@ +import httplib +import HydrusConstants as HC +import ClientConstants as CC +import ClientConstantsMessages +import ClientGUICommon +import ClientGUIDialogs +import ClientGUIMessages +import ClientGUIPages +import os +import random +import subprocess +import sys +import threading +import time +import traceback +import webbrowser +import wx + +# timers + +ID_TIMER_UPDATES = wx.NewId() + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +class FrameGUI( ClientGUICommon.Frame ): + + def __init__( self ): + + ClientGUICommon.Frame.__init__( self, None, title = wx.GetApp().PrepStringForDisplay( 'Hydrus Client' ) ) + + self.SetDropTarget( ClientGUICommon.FileDropTarget( self.ImportFiles ) ) + + self._statusbar = self.CreateStatusBar() + self._statusbar.SetFieldsCount( 4 ) + self._statusbar.SetStatusWidths( [ -1, 400, 200, 200 ] ) + + self._statusbar_media = '' + self._statusbar_service = '' + self._statusbar_inbox = '' + self._statusbar_downloads = '' + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + self.SetMinSize( ( 920, 600 ) ) + + self.Maximize() + + self._notebook = wx.Notebook( self ) + self._notebook.Bind( wx.EVT_MIDDLE_DOWN, self.EventNotebookMiddleClick ) + self._notebook.Bind( wx.EVT_RIGHT_DCLICK, self.EventNotebookMiddleClick ) + self._notebook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventNotebookPageChanged ) + + wx.GetApp().SetTopWindow( self ) + + self.RefreshAcceleratorTable() + + self.Bind( wx.EVT_MENU, self.EventMenu ) + self.Bind( wx.EVT_CLOSE, self.EventExit ) + self.Bind( wx.EVT_SET_FOCUS, self.EventFocus ) + + HC.pubsub.sub( self, 'NewCompose', 'new_compose_frame' ) + HC.pubsub.sub( self, 'NewPageImportBooru', 'new_page_import_booru' ) + HC.pubsub.sub( self, 'NewPageImportGallery', 'new_page_import_gallery' ) + HC.pubsub.sub( self, 'NewPageImportHDD', 'new_hdd_import' ) + HC.pubsub.sub( self, 'NewPageImportThreadWatcher', 'new_page_import_thread_watcher' ) + HC.pubsub.sub( self, 'NewPageImportURL', 'new_page_import_url' ) + HC.pubsub.sub( self, 'NewPageMessages', 
'new_page_messages' ) + HC.pubsub.sub( self, 'NewPagePetitions', 'new_page_petitions' ) + HC.pubsub.sub( self, 'NewPageQuery', 'new_page_query' ) + HC.pubsub.sub( self, 'NewPageThreadDumper', 'new_thread_dumper' ) + HC.pubsub.sub( self, 'NewSimilarTo', 'new_similar_to' ) + HC.pubsub.sub( self, 'RefreshMenuBar', 'refresh_menu_bar' ) + HC.pubsub.sub( self, 'RefreshMenuBar', 'notify_new_pending' ) + HC.pubsub.sub( self, 'RefreshMenuBar', 'notify_new_permissions' ) + HC.pubsub.sub( self, 'RefreshMenuBar', 'notify_new_services' ) + HC.pubsub.sub( self, 'RefreshAcceleratorTable', 'options_updated' ) + HC.pubsub.sub( self, 'RefreshStatusBar', 'refresh_status' ) + HC.pubsub.sub( self, 'SetDownloadsStatus', 'downloads_status' ) + HC.pubsub.sub( self, 'SetInboxStatus', 'inbox_status' ) + HC.pubsub.sub( self, 'SetServiceStatus', 'service_status' ) + + self._NewPageQuery( CC.LOCAL_FILE_SERVICE_IDENTIFIER ) + + self.RefreshMenuBar() + + self._RefreshStatusBar() + + self.Show( True ) + + + def _AboutWindow( self ): + + aboutinfo = wx.AboutDialogInfo() + + aboutinfo.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + aboutinfo.SetName( 'hydrus client' ) + aboutinfo.SetVersion( str( HC.SOFTWARE_VERSION ) ) + aboutinfo.SetDescription( CC.CLIENT_DESCRIPTION ) + + with open( HC.BASE_DIR + os.path.sep + 'license.txt', 'rb' ) as f: license = f.read() + + aboutinfo.SetLicense( license ) + + aboutinfo.SetDevelopers( [ 'Anonymous' ] ) + aboutinfo.SetWebSite( 'http://hydrus.x10.mx/' ) + + wx.AboutBox( aboutinfo ) + + + def _AccountInfo( self, service_identifier ): + + with wx.TextEntryDialog( self, 'Access key' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + subject_access_key = dlg.GetValue().decode( 'hex' ) + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + account_info = connection.Get( 'accountinfo', subject_access_key = subject_access_key.encode( 'hex' ) ) + + wx.MessageBox( str( account_info ) ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + + def _AutoRepoSetup( self ): + + message = 'This will attempt to set up your client with my repositories\' credentials, letting you tag on the public tag repository and see some files.' + + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: + + try: + + edit_log = [] + + tag_repo_identifier = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.TAG_REPOSITORY, 'public tag repository' ) + tag_repo_credentials = CC.Credentials( '98.214.1.156', 45871, '4a285629721ca442541ef2c15ea17d1f7f7578b0c3f4f5f2a05f8f0ab297786f'.decode( 'hex' ) ) + + edit_log.append( ( 'add', ( tag_repo_identifier, tag_repo_credentials, None ) ) ) + + file_repo_identifier = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.FILE_REPOSITORY, 'read-only art file repository' ) + file_repo_credentials = CC.Credentials( '98.214.1.156', 45872, '8f8a3685abc19e78a92ba61d84a0482b1cfac176fd853f46d93fe437a95e40a5'.decode( 'hex' ) ) + + edit_log.append( ( 'add', ( file_repo_identifier, file_repo_credentials, None ) ) ) + + wx.GetApp().Write( 'update_services', edit_log ) + + wx.MessageBox( 'Done!' ) + + except: wx.MessageBox( traceback.format_exc() ) + + + + + def _AutoServerSetup( self ): + + message = 'This will attempt to start the server in the same install directory as this client, initialise it, and store the resultant admin accounts.' 
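+ # the steps below: check whether something is already listening on the admin port, launch server.pyw/server.exe if not, register a 'local server admin' service, then create default tag and file repository services through the admin connection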
+ + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: + + try: + + try: + + connection = httplib.HTTPConnection( '127.0.0.1', HC.DEFAULT_SERVER_ADMIN_PORT ) + + connection.connect() + + connection.close() + + already_running = True + + except: + + already_running = False + + + if already_running: + + message = 'The server appears to be already running. Either that, or something else is using port ' + str( HC.DEFAULT_SERVER_ADMIN_PORT ) + '.' + os.linesep + 'Would you like to try to initialise the server that is already running?' + + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_NO: return + + + else: + + try: + + my_scriptname = sys.argv[0] + + if my_scriptname.endswith( 'pyw' ): subprocess.Popen( [ 'pythonw', HC.BASE_DIR + os.path.sep + 'server.pyw' ] ) + else: + + # The problem here is that, for mystical reasons, a PyInstaller exe can't launch another using subprocess, so we do it via explorer. + + subprocess.Popen( [ 'explorer', HC.BASE_DIR + os.path.sep + 'server.exe' ] ) + + + time.sleep( 5 ) # give it time to init its db + + except: + + wx.MessageBox( 'I tried to start the server, but something failed!' ) + wx.MessageBox( traceback.format_exc() ) + return + + + + edit_log = [] + + admin_service_identifier = HC.ClientServiceIdentifier( os.urandom( 32 ), HC.SERVER_ADMIN, 'local server admin' ) + admin_service_credentials = CC.Credentials( '127.0.0.1', HC.DEFAULT_SERVER_ADMIN_PORT, '' ) + + edit_log.append( ( 'add', ( admin_service_identifier, admin_service_credentials, None ) ) ) + + wx.GetApp().Write( 'update_services', edit_log ) + + i = 0 + + while True: + + time.sleep( i + 1 ) + + try: + + service = wx.GetApp().Read( 'service', admin_service_identifier ) + + break + + except: pass + + i += 1 + + if i > 5: + + wx.MessageBox( 'For some reason, I could not add the new server to the db! Perhaps it is very busy. Please contact the administrator, or sort it out yourself!' ) + + return + + + + connection = service.GetConnection() + + connection.Get( 'init' ) + + edit_log = [] + + tag_service_identifier = HC.ServerServiceIdentifier( HC.TAG_REPOSITORY, HC.DEFAULT_SERVICE_PORT ) + file_service_identifier = HC.ServerServiceIdentifier( HC.FILE_REPOSITORY, HC.DEFAULT_SERVICE_PORT + 1 ) + + edit_log.append( ( HC.ADD, tag_service_identifier ) ) + edit_log.append( ( HC.ADD, file_service_identifier ) ) + + connection.Post( 'servicesmodification', edit_log = edit_log ) + + wx.GetApp().Write( 'update_server_services', admin_service_identifier, edit_log ) + + wx.MessageBox( 'Done! Check services->review services to see your new server and its services.' ) + + except: wx.MessageBox( traceback.format_exc() ) + + + + + def _BackupService( self, service_identifier ): + + message = 'This will tell the service to lock and copy its database files. It will not be able to serve any requests until the operation is complete.' + + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + with wx.BusyCursor(): connection.Post( 'backup' ) + + wx.MessageBox( 'Done!' 
) + + + + + def _CloseCurrentPage( self ): + + selection = self._notebook.GetSelection() + + if selection != wx.NOT_FOUND: + + page = self._notebook.GetPage( selection ) + + try: page.TryToClose() + except: return + + self._notebook.DeletePage( selection ) + + + + def _DeletePending( self, service_identifier ): + + try: + + with ClientGUIDialogs.DialogYesNo( self, 'Are you sure you want to delete the pending data for ' + service_identifier.GetName() + '?' ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'delete_pending', service_identifier ) + + + except: wx.MessageBox( traceback.format_exc() ) + + + def _News( self, service_identifier ): + + try: + + with ClientGUIDialogs.DialogNews( self, service_identifier ) as dlg: dlg.ShowModal() + + except: wx.MessageBox( traceback.format_exc() ) + + + def _Stats( self, service_identifier ): + + try: + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + stats = connection.Get( 'stats' ) + + wx.MessageBox( str( stats ) ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def _EditServices( self ): + + try: + + with ClientGUIDialogs.DialogManageServices( self ) as dlg: dlg.ShowModal() + + except: wx.MessageBox( traceback.format_exc() ) + + + def _FetchIP( self, service_identifier ): + + with wx.TextEntryDialog( self, 'File Hash' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + hash = dlg.GetValue().decode( 'hex' ) + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + with wx.BusyCursor(): ( ip, timestamp ) = connection.Get( 'ip', hash = hash.encode( 'hex' ) ) + + message = 'File Hash: ' + hash.encode( 'hex' ) + os.linesep + 'Uploader\'s IP: ' + ip + os.linesep + 'Upload Time (GMT): ' + time.asctime( time.gmtime( int( timestamp ) ) ) + + print( message ) + + wx.MessageBox( message + os.linesep + 'This has been written to the log.' 
) + + except Exception as e: + wx.MessageBox( traceback.format_exc() ) + wx.MessageBox( unicode( e ) ) + + + + + def _ImportFiles( self, paths = [] ): + + try: + + with ClientGUIDialogs.DialogSelectLocalFiles( self, paths ) as dlg: dlg.ShowModal() + + except Exception as e: + + wx.MessageBox( traceback.format_exc() ) + wx.MessageBox( unicode( e ) ) + + + def _Manage4chanPass( self ): + + try: + + with ClientGUIDialogs.DialogManage4chanPass( self ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def _ManageAccountTypes( self, service_identifier ): + + try: + + with ClientGUIDialogs.DialogManageAccountTypes( self, service_identifier ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def _ManageBoorus( self ): + + try: + + with ClientGUIDialogs.DialogManageBoorus( self ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() ) + + + def _ManageContacts( self ): + + try: + + with ClientGUIDialogs.DialogManageContacts( self ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() ) + + + def _ManageImageboards( self ): + + try: + + with ClientGUIDialogs.DialogManageImageboards( self ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() ) + + + def _ManageOptions( self, service_identifier ): + + try: + + if service_identifier.GetType() == HC.LOCAL_FILE: + + with ClientGUIDialogs.DialogManageOptionsLocal( self ) as dlg: dlg.ShowModal() + + else: + + if service_identifier.GetType() == HC.FILE_REPOSITORY: + + with ClientGUIDialogs.DialogManageOptionsFileRepository( self, service_identifier ) as dlg: dlg.ShowModal() + + elif service_identifier.GetType() == HC.TAG_REPOSITORY: + + with ClientGUIDialogs.DialogManageOptionsTagRepository( self, service_identifier ) as dlg: dlg.ShowModal() + + elif service_identifier.GetType() == HC.MESSAGE_DEPOT: + + with ClientGUIDialogs.DialogManageOptionsMessageDepot( self, service_identifier ) as dlg: dlg.ShowModal() + + elif service_identifier.GetType() == HC.SERVER_ADMIN: + + with ClientGUIDialogs.DialogManageOptionsServerAdmin( self, service_identifier ) as dlg: dlg.ShowModal() + + + + except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() ) + + + def _ManageServices( self, service_identifier ): + + try: + + with ClientGUIDialogs.DialogManageServer( self, service_identifier ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() ) + + + def _ManageTagServicePrecedence( self ): + + try: + + with ClientGUIDialogs.DialogManageTagServicePrecedence( self ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) + traceback.format_exc() ) + + + def _ModifyAccount( self, service_identifier ): + + service = wx.GetApp().Read( 'service', service_identifier ) + + with wx.TextEntryDialog( self, 'Enter the access key for the account to be modified' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: access_key = dlg.GetValue().decode( 'hex' ) + except: + + wx.MessageBox( 'Could not parse that access key' ) + + return + + + subject_identifiers = ( HC.AccountIdentifier( access_key = access_key ), ) + + try: + + with ClientGUIDialogs.DialogModifyAccounts( self, service_identifier, subject_identifiers ) as dlg2: dlg2.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + + def _NewAccounts( self, service_identifier ): + + try: + + with 
ClientGUIDialogs.DialogInputNewAccounts( self, service_identifier ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def _NewPageImportBooru( self ): + + try: + + with ClientGUIDialogs.DialogSelectBooru( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + booru = dlg.GetBooru() + + new_page = ClientGUIPages.PageImportBooru( self._notebook, booru ) + + self._notebook.AddPage( new_page, booru.GetName(), select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + new_page.SetSearchFocus() + + + + except Exception as e: + wx.MessageBox( traceback.format_exc() ) + wx.MessageBox( unicode( e ) ) + + + def _NewPageImportGallery( self, name ): + + try: + + if name == 'deviant art by artist': new_page = ClientGUIPages.PageImportDeviantArt( self._notebook ) + elif name == 'hentai foundry by artist': new_page = ClientGUIPages.PageImportHentaiFoundryArtist( self._notebook ) + elif name == 'hentai foundry by tags': new_page = ClientGUIPages.PageImportHentaiFoundryTags( self._notebook ) + + self._notebook.AddPage( new_page, name, select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + new_page.SetSearchFocus() + + except Exception as e: + wx.MessageBox( traceback.format_exc() ) + wx.MessageBox( unicode( e ) ) + + + def _NewPageImportThreadWatcher( self ): + + try: + + new_page = ClientGUIPages.PageImportThreadWatcher( self._notebook ) + + self._notebook.AddPage( new_page, 'thread watcher', select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + new_page.SetSearchFocus() + + except Exception as e: + wx.MessageBox( traceback.format_exc() ) + wx.MessageBox( unicode( e ) ) + + + def _NewPageImportURL( self ): + + new_page = ClientGUIPages.PageImportURL( self._notebook ) + + self._notebook.AddPage( new_page, 'download', select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + new_page.SetSearchFocus() + + + def _NewPageLog( self ): + + new_page = ClientGUIPages.PageLog( self._notebook ) + + self._notebook.AddPage( new_page, 'log', select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + + def _NewPageMessages( self, identity ): + + new_page = ClientGUIPages.PageMessages( self._notebook, identity ) + + self._notebook.AddPage( new_page, identity.GetName() + ' messages', select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + new_page.SetSearchFocus() + + + def _NewPagePetitions( self, service_identifier = None ): + + if service_identifier is None: service_identifier = ClientGUIDialogs.SelectServiceIdentifier( service_types = HC.REPOSITORIES, permission = HC.RESOLVE_PETITIONS ) + + if service_identifier is not None: + + service = wx.GetApp().Read( 'service', service_identifier ) + + account = service.GetAccount() + + if not account.HasPermission( HC.RESOLVE_PETITIONS ): return + + self._notebook.AddPage( ClientGUIPages.PagePetitions( self._notebook, service_identifier ), service_identifier.GetName() + ' petitions', select = True ) + + + + def _NewPageQuery( self, service_identifier, tags = [] ): + + if service_identifier is None: service_identifier = ClientGUIDialogs.SelectServiceIdentifier( service_types = ( HC.FILE_REPOSITORY, ) ) + + if service_identifier is not None: + + new_page = ClientGUIPages.PageQuery( self._notebook, service_identifier, tags ) + + self._notebook.AddPage( new_page, 'files', select = True ) + + new_page.SetSearchFocus() + + + + def _OpenExportFolder( self ): 
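+ # open the user's export folder in the OS file browser; the path is normalised first because explorer dislikes forward slashes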
+ + export_path = HC.ConvertPortablePathToAbsPath( self._options[ 'export_path' ] ) + + if export_path is None: wx.MessageBox( 'Export folder is missing or not set.' ) + else: + + export_path = os.path.normpath( export_path ) # windows complains about those forward slashes when launching from the command line + + if 'Windows' in os.environ.get( 'os', '' ): subprocess.Popen( [ 'explorer', export_path ] ) + else: subprocess.Popen( [ 'xdg-open', export_path ] ) # assumes a freedesktop-style opener on non-Windows platforms; OS X would want 'open' + + + + def _PostNews( self, service_identifier ): + + with wx.TextEntryDialog( self, 'Enter the news you would like to post.' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + news = dlg.GetValue() + + try: + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + with wx.BusyCursor(): connection.Post( 'news', news = news ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + + def _RefreshStatusBar( self ): + + page = self._notebook.GetCurrentPage() + + if page is None: media_status = '' + else: media_status = page.GetPrettyStatus() + + self._statusbar_media = media_status + + self._statusbar.SetStatusText( self._statusbar_media, number = 0 ) + self._statusbar.SetStatusText( self._statusbar_service, number = 1 ) + self._statusbar.SetStatusText( self._statusbar_inbox, number = 2 ) + self._statusbar.SetStatusText( self._statusbar_downloads, number = 3 ) + + + def _ReviewServices( self ): + + try: FrameReviewServices() + except: wx.MessageBox( traceback.format_exc() ) + + + def _SetPassword( self ): + + message = '''You can set a password to be asked for whenever the client starts. + +Though not foolproof, it will stop noobs from easily seeing your files if you leave your machine unattended. + +Do not ever forget your password! If you do, you'll have to manually insert a yaml-dumped python dictionary into a sqlite database or recompile from source to regain easy access. This is not trivial. + +The password is cleartext here but obscured in the entry dialog. Enter a blank password to remove.''' + + with wx.TextEntryDialog( self, message, 'Enter new password' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + password = dlg.GetValue() + + if password == '': password = None + + wx.GetApp().Write( 'set_password', password ) + + + + + def _SetSearchFocus( self ): + + page = self._notebook.GetCurrentPage() + + if page is not None: page.SetSearchFocus() + + + def _SetSynchronisedWait( self ): + + page = self._notebook.GetCurrentPage() + + if page is not None: page.SetSynchronisedWait() + + + def _UploadPending( self, service_identifier ): + + job_key = os.urandom( 32 ) + + if service_identifier.GetType() == HC.TAG_REPOSITORY: cancel_event = None + else: cancel_event = threading.Event() + + with ClientGUIDialogs.DialogProgress( self, job_key, cancel_event ) as dlg: + + wx.GetApp().Write( 'upload_pending', service_identifier, job_key, cancel_event ) + + dlg.ShowModal() + + + + def _VacuumDatabase( self ): + + message = 'This will rebuild the database, rewriting all indices and tables to be contiguous, optimising most operations. If you have a large database, it will take some time.'
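# the 'vacuum' write presumably maps straight onto sqlite's VACUUM, which rewrites the
# whole database file into contiguous pages. a minimal sketch of the underlying call,
# assuming a plain sqlite3 connection (the 'client.db' path and connection handling are
# illustrative only; the client does this work on its db thread, not here):
#
#     import sqlite3
#
#     db = sqlite3.connect( 'client.db' )
#     db.isolation_level = None # VACUUM cannot run inside a transaction
#     db.execute( 'VACUUM' )
#     db.close()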
+ + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'vacuum' ) + + + + def EventExit( self, event ): + + page = self._notebook.GetCurrentPage() + + if page is not None and self.IsMaximized(): + + ( self._options[ 'hpos' ], self._options[ 'vpos' ] ) = page.GetSashPositions() + + with wx.BusyCursor(): wx.GetApp().Write( 'save_options' ) + + + for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]: + + try: page.TryToClose() + except: return + + + self.Hide() + + # for some insane reason, the read makes the controller block until the writes are done!??! + # hence the hide, to make it appear the destroy is actually happening on time + + wx.GetApp().MaintainDB() + + self.Destroy() + + + def EventFocus( self, event ): + + page = self._notebook.GetCurrentPage() + + if page is not None: page.SetMediaFocus() + + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'account_info': self._AccountInfo( data ) + elif command == 'auto_repo_setup': self._AutoRepoSetup() + elif command == 'auto_server_setup': self._AutoServerSetup() + elif command == 'backup_service': self._BackupService( data ) + elif command == 'clear_caches': wx.GetApp().ClearCaches() + elif command == 'close_page': self._CloseCurrentPage() + elif command == 'debug_options': wx.MessageBox( str( wx.GetApp().Read( 'options' ) ) ) + elif command == 'delete_pending': self._DeletePending( data ) + elif command == 'edit_services': self._EditServices() + elif command == 'exit': self.EventExit( event ) + elif command == 'fetch_ip': self._FetchIP( data ) + elif command == 'forum': webbrowser.open( 'http://hydrus.x10.mx/forum' ) + elif command == 'help': webbrowser.open( 'file://' + HC.BASE_DIR + '/help/index.html' ) + elif command == 'help_about': self._AboutWindow() + elif command == 'help_shortcuts': wx.MessageBox( CC.SHORTCUT_HELP ) + elif command == 'import': self._ImportFiles() + elif command == 'manage_4chan_pass': self._Manage4chanPass() + elif command == 'manage_account_types': self._ManageAccountTypes( data ) + elif command == 'manage_boorus': self._ManageBoorus() + elif command == 'manage_contacts': self._ManageContacts() + elif command == 'manage_imageboards': self._ManageImageboards() + elif command == 'manage_services': self._ManageServices( data ) + elif command == 'manage_tag_service_precedence': self._ManageTagServicePrecedence() + elif command == 'modify_account': self._ModifyAccount( data ) + elif command == 'new_accounts': self._NewAccounts( data ) + elif command == 'new_import_booru': self._NewPageImportBooru() + elif command == 'new_import_thread_watcher': self._NewPageImportThreadWatcher() + elif command == 'new_import_url': self._NewPageImportURL() + elif command == 'new_log_page': self._NewPageLog() + elif command == 'new_messages_page': self._NewPageMessages( data ) + elif command == 'new_page': FramePageChooser() + elif command == 'new_page_query': self._NewPageQuery( data ) + elif command == 'news': self._News( data ) + elif command == 'open_export_folder': self._OpenExportFolder() + elif command == 'options': self._ManageOptions( data ) + elif command == 'petitions': self._NewPagePetitions( data ) + elif command == 'post_news': self._PostNews( data ) + elif command == 'refresh': + + page = self._notebook.GetCurrentPage() + + if page is not None: page.RefreshQuery() + + elif command 
== 'review_services': self._ReviewServices() + elif command == 'show_hide_splitters': + + page = self._notebook.GetCurrentPage() + + if page is not None: page.ShowHideSplit() + + elif command == 'set_password': self._SetPassword() + elif command == 'set_search_focus': self._SetSearchFocus() + elif command == 'site': webbrowser.open( 'http://hydrus.x10.mx/' ) + elif command == 'stats': self._Stats( data ) + elif command == 'synchronised_wait_switch': self._SetSynchronisedWait() + elif command == 'tumblr': webbrowser.open( 'http://hydrus.tumblr.com/' ) + elif command == 'twitter': webbrowser.open( 'http://twitter.com/#!/hydrusnetwork' ) + elif command == 'upload_pending': self._UploadPending( data ) + elif command == 'vacuum_db': self._VacuumDatabase() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + wx.MessageBox( traceback.format_exc() ) + + + + + def EventNotebookMiddleClick( self, event ): + + ( tab_index, flags ) = self._notebook.HitTest( ( event.GetX(), event.GetY() ) ) + + page = self._notebook.GetPage( tab_index ) + + try: page.TryToClose() + except: return + + self._notebook.DeletePage( tab_index ) + + + def EventNotebookPageChanged( self, event ): + + old_selection = event.GetOldSelection() + selection = event.GetSelection() + + if old_selection != -1: self._notebook.GetPage( old_selection ).PageHidden() + + if selection != -1: self._notebook.GetPage( selection ).PageShown() + + self._RefreshStatusBar() + + event.Skip( True ) + + + def ImportFiles( self, paths ): self._ImportFiles( paths ) + + def NewCompose( self, identity ): + + draft_key = os.urandom( 32 ) + conversation_key = draft_key + subject = '' + contact_from = identity + contacts_to = [] + recipients_visible = False + body = '' + attachments = [] + + empty_draft_message = ClientConstantsMessages.DraftMessage( draft_key, conversation_key, subject, contact_from, contacts_to, recipients_visible, body, attachments, is_new = True ) + + try: FrameComposeMessage( empty_draft_message ) + except: wx.MessageBox( traceback.format_exc() ) + + def NewPageImportBooru( self ): self._NewPageImportBooru() + + def NewPageImportGallery( self, name ): self._NewPageImportGallery( name ) + + def NewPageImportHDD( self, paths, **kwargs ): + + new_page = ClientGUIPages.PageImportHDD( self._notebook, paths, **kwargs ) + + self._notebook.AddPage( new_page, 'import', select = True ) + + self._notebook.SetSelection( self._notebook.GetPageCount() - 1 ) + + + def NewPageImportThreadWatcher( self ): self._NewPageImportThreadWatcher() + + def NewPageImportURL( self ): self._NewPageImportURL() + + def NewPageMessages( self, identity ): self._NewPageMessages( identity ) + + def NewPagePetitions( self, service_identifier ): self._NewPagePetitions( service_identifier ) + + def NewPageQuery( self, service_identifier, tags = [] ): self._NewPageQuery( service_identifier, tags = tags ) + + def NewPageThreadDumper( self, hashes ): + + with ClientGUIDialogs.DialogSelectImageboard( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + imageboard = dlg.GetImageboard() + + new_page = ClientGUIPages.PageThreadDumper( self._notebook, imageboard, hashes ) + + self._notebook.AddPage( new_page, 'imageboard dumper', select = True ) + + new_page.SetSearchFocus() + + + + + def NewSimilarTo( self, file_service_identifier, hash ): self._NewPageQuery( file_service_identifier, [ 'system:similar_to=' + hash.encode( 'hex' ) + u'\u2248' + '5' ] ) + + def RefreshAcceleratorTable( self ): + + interested_actions = [ 'archive', 'close_page', 
'filter', 'ratings_filter', 'manage_ratings', 'manage_tags', 'new_page', 'refresh', 'set_search_focus', 'show_hide_splitters', 'synchronised_wait_switch' ] + + entries = [] + + for ( modifier, key_dict ) in self._options[ 'shortcuts' ].items(): entries.extend( [ ( modifier, key, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) for ( key, action ) in key_dict.items() if action in interested_actions ] ) + + self.SetAcceleratorTable( wx.AcceleratorTable( entries ) ) + + + def RefreshMenuBar( self ): + + p = wx.GetApp().PrepStringForDisplay + + services = wx.GetApp().Read( 'services' ) + + tag_repositories = [ service for service in services if service.GetServiceIdentifier().GetType() == HC.TAG_REPOSITORY ] + + tag_service_identifiers = [ repository.GetServiceIdentifier() for repository in tag_repositories ] + download_tag_service_identifiers = [ repository.GetServiceIdentifier() for repository in tag_repositories if repository.GetAccount().HasPermission( HC.GET_DATA ) ] + petition_resolve_tag_service_identifiers = [ repository.GetServiceIdentifier() for repository in tag_repositories if repository.GetAccount().HasPermission( HC.RESOLVE_PETITIONS ) ] + admin_tag_service_identifiers = [ repository.GetServiceIdentifier() for repository in tag_repositories if repository.GetAccount().HasPermission( HC.GENERAL_ADMIN ) ] + + file_repositories = [ service for service in services if service.GetServiceIdentifier().GetType() == HC.FILE_REPOSITORY ] + + file_service_identifiers = [ repository.GetServiceIdentifier() for repository in file_repositories ] + download_file_service_identifiers = [ repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.GET_DATA ) ] + petition_resolve_file_service_identifiers = [ repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.RESOLVE_PETITIONS ) ] + admin_file_service_identifiers = [ repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.GENERAL_ADMIN ) ] + + message_depots = [ service for service in services if service.GetServiceIdentifier().GetType() == HC.MESSAGE_DEPOT ] + + admin_message_depots = [ message_depot.GetServiceIdentifier() for message_depot in message_depots if message_depot.GetAccount().HasPermission( HC.GENERAL_ADMIN ) ] + + identities = wx.GetApp().Read( 'identities' ) + + servers_admin = [ service for service in services if service.GetServiceIdentifier().GetType() == HC.SERVER_ADMIN ] + + server_admin_identifiers = [ service.GetServiceIdentifier() for service in servers_admin if service.GetAccount().HasPermission( HC.GENERAL_ADMIN ) ] + + nums_pending = wx.GetApp().Read( 'nums_pending' ) + + menu = wx.MenuBar() + + file = wx.Menu() + file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'import' ), p( '&Import Files' ), p( 'Add new files to the database.' ) ) + file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'open_export_folder' ), p( 'Open E&xport Folder' ), p( 'Open the export folder so you can easily access files you have exported.' ) ) + file.AppendSeparator() + file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'options', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), p( '&Options' ) ) + file.AppendSeparator() + file.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'exit' ), p( '&Exit' ) ) + + menu.Append( file, p( '&File' ) ) + + view = wx.Menu() + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'refresh' ), p( '&Refresh' ), p( 'Refresh the current view.' 
) ) + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'show_hide_splitters' ), p( 'Show/Hide Splitters' ), p( 'Show or hide the current page\'s splitters.' ) ) + view.AppendSeparator() + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page' ), p( 'Pick a New &Page' ), p( 'Pick a new page.' ) ) + view.AppendSeparator() + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page_query', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), p( '&New Local Search' ), p( 'Open a new search tab for your files' ) ) + for s_i in file_service_identifiers: view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_page_query', s_i ), p( 'New ' + s_i.GetName() + ' Search' ), p( 'Open a new search tab for ' + s_i.GetName() + '.' ) ) + if len( petition_resolve_tag_service_identifiers ) > 0 or len( petition_resolve_file_service_identifiers ) > 0: + + view.AppendSeparator() + for s_i in petition_resolve_tag_service_identifiers: view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'petitions', s_i ), p( s_i.GetName() + ' Petitions' ), p( 'Open a petition tab for ' + s_i.GetName() ) ) + for s_i in petition_resolve_file_service_identifiers: view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'petitions', s_i ), p( s_i.GetName() + ' Petitions' ), p( 'Open a petition tab for ' + s_i.GetName() ) ) + + view.AppendSeparator() + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_import_url' ), p( '&New URL Download Page' ), p( 'Open a new tab to download files from galleries or threads.' ) ) + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_import_booru' ), p( '&New Booru Download Page' ), p( 'Open a new tab to download files from a booru.' ) ) + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_import_thread_watcher' ), p( '&New Thread Watcher Page' ), p( 'Open a new tab to watch a thread.' ) ) + view.AppendSeparator() + if len( identities ) > 0: + + for identity in identities: view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_messages_page', identity ), p( identity.GetName() + ' Message Page' ), p( 'Open a new tab to review the messages for ' + identity.GetName() ) ) + view.AppendSeparator() + + view.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_log_page' ), p( '&New Log Page' ), p( 'Open a new tab to show recently logged events.' ) ) + + menu.Append( view, p( '&View' ) ) + + database = wx.Menu() + database.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'set_password' ), p( 'Set a &Password' ), p( 'Set a password for the database so only you can access it.' ) ) + database.AppendSeparator() + #database.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'reindex_db' ), '&reindex', 'reindex the database.' ) + database.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'vacuum_db' ), p( '&Vacuum' ), p( 'Rebuild the Database.' ) ) + database.AppendSeparator() + database.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'clear_caches' ), p( '&Clear Caches' ), p( 'Fully clear the fullscreen, preview and thumbnail caches.' ) ) + + menu.Append( database, p( '&Database' ) ) + + if len( nums_pending ) > 0: + + pending = wx.Menu() + + for ( service_identifier, num_pending ) in nums_pending.items(): + + if num_pending > 0: + + service_type = service_identifier.GetType() + + submenu = wx.Menu() + + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'upload_pending', service_identifier ), p( '&Upload' ), p( 'Upload ' + service_identifier.GetName() + '\'s Pending and Petitions.' 
) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete_pending', service_identifier ), p( '&Forget' ), p( 'Clear ' + service_identifier.GetName() + '\'s Pending and Petitions.' ) ) + + pending.AppendMenu( CC.ID_NULL, p( service_identifier.GetName() + ' Pending (' + HC.ConvertIntToPrettyString( num_pending ) + ')' ), submenu ) + + + + num_pending_total = sum( nums_pending.values() ) + + menu.Append( pending, p( '&Pending (' + HC.ConvertIntToPrettyString( num_pending_total ) + ')' ) ) + + + services = wx.Menu() + services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'review_services' ), p( '&Review Services' ), p( 'Review your services.' ) ) + services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'edit_services' ), p( '&Add, Remove or Edit Services' ), p( 'Edit your services.' ) ) + if len( download_tag_service_identifiers ) > 1: services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_tag_service_precedence' ), p( '&Manage Tag Service Precedence' ), p( 'Change the order in which tag repositories\' taxonomies will be added to the database.' ) ) + services.AppendSeparator() + services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_boorus' ), p( 'Manage &Boorus' ), p( 'Change the html parsing information for boorus to download from.' ) ) + services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_imageboards' ), p( 'Manage &Imageboards' ), p( 'Change the html POST form information for imageboards to dump to.' ) ) + services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_4chan_pass' ), p( 'Manage &4chan Pass' ), p( 'Set up your 4chan pass, so you can dump without having to fill in a captcha.' ) ) + services.AppendSeparator() + services.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_contacts' ), p( 'Manage &Contacts and Identities' ), p( 'Change the names and addresses of the people you talk to.' ) ) + services.AppendSeparator() + submenu = wx.Menu() + for s_i in tag_service_identifiers: submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'news', s_i ), p( s_i.GetName() ), p( 'Review ' + s_i.GetName() + '\'s past news.' ) ) + for s_i in file_service_identifiers: submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'news', s_i ), p( s_i.GetName() ), p( 'Review ' + s_i.GetName() + '\'s past news.' ) ) + services.AppendMenu( CC.ID_NULL, p( 'News' ), submenu ) + + menu.Append( services, p( '&Services' ) ) + + if len( admin_tag_service_identifiers ) > 0 or len( admin_file_service_identifiers ) > 0 or len( server_admin_identifiers ) > 0: + + admin = wx.Menu() + + for s_i in admin_tag_service_identifiers: + + submenu = wx.Menu() + + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_accounts', s_i ), p( 'Create New &Accounts' ), p( 'Create new accounts.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_account_types', s_i ), p( '&Manage Account Types' ), p( 'Add, edit and delete account types for the tag repository.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'modify_account', s_i ), p( '&Modify an Account' ), p( 'Modify a specific account\'s type and expiration.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'account_info', s_i ), p( '&Get an Account\'s Info' ), p( 'Fetch information about an account from the tag repository.' ) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'options', s_i ), p( '&Options' ), p( 'Set the tag repository\'s options.' 
) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'stats', s_i ), p( '&Get Stats' ), p( 'Fetch operating statistics from the tag repository.' ) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'post_news', s_i ), p( '&Post News' ), p( 'Post a news item to the tag repository.' ) ) + + admin.AppendMenu( CC.ID_NULL, p( s_i.GetName() ), submenu ) + + + for s_i in admin_file_service_identifiers: + + submenu = wx.Menu() + + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_accounts', s_i ), p( 'Create New &Accounts' ), p( 'Create new accounts.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_account_types', s_i ), p( '&Manage Account Types' ), p( 'Add, edit and delete account types for the file repository.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'modify_account', s_i ), p( '&Modify an Account' ), p( 'Modify a specific account\'s type and expiration.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'account_info', s_i ), p( '&Get an Account\'s Info' ), p( 'Fetch information about an account from the file repository.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fetch_ip', s_i ), p( '&Get an Uploader\'s IP Address' ), p( 'Fetch an uploader\'s ip address.' ) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'options', s_i ), p( '&Options' ), p( 'Set the file repository\'s options.' ) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'stats', s_i ), p( '&Get Stats' ), p( 'Fetch operating statistics from the file repository.' ) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'post_news', s_i ), p( '&Post News' ), p( 'Post a news item to the file repository.' ) ) + + admin.AppendMenu( CC.ID_NULL, p( s_i.GetName() ), submenu ) + + + for s_i in admin_message_depots: + + submenu = wx.Menu() + + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_accounts', s_i ), p( 'Create New &Accounts' ), p( 'Create new accounts.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_account_types', s_i ), p( '&Manage Account Types' ), p( 'Add, edit and delete account types for the file repository.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'modify_account', s_i ), p( '&Modify an Account' ), p( 'Modify a specific account\'s type and expiration.' ) ) + + admin.AppendMenu( CC.ID_NULL, p( s_i.GetName() ), submenu ) + + + for s_i in server_admin_identifiers: + + submenu = wx.Menu() + + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'options', s_i ), p( '&Options' ), p( 'Set the server\'s options.' ) ) + submenu.AppendSeparator() + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_services', s_i ), p( 'Manage &Services' ), p( 'Add, edit, and delete this server\'s services.' ) ) + submenu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'backup_service', s_i ), p( 'Make a &Backup' ), p( 'Back up this server\'s database.' 
) ) + + admin.AppendMenu( CC.ID_NULL, p( s_i.GetName() ), submenu ) + + + menu.Append( admin, p( '&Admin' ) ) + + + help = wx.Menu() + help.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'help' ), p( '&Help' ) ) + dont_know = wx.Menu() + dont_know.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'auto_repo_setup' ), p( 'Just set up some repositories for me, please' ) ) + dont_know.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'auto_server_setup' ), p( 'Just set up the server on this computer, please' ) ) + help.AppendMenu( wx.ID_NONE, p( 'I don\'t know what I am doing' ), dont_know ) + links = wx.Menu() + tumblr = wx.MenuItem( links, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'tumblr' ), p( 'Tumblr' ) ) + tumblr.SetBitmap( wx.Bitmap( HC.STATIC_DIR + os.path.sep + 'tumblr.png' ) ) + twitter = wx.MenuItem( links, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'twitter' ), p( 'Twitter' ) ) + twitter.SetBitmap( wx.Bitmap( HC.STATIC_DIR + os.path.sep + 'twitter.png' ) ) + site = wx.MenuItem( links, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'site' ), p( 'Site' ) ) + site.SetBitmap( wx.Bitmap( HC.STATIC_DIR + os.path.sep + 'file_repository_small.png' ) ) + forum = wx.MenuItem( links, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'forum' ), p( 'Forum' ) ) + forum.SetBitmap( wx.Bitmap( HC.STATIC_DIR + os.path.sep + 'file_repository_small.png' ) ) + links.AppendItem( tumblr ) + links.AppendItem( twitter ) + links.AppendItem( site ) + links.AppendItem( forum ) + help.AppendMenu( wx.ID_NONE, p( 'Links' ), links ) + debug = wx.Menu() + debug.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'debug_options' ), p( 'Options' ) ) + help.AppendMenu( wx.ID_NONE, p( 'Debug' ), debug ) + help.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'help_shortcuts' ), p( '&Shortcuts' ) ) + help.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'help_about' ), p( '&About' ) ) + + menu.Append( help, p( '&Help' ) ) + + old_menu = self.GetMenuBar() + + self.SetMenuBar( menu ) + + if old_menu is not None: old_menu.Destroy() + + + def RefreshStatusBar( self ): self._RefreshStatusBar() + + def SetDownloadsStatus( self, status ): + + if self.IsShown(): + + self._statusbar_downloads = status + + self._RefreshStatusBar() + + + + def SetInboxStatus( self, status ): + + if self.IsShown(): + + self._statusbar_inbox = status + + self._RefreshStatusBar() + + + + def SetServiceStatus( self, status ): + + if self.IsShown(): + + self._statusbar_service = status + + self._RefreshStatusBar() + + + +class FrameComposeMessage( ClientGUICommon.Frame ): + + def __init__( self, empty_draft_message ): + + ClientGUICommon.Frame.__init__( self, None, title = wx.GetApp().PrepStringForDisplay( 'Compose Message' ) ) + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + self.SetInitialSize( ( 920, 600 ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._draft_panel = ClientGUIMessages.DraftPanel( self, empty_draft_message ) + + vbox.AddF( self._draft_panel, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + self.Show( True ) + + HC.pubsub.sub( self, 'DeleteConversation', 'delete_conversation_gui' ) + HC.pubsub.sub( self, 'DeleteDraft', 'delete_draft_gui' ) + + + def DeleteConversation( self, conversation_key ): + + if self._draft_panel.GetConversationKey() == conversation_key: self.Destroy() + + + def DeleteDraft( self, draft_key ): + + if draft_key == self._draft_panel.GetDraftKey(): self.Destroy() + + +class FramePageChooser( ClientGUICommon.Frame ): + + def __init__( self ): + + def InitialiseControls(): + + 
self._button_hidden = wx.Button( self ) + self._button_hidden.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + self._button_hidden.Hide() + + self._button_1 = wx.Button( self, label = '', id = 1 ) + self._button_2 = wx.Button( self, label = '', id = 2 ) + self._button_3 = wx.Button( self, label = '', id = 3 ) + self._button_4 = wx.Button( self, label = '', id = 4 ) + self._button_5 = wx.Button( self, label = '', id = 5 ) + self._button_6 = wx.Button( self, label = '', id = 6 ) + self._button_7 = wx.Button( self, label = '', id = 7 ) + self._button_8 = wx.Button( self, label = '', id = 8 ) + self._button_9 = wx.Button( self, label = '', id = 9 ) + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + gridbox = wx.GridSizer( 0, 3 ) + + gridbox.AddF( self._button_1, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_2, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_3, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_4, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_5, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_6, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_7, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_8, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( self._button_9, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( gridbox ) + + self.SetInitialSize( ( 420, 210 ) ) + + + ClientGUICommon.Frame.__init__( self, None, title = wx.GetApp().PrepStringForDisplay( 'New Page' ) ) + + self.Center() + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + self._keycodes_to_ids = {} + + self._keycodes_to_ids[ wx.WXK_NUMPAD1 ] = 1 + self._keycodes_to_ids[ wx.WXK_NUMPAD2 ] = 2 + self._keycodes_to_ids[ wx.WXK_NUMPAD3 ] = 3 + self._keycodes_to_ids[ wx.WXK_NUMPAD4 ] = 4 + self._keycodes_to_ids[ wx.WXK_NUMPAD5 ] = 5 + self._keycodes_to_ids[ wx.WXK_NUMPAD6 ] = 6 + self._keycodes_to_ids[ wx.WXK_NUMPAD7 ] = 7 + self._keycodes_to_ids[ wx.WXK_NUMPAD8 ] = 8 + self._keycodes_to_ids[ wx.WXK_NUMPAD9 ] = 9 + + self._keycodes_to_ids[ wx.WXK_UP ] = 2 + self._keycodes_to_ids[ wx.WXK_DOWN ] = 8 + self._keycodes_to_ids[ wx.WXK_LEFT ] = 4 + self._keycodes_to_ids[ wx.WXK_RIGHT ] = 6 + + InitialiseControls() + + InitialisePanel() + + self._services = wx.GetApp().Read( 'services' ) + + self._petition_service_identifiers = [ service.GetServiceIdentifier() for service in self._services if service.GetServiceIdentifier().GetType() in HC.REPOSITORIES and service.GetAccount().HasPermission( HC.RESOLVE_PETITIONS ) ] + + self._identities = wx.GetApp().Read( 'identities' ) + + self._InitButtons( 'home' ) + + self.Bind( wx.EVT_BUTTON, self.EventButton ) + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self._button_hidden.SetFocus() + + self.Show( True ) + + + def _AddEntry( self, button, entry ): + + id = button.GetId() + + self._command_dict[ id ] = entry + + ( entry_type, obj ) = entry + + if entry_type == 'menu': button.SetLabel( obj ) + elif entry_type in ( 'page_query', 'page_petitions' ): + + name = obj.GetName() + + button.SetLabel( name ) + + elif entry_type == 'page_import_booru': button.SetLabel( 'booru' ) + elif entry_type == 'page_import_gallery': button.SetLabel( obj ) + elif entry_type == 'page_messages': + + name = obj.GetName() + + button.SetLabel( name ) + + elif entry_type == 'page_import_thread_watcher': button.SetLabel( 'thread watcher' ) + elif entry_type == 'page_import_url': button.SetLabel( 'url' ) + + button.Show() + + + def _InitButtons( self, menu_keyword ): + + 
self._command_dict = {} + + if menu_keyword == 'home': + + entries = [ ( 'menu', 'files' ), ( 'menu', 'download' ) ] + + if len( self._petition_service_identifiers ) > 0: entries.append( ( 'menu', 'petitions' ) ) + + if len( self._identities ) > 0: entries.append( ( 'menu', 'messages' ) ) + + elif menu_keyword == 'files': + + file_repos = [ ( 'page_query', service_identifier ) for service_identifier in [ service.GetServiceIdentifier() for service in self._services ] if service_identifier.GetType() == HC.FILE_REPOSITORY ] + + entries = [ ( 'page_query', CC.LOCAL_FILE_SERVICE_IDENTIFIER ) ] + file_repos + + elif menu_keyword == 'download': entries = [ ( 'page_import_url', None ), ( 'page_import_booru', None ), ( 'page_import_thread_watcher', None ), ( 'page_import_gallery', 'deviant art by artist' ), ( 'page_import_gallery', 'hentai foundry by artist' ), ( 'page_import_gallery', 'hentai foundry by tags' ) ] + elif menu_keyword == 'messages': entries = [ ( 'page_messages', identity ) for identity in self._identities ] + elif menu_keyword == 'petitions': entries = [ ( 'page_petitions', service_identifier ) for service_identifier in self._petition_service_identifiers ] + + if len( entries ) <= 4: + + self._button_1.Hide() + self._button_3.Hide() + self._button_5.Hide() + self._button_7.Hide() + self._button_9.Hide() + + usable_buttons = [ self._button_2, self._button_4, self._button_6, self._button_8 ] + + elif len( entries ) <= 9: usable_buttons = [ self._button_1, self._button_2, self._button_3, self._button_4, self._button_5, self._button_6, self._button_7, self._button_8, self._button_9 ] + else: + + pass # sort out a multi-page solution? maybe only if this becomes a big thing; the person can always select from the menus, yeah? + + usable_buttons = [ self._button_1, self._button_2, self._button_3, self._button_4, self._button_5, self._button_6, self._button_7, self._button_8, self._button_9 ] + entries = entries[:9] + + + for entry in entries: self._AddEntry( usable_buttons.pop( 0 ), entry ) + + for button in usable_buttons: button.Hide() + + + def EventButton( self, event ): + + id = event.GetId() + + if id in self._command_dict: + + ( entry_type, obj ) = self._command_dict[ id ] + + if entry_type == 'menu': self._InitButtons( obj ) + else: + + if entry_type == 'page_query': HC.pubsub.pub( 'new_page_query', obj ) + elif entry_type == 'page_import_booru': HC.pubsub.pub( 'new_page_import_booru' ) + elif entry_type == 'page_import_gallery': HC.pubsub.pub( 'new_page_import_gallery', obj ) + elif entry_type == 'page_import_thread_watcher': HC.pubsub.pub( 'new_page_import_thread_watcher' ) + elif entry_type == 'page_import_url': HC.pubsub.pub( 'new_page_import_url' ) + elif entry_type == 'page_messages': HC.pubsub.pub( 'new_page_messages', obj ) + elif entry_type == 'page_petitions': HC.pubsub.pub( 'new_page_petitions', obj ) + + self.Destroy() + + + + self._button_hidden.SetFocus() + + + def EventKeyDown( self, event ): + + if event.KeyCode in self._keycodes_to_ids.keys(): + + id = self._keycodes_to_ids[ event.KeyCode ] + + new_event = wx.CommandEvent( wx.wxEVT_COMMAND_BUTTON_CLICKED, winid = id ) + + self.ProcessEvent( new_event ) + + elif event.KeyCode == wx.WXK_ESCAPE: self.Destroy() + else: event.Skip() + + +class FrameReviewServices( ClientGUICommon.Frame ): + + def __init__( self ): + + def InitialiseControls(): + + self._listbook = ClientGUICommon.ListBook( self ) + + self._edit = wx.Button( self, label='add, remove or edit services' ) + self._edit.Bind( wx.EVT_BUTTON, self.EventEdit ) + 
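# 'add, remove or edit services' opens the manage services dialog via EventEdit below;
# 'ok' simply destroys the frame. each service gets its own page in the listbook,
# built by _InitialiseServices and rebuilt whenever 'notify_new_services' fires.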
+ self._ok = wx.Button( self, label='ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._listbook, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( self._edit, FLAGS_SMALL_INDENT ) + vbox.AddF( self._ok, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + self.SetInitialSize( ( 880, 620 ) ) + + + ( pos_x, pos_y ) = wx.GetApp().GetGUI().GetPositionTuple() + + pos = ( pos_x + 25, pos_y + 50 ) + + ClientGUICommon.Frame.__init__( self, None, title = wx.GetApp().PrepStringForDisplay( 'Review Services' ), pos = pos ) + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + InitialiseControls() + + self._InitialiseServices() + + InitialisePanel() + + self.Show( True ) + + HC.pubsub.sub( self, 'RefreshServices', 'notify_new_services' ) + + + def _InitialiseServices( self ): + + self._listbook.DeleteAllPages() + + listbook_dict = {} + + service_identifiers = wx.GetApp().Read( 'service_identifiers' ) + + for service_identifier in service_identifiers: + + service_type = service_identifier.GetType() + + if service_type in ( HC.LOCAL_FILE, HC.LOCAL_TAG ): + + page = FrameReviewServicesServicePanel( self._listbook, service_identifier ) + + name = service_identifier.GetName() + + self._listbook.AddPage( page, name ) + + else: + + if service_type not in listbook_dict: + + listbook = ClientGUICommon.ListBook( self._listbook ) + + listbook_dict[ service_type ] = listbook + + if service_type == HC.TAG_REPOSITORY: name = 'tags' + elif service_type == HC.FILE_REPOSITORY: name = 'files' + elif service_type == HC.MESSAGE_DEPOT: name = 'message depots' + elif service_type == HC.SERVER_ADMIN: name = 'servers admin' + elif service_type == HC.LOCAL_RATING_LIKE: name = 'local ratings like' + elif service_type == HC.LOCAL_RATING_NUMERICAL: name = 'local ratings numerical' + + self._listbook.AddPage( listbook, name ) + + + listbook = listbook_dict[ service_type ] + + page = ( FrameReviewServicesServicePanel, [ listbook, service_identifier ], {} ) + + name = service_identifier.GetName() + + listbook.AddPage( page, name ) + + + + wx.CallAfter( self._listbook.Layout ) + + + def EventEdit( self, event ): + + try: + + with ClientGUIDialogs.DialogManageServices( self ) as dlg: dlg.ShowModal() + + except: wx.MessageBox( traceback.format_exc() ) + + + def EventOk( self, event ): self.Destroy() + + def RefreshServices( self ): self._InitialiseServices() + +class FrameReviewServicesServicePanel( wx.ScrolledWindow ): + + def __init__( self, parent, service_identifier ): + + wx.ScrolledWindow.__init__( self, parent ) + + self.SetScrollRate( 0, 20 ) + + self._service_identifier = service_identifier + + self._service = wx.GetApp().Read( 'service', self._service_identifier ) + + service_type = service_identifier.GetType() + + if service_type in HC.RESTRICTED_SERVICES: + + account = self._service.GetAccount() + + account_type = account.GetAccountType() + + expires = account.GetExpires() + + + def InitialiseControls(): + + if service_type in HC.RESTRICTED_SERVICES: + + self._account_type = wx.StaticText( self ) + + self._age = ClientGUICommon.Gauge( self ) + + if expires is None: self._age.Hide() + + self._age_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + if service_type in HC.RESTRICTED_SERVICES: + + if service_type in HC.REPOSITORIES: + + self._updates = 
ClientGUICommon.Gauge( self ) + + self._updates_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + + self._bytes = ClientGUICommon.Gauge( self ) + + if max_num_bytes is None: self._bytes.Hide() + + self._bytes_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + self._requests = ClientGUICommon.Gauge( self ) + + if max_num_requests is None: self._requests.Hide() + + self._requests_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + + + if service_type in ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ): + + self._files_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + self._deleted_files_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + if service_type == HC.FILE_REPOSITORY: + + self._num_thumbs = 0 + self._num_local_thumbs = 0 + + self._thumbnails = ClientGUICommon.Gauge( self ) + + self._thumbnails_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + + elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + self._tags_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + if service_type == HC.TAG_REPOSITORY: + + self._deleted_tags_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + + elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + self._ratings_text = wx.StaticText( self, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + + if service_type in HC.RESTRICTED_SERVICES: + + if service_type in HC.REPOSITORIES: + + self._reset = wx.Button( self, label='reset cache' ) + self._reset.Bind( wx.EVT_BUTTON, self.EventServiceReset ) + + + if service_type == HC.SERVER_ADMIN: + + self._init = wx.Button( self, label='initialise server' ) + self._init.Bind( wx.EVT_BUTTON, self.EventServerInitialise ) + + + self._refresh = wx.Button( self, label='refresh account' ) + self._refresh.Bind( wx.EVT_BUTTON, self.EventServiceRefreshAccount ) + + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self, label = '- service -' ), FLAGS_SMALL_INDENT ) + + if service_type in HC.RESTRICTED_SERVICES: + + perm_vbox = wx.BoxSizer( wx.VERTICAL ) + + perm_vbox.AddF( wx.StaticText( self, label = '- service permissions -' ), FLAGS_SMALL_INDENT ) + perm_vbox.AddF( self._account_type, FLAGS_EXPAND_PERPENDICULAR ) + perm_vbox.AddF( self._age, FLAGS_EXPAND_PERPENDICULAR ) + perm_vbox.AddF( self._age_text, FLAGS_EXPAND_PERPENDICULAR ) + perm_vbox.AddF( self._bytes, FLAGS_EXPAND_PERPENDICULAR ) + perm_vbox.AddF( self._bytes_text, FLAGS_EXPAND_PERPENDICULAR ) + perm_vbox.AddF( self._requests, FLAGS_EXPAND_PERPENDICULAR ) + perm_vbox.AddF( self._requests_text, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( perm_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + + if service_type in HC.REPOSITORIES: + + repo_vbox = wx.BoxSizer( wx.VERTICAL ) + + repo_vbox.AddF( wx.StaticText( self, label = '- repository synchronisation -' ), FLAGS_SMALL_INDENT ) + + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + + repo_vbox.AddF( self._updates, FLAGS_EXPAND_PERPENDICULAR ) + repo_vbox.AddF( self._updates_text, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( repo_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + + if service_type in HC.REPOSITORIES + [ HC.LOCAL_FILE, HC.LOCAL_TAG, HC.LOCAL_RATING_LIKE, 
HC.LOCAL_RATING_NUMERICAL ]: + + info_vbox = wx.BoxSizer( wx.VERTICAL ) + + info_vbox.AddF( wx.StaticText( self, label = '- service information -' ), FLAGS_SMALL_INDENT ) + + if service_type in ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ): + + info_vbox.AddF( self._files_text, FLAGS_EXPAND_PERPENDICULAR ) + info_vbox.AddF( self._deleted_files_text, FLAGS_EXPAND_PERPENDICULAR ) + + if service_type == HC.FILE_REPOSITORY: + + info_vbox.AddF( self._thumbnails, FLAGS_EXPAND_PERPENDICULAR ) + info_vbox.AddF( self._thumbnails_text, FLAGS_EXPAND_PERPENDICULAR ) + + + elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + info_vbox.AddF( self._tags_text, FLAGS_EXPAND_PERPENDICULAR ) + + if service_type == HC.TAG_REPOSITORY: + + info_vbox.AddF( self._deleted_tags_text, FLAGS_EXPAND_PERPENDICULAR ) + + + elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + info_vbox.AddF( self._ratings_text, FLAGS_EXPAND_PERPENDICULAR ) + + + vbox.AddF( info_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + + if service_type in HC.RESTRICTED_SERVICES: + + repo_buttons_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + if service_type in HC.REPOSITORIES: repo_buttons_hbox.AddF( self._reset, FLAGS_MIXED ) + + if service_type == HC.SERVER_ADMIN: repo_buttons_hbox.AddF( self._init, FLAGS_MIXED ) + + repo_buttons_hbox.AddF( self._refresh, FLAGS_MIXED ) + + vbox.AddF( repo_buttons_hbox, FLAGS_BUTTON_SIZERS ) + + + self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + self._DisplayService() + + self._timer_updates = wx.Timer( self, id = ID_TIMER_UPDATES ) + + if service_type in HC.REPOSITORIES: + + self.Bind( wx.EVT_TIMER, self.EventTimerUpdates, id = ID_TIMER_UPDATES ) + + self._timer_updates.Start( 1000, wx.TIMER_CONTINUOUS ) + + + HC.pubsub.sub( self, 'ProcessServiceUpdate', 'service_update_gui' ) + HC.pubsub.sub( self, 'AddThumbnailCount', 'add_thumbnail_count' ) + + + def _DisplayAccountInfo( self ): + + self._service = wx.GetApp().Read( 'service', self._service_identifier ) + + service_type = self._service_identifier.GetType() + + now = int( time.time() ) + + if service_type in HC.RESTRICTED_SERVICES: + + account = self._service.GetAccount() + + account_type = account.GetAccountType() + + self._account_type.SetLabel( account_type.ConvertToString() ) + + if service_type in HC.REPOSITORIES: + + if not account.IsBanned(): + + created = account.GetCreated() + expires = account.GetExpires() + + if expires is None: self._age.Hide() + else: + + self._age.Show() + + self._age.SetRange( expires - created ) + self._age.SetValue( min( now - created, expires - created ) ) + + + self._age_text.SetLabel( account.GetExpiresString() ) + + first_begin = self._service.GetFirstBegin() + next_begin = self._service.GetNextBegin() + + if first_begin == 0: + + num_updates = 0 + num_updates_downloaded = 0 + + self._updates.SetValue( 0 ) + + else: + + num_updates = ( now - first_begin ) / HC.UPDATE_DURATION + num_updates_downloaded = ( next_begin - first_begin ) / HC.UPDATE_DURATION + + self._updates.SetRange( num_updates ) + self._updates.SetValue( num_updates_downloaded ) + + + self._updates_text.SetLabel( HC.ConvertIntToPrettyString( num_updates_downloaded ) + '/' + HC.ConvertIntToPrettyString( num_updates ) + ' - ' + self._service.GetUpdateStatus() ) + + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + ( used_bytes, used_requests ) = account.GetUsedData() + + if max_num_bytes is None: self._bytes.Hide() + else: + + self._bytes.Show() + + self._bytes.SetRange( max_num_bytes ) + 
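# the gauge tracks this month's bandwidth: the range is the account type's monthly
# byte allowance, the value is how much of it has been used so far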
self._bytes.SetValue( used_bytes ) + + + self._bytes_text.SetLabel( account.GetUsedBytesString() ) + + if max_num_requests is None: self._requests.Hide() + else: + + self._requests.Show() + + self._requests.SetRange( max_num_requests ) + self._requests.SetValue( min( used_requests, max_num_requests ) ) + + + self._requests_text.SetLabel( account.GetUsedRequestsString() ) + + + + + def _DisplayNumThumbs( self ): + + self._thumbnails.SetRange( self._num_thumbs ) + self._thumbnails.SetValue( min( self._num_local_thumbs, self._num_thumbs ) ) + + self._thumbnails_text.SetLabel( HC.ConvertIntToPrettyString( self._num_local_thumbs ) + '/' + HC.ConvertIntToPrettyString( self._num_thumbs ) + ' thumbnails downloaded' ) + + + def _DisplayService( self ): + + service_type = self._service_identifier.GetType() + + self._DisplayAccountInfo() + + if service_type in HC.REPOSITORIES + [ HC.LOCAL_FILE, HC.LOCAL_TAG, HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ]: + + service_info = wx.GetApp().Read( 'service_info', self._service_identifier ) + + if service_type in ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ): + + num_files = service_info[ HC.SERVICE_INFO_NUM_FILES ] + total_size = service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] + num_deleted_files = service_info[ HC.SERVICE_INFO_NUM_DELETED_FILES ] + + self._files_text.SetLabel( HC.ConvertIntToPrettyString( num_files ) + ' files, totalling ' + HC.ConvertIntToBytes( total_size ) ) + + self._deleted_files_text.SetLabel( HC.ConvertIntToPrettyString( num_deleted_files ) + ' deleted files' ) + + if service_type == HC.FILE_REPOSITORY: + + self._num_thumbs = service_info[ HC.SERVICE_INFO_NUM_THUMBNAILS ] + self._num_local_thumbs = service_info[ HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ] + + self._DisplayNumThumbs() + + + elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + num_files = service_info[ HC.SERVICE_INFO_NUM_FILES ] + num_namespaces = service_info[ HC.SERVICE_INFO_NUM_NAMESPACES ] + num_tags = service_info[ HC.SERVICE_INFO_NUM_TAGS ] + num_mappings = service_info[ HC.SERVICE_INFO_NUM_MAPPINGS ] + + self._tags_text.SetLabel( HC.ConvertIntToPrettyString( num_files ) + ' hashes, ' + HC.ConvertIntToPrettyString( num_namespaces ) + ' namespaces, ' + HC.ConvertIntToPrettyString( num_tags ) + ' tags, totalling ' + HC.ConvertIntToPrettyString( num_mappings ) + ' mappings' ) + + if service_type == HC.TAG_REPOSITORY: + + num_deleted_mappings = service_info[ HC.SERVICE_INFO_NUM_DELETED_MAPPINGS ] + + self._deleted_tags_text.SetLabel( HC.ConvertIntToPrettyString( num_deleted_mappings ) + ' deleted mappings' ) + + + elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + num_ratings = service_info[ HC.SERVICE_INFO_NUM_FILES ] + + self._ratings_text.SetLabel( str( num_ratings ) + ' files rated' ) + + + + if service_type == HC.SERVER_ADMIN: + + if self._service.IsInitialised(): + + self._init.Disable() + self._refresh.Enable() + + else: + + self._init.Enable() + self._refresh.Disable() + + + + + def AddThumbnailCount( self, service_identifier, count ): + + if service_identifier == self._service_identifier: + + self._num_local_thumbs += count + + self._DisplayNumThumbs() + + + + def EventServerInitialise( self, event ): + + try: + + service = wx.GetApp().Read( 'service', self._service_identifier ) + + connection = service.GetConnection() + + connection.Get( 'init' ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def EventServiceRefreshAccount( self, event ): + + try: + + connection = self._service.GetConnection() + + connection.Get( 
'account' ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def EventServiceReset( self, event ): + + message = 'This will remove all cached information for ' + self._service_identifier.GetName() + ' from the database. It will take time to resynchronise.' + + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: + + with wx.BusyCursor(): wx.GetApp().Write( 'reset_service', self._service_identifier ) + + + + + def EventTimerUpdates( self, event ): + + now = int( time.time() ) + + first_begin = self._service.GetFirstBegin() + next_begin = self._service.GetNextBegin() + + if first_begin == 0: + + num_updates = 0 + num_updates_downloaded = 0 + + else: + + num_updates = ( now - first_begin ) / HC.UPDATE_DURATION + num_updates_downloaded = ( next_begin - first_begin ) / HC.UPDATE_DURATION + + + self._updates_text.SetLabel( HC.ConvertIntToPrettyString( num_updates_downloaded ) + '/' + HC.ConvertIntToPrettyString( num_updates ) + ' - ' + self._service.GetUpdateStatus() ) + + + def ProcessServiceUpdate( self, update ): + + service_identifier = update.GetServiceIdentifier() + + if service_identifier == self._service_identifier: + + action = update.GetAction() + + if action == CC.SERVICE_UPDATE_RESET: self._service_identifier = update.GetInfo() + + if action in ( CC.SERVICE_UPDATE_ACCOUNT, CC.SERVICE_UPDATE_REQUEST_MADE ): wx.CallLater( 200, self._DisplayAccountInfo ) + else: + wx.CallLater( 200, self._DisplayService ) + wx.CallLater( 400, self.Layout ) # ugly hack, but it works for now + + + +class FrameSplash( ClientGUICommon.Frame ): + + def __init__( self ): + + wx.Frame.__init__( self, None, style = wx.FRAME_NO_TASKBAR | wx.FRAME_SHAPED, title = 'hydrus client' ) + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + self._bmp = wx.EmptyBitmap( 154, 220, 32 ) # 32 bit for transparency? 
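# the splash keeps everything it draws in this off-screen bitmap: EventPaint just
# blits it back with a BufferedPaintDC, and SetText repaints the logo plus the new
# status line into the same bitmap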
+ + self.SetSize( ( 154, 220 ) ) + + self.Center() + + # this is 124 x 166 + self._hydrus = wx.Image( HC.STATIC_DIR + os.path.sep + 'hydrus_splash.png', type=wx.BITMAP_TYPE_PNG ).ConvertToBitmap() + + dc = wx.BufferedDC( wx.ClientDC( self ), self._bmp ) + + dc.SetBackground( wx.Brush( wx.WHITE ) ) + + dc.Clear() + + dc.DrawBitmap( self._hydrus, 15, 15 ) + + self.Bind( wx.EVT_PAINT, self.EventPaint ) + + self.Show( True ) + + self.Bind( wx.EVT_MOUSE_EVENTS, self.OnMouseEvents ) + + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._bmp ) + + def OnMouseEvents( self, event ): pass + + def SetText( self, text ): + + dc = wx.BufferedDC( wx.ClientDC( self ), self._bmp ) + + dc.SetBackground( wx.Brush( wx.WHITE ) ) + + dc.Clear() + + dc.DrawBitmap( self._hydrus, 15, 15 ) + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( width, height ) = dc.GetTextExtent( text ) + + x = ( 154 - width ) / 2 + + dc.DrawText( text, x, 200 ) + + \ No newline at end of file diff --git a/include/ClientGUICanvas.py b/include/ClientGUICanvas.py new file mode 100755 index 00000000..f1be2d92 --- /dev/null +++ b/include/ClientGUICanvas.py @@ -0,0 +1,2827 @@ +import HydrusConstants as HC +import ClientConstants as CC +import ClientGUICommon +import ClientGUIDialogs +import ClientGUIMixins +import collections +import os +import Queue +import random +import threading +import time +import traceback +import urllib +import wx +import wx.lib.flashwin + +ID_TIMER_ANIMATED = wx.NewId() +ID_TIMER_SLIDESHOW = wx.NewId() +ID_TIMER_MEDIA_INFO_DISPLAY = wx.NewId() + +ANIMATED_SCANBAR_HEIGHT = 20 +ANIMATED_SCANBAR_CARET_WIDTH = 10 + +# Zooms + +ZOOMINS = [ 0.01, 0.05, 0.1, 0.15, 0.2, 0.3, 0.5, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.5, 2.0, 3.0, 5.0, 10.0, 20.0 ] +ZOOMOUTS = [ 20.0, 10.0, 5.0, 3.0, 2.0, 1.5, 1.2, 1.1, 1.0, 0.9, 0.8, 0.7, 0.5, 0.3, 0.2, 0.15, 0.1, 0.05, 0.01 ] + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +class Canvas(): + + def __init__( self, file_service_identifier, image_cache ): + + self._file_service_identifier = file_service_identifier + self._image_cache = image_cache + + self._service_identifiers_to_services = {} + + self._focus_holder = wx.Window( self ) + self._focus_holder.Hide() + self._focus_holder.SetEventHandler( self ) + + self._options = wx.GetApp().Read( 'options' ) + + self._current_media = None + self._current_display_media = None + self._media_window = None + self._current_zoom = 1.0 + + self._last_drag_coordinates = None + self._total_drag_delta = ( 0, 0 ) + + self.SetBackgroundColour( wx.WHITE ) + + self._canvas_bmp = wx.EmptyBitmap( 0, 0, 24 ) + + self.Bind( wx.EVT_SIZE, self.EventResize ) + + self.Bind( wx.EVT_PAINT, self.EventPaint ) + + + def _ChangeFrame( self, direction ): + + if self._current_media.GetMime() == HC.IMAGE_GIF and self._current_media.HasDuration(): self._media_window.ChangeFrame( direction ) + + + def _DrawBackgroundBitmap( self ): + + ( client_width, client_height ) = self.GetClientSize() + + cdc = wx.ClientDC( self ) + + dc = wx.BufferedDC( cdc, self._canvas_bmp ) + + 
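# everything below is drawn into self._canvas_bmp through the buffered dc, so the
# overlay (tags, icons, ratings, info strings) persists across repaints; EventPaint
# only has to blit the cached bitmap back to the screen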
dc.SetBackground( wx.Brush( wx.WHITE ) ) + + dc.Clear() + + if self._current_media is not None: + + # tags on the top left + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + tags_cdpp = self._current_media.GetTags() + + ( current, deleted, pending, petitioned ) = tags_cdpp.GetUnionCDPP() + + tags_i_want_to_display = list( current.union( pending ).union( petitioned ) ) + + tags_i_want_to_display.sort() + + current_y = 3 + + namespace_colours = self._options[ 'namespace_colours' ] + + for tag in tags_i_want_to_display: + + if tag in current: display_string = tag + elif tag in pending: display_string = '(+) ' + tag + elif tag in petitioned: display_string = '(-) ' + tag + + if ':' in tag: + + ( namespace, sub_tag ) = tag.split( ':', 1 ) + + if namespace in namespace_colours: ( r, g, b ) = namespace_colours[ namespace ] + else: ( r, g, b ) = namespace_colours[ None ] + + else: ( r, g, b ) = namespace_colours[ '' ] + + dc.SetTextForeground( wx.Colour( r, g, b ) ) + + ( x, y ) = dc.GetTextExtent( display_string ) + + dc.DrawText( display_string, 5, current_y ) + + current_y += y + + + dc.SetTextForeground( wx.BLACK ) + + # icons + + icons_to_show = [] + + if self._current_media.HasInbox(): icons_to_show.append( CC.GlobalBMPs.inbox_bmp ) + + file_service_identifiers = self._current_media.GetFileServiceIdentifiersCDPP() + + if self._file_service_identifier.GetType() == HC.LOCAL_FILE: + + if len( file_service_identifiers.GetPendingRemote() ) > 0: icons_to_show.append( CC.GlobalBMPs.file_repository_pending_bmp ) + elif len( file_service_identifiers.GetCurrentRemote() ) > 0: icons_to_show.append( CC.GlobalBMPs.file_repository_bmp ) + + elif self._file_service_identifier in file_service_identifiers.GetCurrentRemote(): + + if self._file_service_identifier in file_service_identifiers.GetPetitionedRemote(): icons_to_show.append( CC.GlobalBMPs.file_repository_petitioned_bmp ) + + + current_x = client_width - 18 + + for icon_bmp in icons_to_show: + + dc.DrawBitmap( icon_bmp, current_x, 2 ) + + current_x -= 20 + + + # top right + + top_right_strings = [] + + collections_string = self._GetCollectionsString() + + if len( collections_string ) > 0: top_right_strings.append( collections_string ) + + ( local_ratings, remote_ratings ) = self._current_display_media.GetRatings() + + service_identifiers_to_ratings = local_ratings.GetServiceIdentifiersToRatings() + + for ( service_identifier, rating ) in service_identifiers_to_ratings.items(): + + if rating is None: continue + + service_type = service_identifier.GetType() + + if service_identifier in self._service_identifiers_to_services: service = self._service_identifiers_to_services[ service_identifier ] + else: + + service = wx.GetApp().Read( 'service', service_identifier ) + + self._service_identifiers_to_services[ service_identifier ] = service + + + if service_type == HC.LOCAL_RATING_LIKE: + + ( like, dislike ) = service.GetExtraInfo() + + if rating == 1: s = like + elif rating == 0: s = dislike + + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + ( lower, upper ) = service.GetExtraInfo() + + s = HC.ConvertNumericalRatingToPrettyString( lower, upper, rating ) + + + top_right_strings.append( s ) + + + if len( top_right_strings ) > 0: + + current_y = 3 + + if len( icons_to_show ) > 0: current_y += 16 + + for s in top_right_strings: + + ( x, y ) = dc.GetTextExtent( s ) + + dc.DrawText( s, client_width - x - 3, current_y ) + + current_y += y + + + + info_string = self._GetInfoString() + + ( x, y ) = dc.GetTextExtent( info_string ) + + 
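# centre the info string along the bottom edge: half the leftover width on the left,
# with a 3px margin between the text and the bottom of the client area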
dc.DrawText( info_string, ( client_width - x ) / 2, client_height - y - 3 ) + + index_string = self._GetIndexString() + + if len( index_string ) > 0: + + ( x, y ) = dc.GetTextExtent( index_string ) + + dc.DrawText( index_string, client_width - x - 3, client_height - y - 3 ) + + + + + def _DrawCurrentMedia( self ): + + ( my_width, my_height ) = self.GetClientSize() + + if my_width > 0 and my_height > 0: + + if self._current_media is not None: self._SizeAndPositionMediaWindow() + + + + def _GetCollectionsString( self ): return '' + + def _GetInfoString( self ): + + info_string = self._current_media.GetPrettyInfo() + + return info_string + + + def _GetIndexString( self ): return '' + + def _GetMediaWindowSizeAndPosition( self ): + + ( my_width, my_height ) = self.GetClientSize() + + ( original_width, original_height ) = self._current_display_media.GetResolution() + + media_width = int( round( original_width * self._current_zoom ) ) + media_height = int( round( original_height * self._current_zoom ) ) + + if self._current_display_media.GetMime() == HC.IMAGE_GIF and self._current_display_media.HasDuration(): media_height += ANIMATED_SCANBAR_HEIGHT + + ( drag_x, drag_y ) = self._total_drag_delta + + x_offset = ( my_width - media_width ) / 2 + drag_x + y_offset = ( my_height - media_height ) / 2 + drag_y + + new_size = ( media_width, media_height ) + new_position = ( x_offset, y_offset ) + + return ( new_size, new_position ) + + + def _ManageRatings( self ): + + if self._current_media is not None: + + try: + with ClientGUIDialogs.DialogManageRatings( self, ( self._current_media, ) ) as dlg: dlg.ShowModal() + except: wx.MessageBox( 'Had a problem displaying the manage ratings dialog from fullscreen.' ) + + + + def _ManageTags( self ): + + if self._current_media is not None: + + try: + with ClientGUIDialogs.DialogManageTags( self, self._file_service_identifier, ( self._current_media, ) ) as dlg: dlg.ShowModal() + except: wx.MessageBox( 'Had a problem displaying the manage tags dialog from fullscreen.' 
) + + + + def _PrefetchImages( self ): pass + + def _RecalcZoom( self ): + + if self._current_display_media is None: self._current_zoom = 1.0 + else: + + ( my_width, my_height ) = self.GetClientSize() + + ( media_width, media_height ) = self._current_display_media.GetResolution() + + if media_width > my_width or media_height > my_height: + + width_zoom = my_width / float( media_width ) + + height_zoom = my_height / float( media_height ) + + self._current_zoom = min( ( width_zoom, height_zoom ) ) + + else: self._current_zoom = 1.0 + + + + def _ShouldSkipInputDueToFlash( self ): + + if self._current_display_media.GetMime() in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ): + + ( x, y ) = self._media_window.GetPosition() + ( width, height ) = self._media_window.GetSize() + + ( mouse_x, mouse_y ) = self.ScreenToClient( wx.GetMousePosition() ) + + if mouse_x > x and mouse_x < x + width and mouse_y > y and mouse_y < y + height: return True + + + return False + + + def _SizeAndPositionMediaWindow( self ): + + ( new_size, new_position ) = self._GetMediaWindowSizeAndPosition() + + if new_size != self._media_window.GetSize(): self._media_window.SetSize( new_size ) + if new_position != self._media_window.GetPosition(): self._media_window.SetPosition( new_position ) + + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp, wx.BUFFER_VIRTUAL_AREA ) + + def EventResize( self, event ): + + ( my_width, my_height ) = self.GetClientSize() + + self._canvas_bmp = wx.EmptyBitmap( my_width, my_height, 24 ) + + if self._media_window is not None: + + ( media_width, media_height ) = self._media_window.GetClientSize() + + if my_width != media_width or my_height != media_height: + + with wx.FrozenWindow( self ): + + self._RecalcZoom() + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + + else: self._DrawBackgroundBitmap() + + + def KeepCursorAlive( self ): pass + + def SetMedia( self, media ): + + if media != self._current_media: + + with wx.FrozenWindow( self ): + + self._current_media = media + self._current_display_media = None + self._total_drag_delta = ( 0, 0 ) + self._last_drag_coordinates = None + + if self._media_window is not None: + + self._media_window.Destroy() + self._media_window = None + + + if self._current_media is not None: + + self._current_display_media = self._current_media.GetDisplayMedia() + + if self._current_display_media.GetFileServiceIdentifiersCDPP().HasLocal(): + + self._RecalcZoom() + + if self._current_display_media.GetMime() in HC.IMAGES: + + ( initial_size, initial_position ) = self._GetMediaWindowSizeAndPosition() + + self._media_window = Image( self, self._current_display_media, self._image_cache, initial_size, initial_position ) + + elif self._current_display_media.GetMime() == HC.APPLICATION_FLASH: + + self._media_window = wx.lib.flashwin.FlashWindow( self ) + + file_hash = self._current_display_media.GetHash() + + self._media_window.movie = HC.CLIENT_FILES_DIR + os.path.sep + file_hash.encode( 'hex' ) + + elif self._current_display_media.GetMime() == HC.VIDEO_FLV: + + self._media_window = wx.lib.flashwin.FlashWindow( self ) + + file_hash = self._current_display_media.GetHash() + + flash_vars = [] + flash_vars.append( ( 'flv', HC.CLIENT_FILES_DIR + os.path.sep + file_hash.encode( 'hex' ) ) ) + flash_vars.append( ( 'margin', '0' ) ) + flash_vars.append( ( 'autoload', '1' ) ) + flash_vars.append( ( 'autoplay', '1' ) ) + flash_vars.append( ( 'showvolume', '1' ) ) + flash_vars.append( ( 'showtime', '1' ) ) + flash_vars.append( ( 'loop', '1' ) ) + + f = 
urllib.urlencode( flash_vars ) + + self._media_window.flashvars = f + self._media_window.movie = HC.STATIC_DIR + os.path.sep + 'player_flv_maxi_1.6.0.swf' + + + self._PrefetchImages() + + else: self._current_media = None + + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + + +class CanvasRatingsFilterPanel( Canvas, wx.Window ): + + def __init__( self, parent ): + + wx.Window.__init__( self, parent, style = wx.SIMPLE_BORDER | wx.WANTS_CHARS ) + Canvas.__init__( self, CC.LOCAL_FILE_SERVICE_IDENTIFIER, wx.GetApp().GetFullscreenImageCache() ) + + wx.CallAfter( self.Refresh ) + + self.Bind( wx.EVT_MOTION, self.EventDrag ) + self.Bind( wx.EVT_LEFT_DOWN, self.EventDragBegin ) + self.Bind( wx.EVT_RIGHT_DOWN, self.GetParent().GetParent().EventMouseDown ) + self.Bind( wx.EVT_MIDDLE_DOWN, self.GetParent().GetParent().EventMouseDown ) + self.Bind( wx.EVT_LEFT_UP, self.EventDragEnd ) + self.Bind( wx.EVT_MOUSEWHEEL, self.EventMouseWheel ) + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self._timer_media_info_display = wx.Timer( self, id = ID_TIMER_MEDIA_INFO_DISPLAY ) + + self.Bind( wx.EVT_TIMER, self.EventTimerMediaInfoDisplay, id = ID_TIMER_MEDIA_INFO_DISPLAY ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + + def _ZoomIn( self ): + + if self._current_media is not None: + + for zoom in ZOOMINS: + + if self._current_zoom < zoom: + + if self._current_media.GetMime() in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ): + + # because of the event passing under mouse, we want to preserve whitespace around flash + + ( original_width, original_height ) = self._current_display_media.GetResolution() + + ( my_width, my_height ) = self.GetClientSize() + + new_media_width = int( round( original_width * zoom ) ) + new_media_height = int( round( original_height * zoom ) ) + + if new_media_width >= my_width or new_media_height >= my_height: return + + + with wx.FrozenWindow( self ): + + ( drag_x, drag_y ) = self._total_drag_delta + + zoom_ratio = zoom / self._current_zoom + + self._total_drag_delta = ( int( drag_x * zoom_ratio ), int( drag_y * zoom_ratio ) ) + + self._current_zoom = zoom + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + break + + + + + + def _ZoomOut( self ): + + if self._current_media is not None: + + for zoom in ZOOMOUTS: + + if self._current_zoom > zoom: + + with wx.FrozenWindow( self ): + + ( drag_x, drag_y ) = self._total_drag_delta + + zoom_ratio = zoom / self._current_zoom + + self._total_drag_delta = ( int( drag_x * zoom_ratio ), int( drag_y * zoom_ratio ) ) + + self._current_zoom = zoom + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + break + + + + + + def _ZoomSwitch( self ): + + ( my_width, my_height ) = self.GetClientSize() + + ( media_width, media_height ) = self._current_display_media.GetResolution() + + if self._current_media.GetMime() not in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ) or self._current_zoom > 1.0 or ( media_width < my_width and media_height < my_height ): + + new_zoom = self._current_zoom + + if self._current_zoom == 1.0: + + if media_width > my_width or media_height > my_height: + + width_zoom = my_width / float( media_width ) + + height_zoom = my_height / float( media_height ) + + new_zoom = min( ( width_zoom, height_zoom ) ) + + + else: new_zoom = 1.0 + + if new_zoom != self._current_zoom: + + ( drag_x, drag_y ) = self._total_drag_delta + + zoom_ratio = new_zoom / self._current_zoom + + self._total_drag_delta = ( int( drag_x * zoom_ratio ), int( drag_y * zoom_ratio ) ) + + self._current_zoom = new_zoom + + 
self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + + + def EventDrag( self, event ): + + if wx.Window.FindFocus() != self: self.SetFocus() + + if event.Dragging() and self._last_drag_coordinates is not None: + + ( old_x, old_y ) = self._last_drag_coordinates + + ( x, y ) = event.GetPosition() + + ( delta_x, delta_y ) = ( x - old_x, y - old_y ) + + try: self.WarpPointer( old_x, old_y ) + except: self._last_drag_coordinates = ( x, y ) + + ( old_delta_x, old_delta_y ) = self._total_drag_delta + + self._total_drag_delta = ( old_delta_x + delta_x, old_delta_y + delta_y ) + + self._DrawCurrentMedia() + + + self.SetCursor( wx.StockCursor( wx.CURSOR_ARROW ) ) + + self._timer_media_info_display.Start( 800, wx.TIMER_ONE_SHOT ) + + + def EventDragBegin( self, event ): + + if event.ShiftDown(): + + ( x, y ) = event.GetPosition() + + ( client_x, client_y ) = self.GetClientSize() + + if x < 20 or x > client_x - 20 or y < 20 or y > client_y -20: + + try: + + better_x = x + better_y = y + + if x < 20: better_x = 20 + if y < 20: better_y = 20 + + if x > client_x - 20: better_x = client_x - 20 + if y > client_y - 20: better_y = client_y - 20 + + self.WarpPointer( better_x, better_y ) + + x = better_x + y = better_y + + except: pass + + + self._last_drag_coordinates = ( x, y ) + + else: self.GetParent().GetParent().ProcessEvent( event ) + + + def EventDragEnd( self, event ): + + self._last_drag_coordinates = None + + event.Skip() + + + def EventKeyDown( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + keys_i_want_to_bump_up_regardless = [ wx.WXK_SPACE, wx.WXK_UP, wx.WXK_NUMPAD_UP, wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN, wx.WXK_LEFT, wx.WXK_NUMPAD_LEFT, wx.WXK_RIGHT, wx.WXK_NUMPAD_RIGHT, wx.WXK_BACK, wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER, wx.WXK_ESCAPE ] + + ( modifier, key ) = HC.GetShortcutFromEvent( event ) + + key_dict = self._options[ 'shortcuts' ][ modifier ] + + if event.KeyCode not in keys_i_want_to_bump_up_regardless and key in key_dict: + + action = key_dict[ key ] + + self.ProcessEvent( wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) ) + + else: + + if event.KeyCode in ( ord( '+' ), wx.WXK_ADD, wx.WXK_NUMPAD_ADD ): self._ZoomIn() + elif event.KeyCode in ( ord( '-' ), wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT ): self._ZoomOut() + elif event.KeyCode == ord( 'Z' ): self._ZoomSwitch() + else: self.GetParent().ProcessEvent( event ) + + + + def EventMenu( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'frame_back': self._ChangeFrame( -1 ) + elif command == 'frame_next': self._ChangeFrame( 1 ) + elif command == 'manage_ratings': self._ManageRatings() + elif command == 'manage_tags': self._ManageTags() + elif command == 'zoom_in': self._ZoomIn() + elif command == 'zoom_out': self._ZoomOut() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + + + + + def EventMouseWheel( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if event.CmdDown(): + + if event.GetWheelRotation() > 0: self._ZoomIn() + else: self._ZoomOut() + + + + + def EventTimerMediaInfoDisplay( self, event ): self.SetCursor( wx.StockCursor( wx.CURSOR_BLANK ) ) + + def RefreshBackground( self ): self._DrawBackgroundBitmap() + +class CanvasPanel( Canvas, wx.Window ): + + def __init__( self, 
parent, page_key, file_service_identifier ): + + wx.Window.__init__( self, parent, style = wx.SIMPLE_BORDER ) + Canvas.__init__( self, file_service_identifier, wx.GetApp().GetPreviewImageCache() ) + + self._page_key = page_key + + HC.pubsub.sub( self, 'FocusChanged', 'focus_changed' ) + HC.pubsub.sub( self, 'ProcessContentUpdates', 'content_updates_gui' ) + + wx.CallAfter( self.Refresh ) + + + def FocusChanged( self, page_key, media ): + + if page_key == self._page_key: self.SetMedia( media ) + + + def ProcessContentUpdates( self, updates ): + + if self._current_display_media is not None: + + my_hash = self._current_display_media.GetHash() + + if True in ( my_hash in update.GetHashes() for update in updates ): + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + + +class CanvasFullscreenMediaList( ClientGUIMixins.ListeningMediaList, Canvas, ClientGUICommon.Frame ): + + def __init__( self, my_parent, page_key, file_service_identifier, predicates, media_results ): + + ClientGUICommon.Frame.__init__( self, my_parent, title = 'hydrus client fullscreen image viewer' ) + Canvas.__init__( self, file_service_identifier, wx.GetApp().GetFullscreenImageCache() ) + ClientGUIMixins.ListeningMediaList.__init__( self, file_service_identifier, predicates, media_results ) + + self._page_key = page_key + + self._menu_open = False + + self._just_started = True + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + if True: # if borderless fullscreen + + self.ShowFullScreen( True, wx.FULLSCREEN_ALL ) + + self.SetCursor( wx.StockCursor( wx.CURSOR_BLANK ) ) + + else: + + self.Maximize() + + self.Show( True ) + + + wx.GetApp().SetTopWindow( self ) + + self._timer_media_info_display = wx.Timer( self, id = ID_TIMER_MEDIA_INFO_DISPLAY ) + + self.Bind( wx.EVT_TIMER, self.EventTimerMediaInfoDisplay, id = ID_TIMER_MEDIA_INFO_DISPLAY ) + + self.Bind( wx.EVT_MOTION, self.EventDrag ) + self.Bind( wx.EVT_LEFT_DOWN, self.EventDragBegin ) + self.Bind( wx.EVT_LEFT_UP, self.EventDragEnd ) + + HC.pubsub.pub( 'set_focus', self._page_key, None ) + + + def _GetCollectionsString( self ): + + collections_string = '' + + ( creators, series, titles, volumes, chapters, pages ) = self._current_media.GetTags().GetCSTVCP() + + if len( creators ) > 0: + + collections_string_append = ', '.join( creators ) + + if len( collections_string ) > 0: collections_string += ' - ' + collections_string_append + else: collections_string = collections_string_append + + + if len( series ) > 0: + + collections_string_append = ', '.join( series ) + + if len( collections_string ) > 0: collections_string += ' - ' + collections_string_append + else: collections_string = collections_string_append + + + if len( titles ) > 0: + + collections_string_append = ', '.join( titles ) + + if len( collections_string ) > 0: collections_string += ' - ' + collections_string_append + else: collections_string = collections_string_append + + + if len( volumes ) > 0: + + if len( volumes ) == 1: + + ( volume, ) = volumes + + collections_string_append = 'volume ' + str( volume ) + + else: collections_string_append = 'volumes ' + str( min( volumes ) ) + '-' + str( max( volumes ) ) + + if len( collections_string ) > 0: collections_string += ' - ' + collections_string_append + else: collections_string = collections_string_append + + + if len( chapters ) > 0: + + if len( chapters ) == 1: + + ( chapter, ) = chapters + + collections_string_append = 'chapter ' + str( chapter ) + + else: collections_string_append = 'chapters ' + 
str( min( chapters ) ) + '-' + str( max( chapters ) ) + + if len( collections_string ) > 0: collections_string += ' - ' + collections_string_append + else: collections_string = collections_string_append + + + if len( pages ) > 0: + + if len( pages ) == 1: + + ( page, ) = pages + + collections_string_append = 'page ' + str( page ) + + else: collections_string_append = 'pages ' + str( min( pages ) ) + '-' + str( max( pages ) ) + + if len( collections_string ) > 0: collections_string += ' - ' + collections_string_append + else: collections_string = collections_string_append + + + return collections_string + + + def _GetInfoString( self ): + + info_string = self._current_media.GetPrettyInfo() + ' ' + HC.ConvertZoomToPercentage( self._current_zoom ) + + return info_string + + + def _GetIndexString( self ): + + index_string = HC.ConvertIntToPrettyString( self._sorted_media_to_indices[ self._current_media ] + 1 ) + os.path.sep + HC.ConvertIntToPrettyString( len( self._sorted_media ) ) + + return index_string + + + def _PrefetchImages( self ): + + to_render = [] + + previous = self._current_media + next = self._current_media + + if self._just_started: + + extra_delay_base = 800 + + self._just_started = False + + else: extra_delay_base = 200 + + + for i in range( 10 ): + + previous = self._GetPrevious( previous ) + next = self._GetNext( next ) + + to_render.append( ( previous, 100 + ( extra_delay_base * 2 * i * i ) ) ) + to_render.append( ( next, 100 + ( extra_delay_base * i * i ) ) ) + + + ( my_width, my_height ) = self.GetClientSize() + + for ( media, delay ) in to_render: + + hash = media.GetHash() + + if media.GetMime() in ( HC.IMAGE_JPEG, HC.IMAGE_PNG ): + + ( media_width, media_height ) = media.GetResolution() + + if media_width > my_width or media_height > my_height: + + width_zoom = my_width / float( media_width ) + + height_zoom = my_height / float( media_height ) + + zoom = min( ( width_zoom, height_zoom ) ) + + else: zoom = 1.0 + + resolution_to_request = ( int( round( zoom * media_width ) ), int( round( zoom * media_height ) ) ) + + if not self._image_cache.HasImage( hash, resolution_to_request ): wx.CallLater( delay, self._image_cache.GetImage, hash, resolution_to_request ) + + + + + def _ShowFirst( self ): self.SetMedia( self._GetFirst() ) + + def _ShowLast( self ): self.SetMedia( self._GetLast() ) + + def _ShowNext( self ): self.SetMedia( self._GetNext( self._current_media ) ) + + def _ShowPrevious( self ): self.SetMedia( self._GetPrevious( self._current_media ) ) + + def _StartSlideshow( self, interval ): pass + + def _ZoomIn( self ): + + if self._current_media is not None: + + for zoom in ZOOMINS: + + if self._current_zoom < zoom: + + if self._current_media.GetMime() in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ): + + # because of the event passing under mouse, we want to preserve whitespace around flash + + ( original_width, original_height ) = self._current_display_media.GetResolution() + + ( my_width, my_height ) = self.GetClientSize() + + new_media_width = int( round( original_width * zoom ) ) + new_media_height = int( round( original_height * zoom ) ) + + if new_media_width >= my_width or new_media_height >= my_height: return + + + with wx.FrozenWindow( self ): + + ( drag_x, drag_y ) = self._total_drag_delta + + zoom_ratio = zoom / self._current_zoom + + self._total_drag_delta = ( int( drag_x * zoom_ratio ), int( drag_y * zoom_ratio ) ) + + self._current_zoom = zoom + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + break + + + + + + def _ZoomOut( self ): + + if 
self._current_media is not None: + + for zoom in ZOOMOUTS: + + if self._current_zoom > zoom: + + with wx.FrozenWindow( self ): + + ( drag_x, drag_y ) = self._total_drag_delta + + zoom_ratio = zoom / self._current_zoom + + self._total_drag_delta = ( int( drag_x * zoom_ratio ), int( drag_y * zoom_ratio ) ) + + self._current_zoom = zoom + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + break + + + + + + def _ZoomSwitch( self ): + + ( my_width, my_height ) = self.GetClientSize() + + ( media_width, media_height ) = self._current_display_media.GetResolution() + + if self._current_media.GetMime() not in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ) or self._current_zoom > 1.0 or ( media_width < my_width and media_height < my_height ): + + new_zoom = self._current_zoom + + if self._current_zoom == 1.0: + + if media_width > my_width or media_height > my_height: + + width_zoom = my_width / float( media_width ) + + height_zoom = my_height / float( media_height ) + + new_zoom = min( ( width_zoom, height_zoom ) ) + + + else: new_zoom = 1.0 + + if new_zoom != self._current_zoom: + + ( drag_x, drag_y ) = self._total_drag_delta + + zoom_ratio = new_zoom / self._current_zoom + + self._total_drag_delta = ( int( drag_x * zoom_ratio ), int( drag_y * zoom_ratio ) ) + + self._current_zoom = new_zoom + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + + + + def Archive( self, hashes ): + + next_media = self._GetNext( self._current_media ) + + if next_media == self._current_media: next_media = None + + ClientGUIMixins.ListeningMediaList.Archive( self, hashes ) + + if self.HasNoMedia(): self.EventClose( None ) + elif self.HasMedia( self._current_media ): self._DrawCurrentMedia() + else: self.SetMedia( next_media ) + + + def EventClose( self, event ): + + HC.pubsub.pub( 'set_focus', self._page_key, self._current_media ) + + self.Destroy() + + + def EventDrag( self, event ): + + self._focus_holder.SetFocus() + + if event.Dragging() and self._last_drag_coordinates is not None: + + ( old_x, old_y ) = self._last_drag_coordinates + + ( x, y ) = event.GetPosition() + + ( delta_x, delta_y ) = ( x - old_x, y - old_y ) + + try: self.WarpPointer( old_x, old_y ) + except: self._last_drag_coordinates = ( x, y ) + + ( old_delta_x, old_delta_y ) = self._total_drag_delta + + self._total_drag_delta = ( old_delta_x + delta_x, old_delta_y + delta_y ) + + self._DrawCurrentMedia() + + + self.SetCursor( wx.StockCursor( wx.CURSOR_ARROW ) ) + + self._timer_media_info_display.Start( 800, wx.TIMER_ONE_SHOT ) + + + def EventDragBegin( self, event ): + + ( x, y ) = event.GetPosition() + + ( client_x, client_y ) = self.GetClientSize() + + if x < 20 or x > client_x - 20 or y < 20 or y > client_y -20: + + try: + + better_x = x + better_y = y + + if x < 20: better_x = 20 + if y < 20: better_y = 20 + + if x > client_x - 20: better_x = client_x - 20 + if y > client_y - 20: better_y = client_y - 20 + + self.WarpPointer( better_x, better_y ) + + x = better_x + y = better_y + + except: pass + + + self._last_drag_coordinates = ( x, y ) + + event.Skip() + + + def EventDragEnd( self, event ): + + self._last_drag_coordinates = None + + event.Skip() + + + def EventTimerMediaInfoDisplay( self, event ): + + if self._menu_open: self._timer_media_info_display.Start( 800, wx.TIMER_ONE_SHOT ) + else: self.SetCursor( wx.StockCursor( wx.CURSOR_BLANK ) ) + + + def KeepCursorAlive( self ): self._timer_media_info_display.Start( 800, wx.TIMER_ONE_SHOT ) + + def ProcessContentUpdates( self, updates ): + + next_media = self._GetNext( 
self._current_media ) + + if next_media == self._current_media: next_media = None + + ClientGUIMixins.ListeningMediaList.ProcessContentUpdates( self, updates ) + + if self.HasNoMedia(): self.EventClose( None ) + elif self.HasMedia( self._current_media ): + + self._DrawBackgroundBitmap() + + self._DrawCurrentMedia() + + else: self.SetMedia( next_media ) + + +class CanvasFullscreenMediaListBrowser( CanvasFullscreenMediaList ): + + def __init__( self, my_parent, page_key, file_service_identifier, predicates, media_results, first_hash ): + + CanvasFullscreenMediaList.__init__( self, my_parent, page_key, file_service_identifier, predicates, media_results ) + + self._timer_slideshow = wx.Timer( self, id = ID_TIMER_SLIDESHOW ) + + self.Bind( wx.EVT_TIMER, self.EventTimerSlideshow, id = ID_TIMER_SLIDESHOW ) + + self.Bind( wx.EVT_LEFT_DCLICK, self.EventClose ) + self.Bind( wx.EVT_MIDDLE_DOWN, self.EventClose ) + self.Bind( wx.EVT_MOUSEWHEEL, self.EventMouseWheel ) + self.Bind( wx.EVT_RIGHT_UP, self.EventShowMenu ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + if first_hash is None: self.SetMedia( self._GetFirst() ) + else: self.SetMedia( self._GetMedia( { first_hash } )[0] ) + + + def _Archive( self ): wx.GetApp().Write( 'content_updates', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] ) + + def _CopyLocalUrlToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( 'http://127.0.0.1:45865/file?hash=' + self._current_media.GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _CopyPathToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._current_media.GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _Delete( self ): + + with ClientGUIDialogs.DialogYesNo( self, 'Delete this file from the database?' 
) as dlg: + + if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] ) + + + self.SetFocus() # annoying bug because of the modal dialog + + + def _PausePlaySlideshow( self ): + + if self._timer_slideshow.IsRunning(): self._timer_slideshow.Stop() + elif self._timer_slideshow.GetInterval() > 0: self._timer_slideshow.Start() + + + def _StartSlideshow( self, interval = None ): + + self._timer_slideshow.Stop() + + if interval is None: + + with wx.TextEntryDialog( self, 'Enter the interval, in seconds', defaultValue='15' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: interval = int( float( dlg.GetValue() ) * 1000 ) + except: return + + + + + if interval > 0: self._timer_slideshow.Start( interval, wx.TIMER_CONTINUOUS ) + + + def EventKeyDown( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + ( modifier, key ) = HC.GetShortcutFromEvent( event ) + + key_dict = self._options[ 'shortcuts' ][ modifier ] + + if key in key_dict: + + action = key_dict[ key ] + + self.ProcessEvent( wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) ) + + else: + + if event.KeyCode in ( wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE ): self._Delete() + elif event.KeyCode in ( wx.WXK_SPACE, wx.WXK_NUMPAD_SPACE ): self._PausePlaySlideshow() + elif event.KeyCode in ( ord( '+' ), wx.WXK_ADD, wx.WXK_NUMPAD_ADD ): self._ZoomIn() + elif event.KeyCode in ( ord( '-' ), wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT ): self._ZoomOut() + elif event.KeyCode == ord( 'Z' ): self._ZoomSwitch() + elif event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER, wx.WXK_ESCAPE ): self.EventClose( event ) + else: event.Skip() + + + + + def EventMenu( self, event ): + + # is None bit means this is prob from a keydown->menu event + if event.GetEventObject() is None and self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'archive': self._Archive() + elif command == 'copy_local_url': self._CopyLocalUrlToClipboard() + elif command == 'copy_path': self._CopyPathToClipboard() + elif command == 'delete': self._Delete() + elif command == 'first': self._ShowFirst() + elif command == 'last': self._ShowLast() + elif command == 'previous': self._ShowPrevious() + elif command == 'next': self._ShowNext() + elif command == 'frame_back': self._ChangeFrame( -1 ) + elif command == 'frame_next': self._ChangeFrame( 1 ) + elif command == 'manage_ratings': self._ManageRatings() + elif command == 'manage_tags': self._ManageTags() + elif command == 'slideshow': self._StartSlideshow( data ) + elif command == 'slideshow_pause_play': self._PausePlaySlideshow() + elif command == 'zoom_in': self._ZoomIn() + elif command == 'zoom_out': self._ZoomOut() + elif command == 'zoom_switch': self._ZoomSwitch() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + + + + + def EventMouseWheel( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if event.CmdDown(): + + if event.GetWheelRotation() > 0: self._ZoomIn() + else: self._ZoomOut() + + else: + + if event.GetWheelRotation() > 0: self._ShowPrevious() + else: self._ShowNext() + + + + + def EventShowMenu( self, event ): + + self._last_drag_coordinates = None # to stop successive right-click 
drag warp bug + + menu = wx.Menu() + + menu.Append( CC.ID_NULL, self._current_media.GetPrettyInfo() ) + menu.Append( CC.ID_NULL, self._current_media.GetPrettyAge() ) + + menu.AppendSeparator() + + menu.Append( CC.ID_NULL, 'current zoom: ' + HC.ConvertZoomToPercentage( self._current_zoom ) ) + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_in' ), 'zoom in' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_out' ), 'zoom out' ) + + # + + if self._current_media.GetMime() not in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ): + + ( my_width, my_height ) = self.GetClientSize() + + ( media_width, media_height ) = self._current_display_media.GetResolution() + + if self._current_zoom == 1.0: + + if media_width > my_width or media_height > my_height: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_switch' ), 'zoom fit' ) + + else: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_switch' ), 'zoom full' ) + + + # + + menu.AppendSeparator() + + if self._current_media.HasInbox(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'archive' ), '&archive' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), '&delete' ) + + menu.AppendSeparator() + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_path' ) , 'copy path' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_local_url' ) , 'copy local url' ) + + menu.AppendSeparator() + + slideshow = wx.Menu() + + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow', 1000 ), '1 second' ) + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow', 5000 ), '5 seconds' ) + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow', 10000 ), '10 seconds' ) + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow', 30000 ), '30 seconds' ) + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow', 60000 ), '60 seconds' ) + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow', 80 ), 'william gibson' ) + slideshow.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow' ), 'custom interval' ) + + menu.AppendMenu( CC.ID_NULL, 'Start Slideshow', slideshow ) + if self._timer_slideshow.IsRunning(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'slideshow_pause_play' ), 'stop slideshow' ) + + self._menu_open = True + + self.PopupMenu( menu ) + + self._menu_open = False + + menu.Destroy() + + event.Skip() + + + def EventTimerSlideshow( self, event ): self._ShowNext() + +class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaList ): + + def __init__( self, my_parent, page_key, file_service_identifier, predicates, media_results, actions ): + + CanvasFullscreenMediaList.__init__( self, my_parent, page_key, file_service_identifier, predicates, media_results ) + + self._actions = actions + + self.Bind( wx.EVT_LEFT_DCLICK, self.EventClose ) + self.Bind( wx.EVT_MIDDLE_DOWN, self.EventClose ) + self.Bind( wx.EVT_MOUSEWHEEL, self.EventMouseWheel ) + self.Bind( wx.EVT_RIGHT_UP, self.EventShowMenu ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self.SetMedia( self._GetFirst() ) + + + def _Archive( self ): wx.GetApp().Write( 'content_updates', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] ) + + def _CopyLocalUrlToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( 'http://127.0.0.1:45865/file?hash=' + 
self._current_media.GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _CopyPathToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._current_media.GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _Delete( self ): + + with ClientGUIDialogs.DialogYesNo( self, 'Delete this file from the database?' ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'content_updates', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, ( self._current_media.GetHash(), ) ) ] ) + + + self.SetFocus() # annoying bug because of the modal dialog + + + def EventKeyDown( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + ( modifier, key ) = HC.GetShortcutFromEvent( event ) + + key_dict = self._actions[ modifier ] + + if key in key_dict: + + ( service_identifier, action ) = key_dict[ key ] + + if service_identifier is None: + + if action == 'archive': self._Archive() + elif action == 'delete': self._Delete() + elif action == 'frame_back': self._ChangeFrame( -1 ) + elif action == 'frame_next': self._ChangeFrame( 1 ) + elif action == 'manage_ratings': self._ManageRatings() + elif action == 'manage_tags': self._ManageTags() + elif action == 'first': self._ShowFirst() + elif action == 'last': self._ShowLast() + elif action == 'previous': self._ShowPrevious() + elif action == 'next': self._ShowNext() + + else: + + service_type = service_identifier.GetType() + + if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + tags = self._current_media.GetTags() + + ( current, deleted, pending, petitioned ) = tags.GetCDPP( service_identifier ) + + if service_type == HC.LOCAL_TAG: + + if action in current: edit_log = [ ( CC.CONTENT_UPDATE_DELETE, action ) ] + else: edit_log = [ ( CC.CONTENT_UPDATE_ADD, action ) ] + + else: + + if action in current: + + if action in petitioned: edit_log = [ ( CC.CONTENT_UPDATE_RESCIND_PETITION, action ) ] + else: + + message = 'Enter a reason for this tag to be removed. A janitor will review your petition.' 
+ + with wx.TextEntryDialog( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: edit_log = [ ( CC.CONTENT_UPDATE_PETITION, ( action, dlg.GetValue() ) ) ] + else: return + + + + else: + + if action in pending: edit_log = [ ( CC.CONTENT_UPDATE_RESCIND_PENDING, action ) ] + else: edit_log = [ ( CC.CONTENT_UPDATE_PENDING, action ) ] + + + + content_update = CC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( self._current_media.GetHash(), ), info = edit_log ) + + elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + content_update = CC.ContentUpdate( CC.CONTENT_UPDATE_RATING, service_identifier, ( self._current_media.GetHash(), ), info = action ) + + + wx.GetApp().Write( 'content_updates', ( content_update, ) ) + + + else: + + if event.KeyCode in ( ord( '+' ), wx.WXK_ADD, wx.WXK_NUMPAD_ADD ): self._ZoomIn() + elif event.KeyCode in ( ord( '-' ), wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT ): self._ZoomOut() + elif event.KeyCode == ord( 'Z' ): self._ZoomSwitch() + elif event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER, wx.WXK_ESCAPE ): self.EventClose( event ) + else: event.Skip() + + + + + def EventMenu( self, event ): + + # is None bit means this is prob from a keydown->menu event + if event.GetEventObject() is None and self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'archive': self._Archive() + elif command == 'copy_local_url': self._CopyLocalUrlToClipboard() + elif command == 'copy_path': self._CopyPathToClipboard() + elif command == 'delete': self._Delete() + elif command == 'first': self._ShowFirst() + elif command == 'last': self._ShowLast() + elif command == 'previous': self._ShowPrevious() + elif command == 'next': self._ShowNext() + elif command == 'frame_back': self._ChangeFrame( -1 ) + elif command == 'frame_next': self._ChangeFrame( 1 ) + elif command == 'manage_ratings': self._ManageRatings() + elif command == 'manage_tags': self._ManageTags() + elif command == 'slideshow': self._StartSlideshow( data ) + elif command == 'slideshow_pause_play': self._PausePlaySlideshow() + elif command == 'zoom_in': self._ZoomIn() + elif command == 'zoom_out': self._ZoomOut() + elif command == 'zoom_switch': self._ZoomSwitch() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + + + + + def EventMouseWheel( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if event.CmdDown(): + + if event.GetWheelRotation() > 0: self._ZoomIn() + else: self._ZoomOut() + + else: + + if event.GetWheelRotation() > 0: self._ShowPrevious() + else: self._ShowNext() + + + + + def EventShowMenu( self, event ): + + self._last_drag_coordinates = None # to stop successive right-click drag warp bug + + menu = wx.Menu() + + menu.Append( CC.ID_NULL, self._current_media.GetPrettyInfo() ) + menu.Append( CC.ID_NULL, self._current_media.GetPrettyAge() ) + + menu.AppendSeparator() + + menu.Append( CC.ID_NULL, 'current zoom: ' + HC.ConvertZoomToPercentage( self._current_zoom ) ) + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_in' ), 'zoom in' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_out' ), 'zoom out' ) + + # + + if self._current_media.GetMime() not in ( HC.APPLICATION_FLASH, HC.VIDEO_FLV ): + + ( my_width, my_height ) = self.GetClientSize() + + ( media_width, media_height ) = self._current_display_media.GetResolution() + + 
if self._current_zoom == 1.0: + + if media_width > my_width or media_height > my_height: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_switch' ), 'zoom fit' ) + + else: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'zoom_switch' ), 'zoom full' ) + + + # + + menu.AppendSeparator() + + if self._current_media.HasInbox(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'archive' ), '&archive' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), '&delete' ) + + menu.AppendSeparator() + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_path' ) , 'copy path' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_local_url' ) , 'copy local url' ) + + menu.AppendSeparator() + + self._menu_open = True + + self.PopupMenu( menu ) + + self._menu_open = False + + menu.Destroy() + + event.Skip() + + +class CanvasFullscreenMediaListFilter( CanvasFullscreenMediaList ): + + def __init__( self, my_parent, page_key, file_service_identifier, predicates, media_results ): + + CanvasFullscreenMediaList.__init__( self, my_parent, page_key, file_service_identifier, predicates, media_results ) + + self._kept = set() + self._deleted = set() + + self.Bind( wx.EVT_LEFT_DOWN, self.EventMouseKeep ) + self.Bind( wx.EVT_LEFT_DCLICK, self.EventMouseKeep ) + self.Bind( wx.EVT_MIDDLE_DOWN, self.EventBack ) + self.Bind( wx.EVT_MIDDLE_DCLICK, self.EventBack ) + self.Bind( wx.EVT_MOUSEWHEEL, self.EventMouseWheel ) + self.Bind( wx.EVT_RIGHT_DOWN, self.EventDelete ) + self.Bind( wx.EVT_RIGHT_DCLICK, self.EventDelete ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self.SetMedia( self._GetFirst() ) + + + def _Keep( self ): + + self._kept.add( self._current_media ) + + if self._current_media == self._GetLast(): self.EventClose( None ) + else: self._ShowNext() + + + def EventBack( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if self._current_media == self._GetFirst(): return + else: + + self._ShowPrevious() + + self._kept.discard( self._current_media ) + self._deleted.discard( self._current_media ) + + + + + def EventKeyDown( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + ( modifier, key ) = HC.GetShortcutFromEvent( event ) + + key_dict = self._options[ 'shortcuts' ][ modifier ] + + if key in key_dict: + + action = key_dict[ key ] + + self.ProcessEvent( wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) ) + + else: + + if event.KeyCode == wx.WXK_SPACE: self._Keep() + elif event.KeyCode in ( ord( '+' ), wx.WXK_ADD, wx.WXK_NUMPAD_ADD ): self._ZoomIn() + elif event.KeyCode in ( ord( '-' ), wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT ): self._ZoomOut() + elif event.KeyCode == ord( 'Z' ): self._ZoomSwitch() + elif event.KeyCode == wx.WXK_BACK: self.EventBack( event ) + elif event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER, wx.WXK_ESCAPE ): self.EventClose( event ) + elif event.KeyCode in ( wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE ): self.EventDelete( event ) + elif event.KeyCode in ( wx.WXK_UP, wx.WXK_NUMPAD_UP ): self.EventSkip( event ) + else: event.Skip() + + + + + def EventClose( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if len( self._kept ) > 0 or len( self._deleted ) > 0: + + with ClientGUIDialogs.DialogFinishFiltering( self, len( self._kept ), len( self._deleted ) ) as dlg: + + modal = dlg.ShowModal() + + if 
modal == wx.ID_CANCEL: + + if self._current_media in self._kept: self._kept.remove( self._current_media ) + if self._current_media in self._deleted: self._deleted.remove( self._current_media ) + + else: + + if modal == wx.ID_YES: + + try: + + self._deleted_hashes = [ media.GetHash() for media in self._deleted ] + self._kept_hashes = [ media.GetHash() for media in self._kept ] + + content_updates = [] + + content_updates.append( CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, self._deleted_hashes ) ) + content_updates.append( CC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, self._kept_hashes ) ) + + wx.GetApp().Write( 'content_updates', content_updates ) + + self._kept = set() + self._deleted = set() + + except: wx.MessageBox( traceback.format_exc() ) + + + CanvasFullscreenMediaList.EventClose( self, event ) + + + + else: CanvasFullscreenMediaList.EventClose( self, event ) + + + + def EventDelete( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + self._deleted.add( self._current_media ) + + if self._current_media == self._GetLast(): self.EventClose( event ) + else: self._ShowNext() + + + + def EventMouseKeep( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if event.ShiftDown(): self.EventDragBegin( event ) + else: self._Keep() + + + + def EventMenu( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'archive': self._Keep() + elif command == 'back': self.EventBack( event ) + elif command == 'close': self.EventClose( event ) + elif command == 'delete': self.EventDelete( event ) + elif command == 'filter': self.EventClose( event ) + elif command == 'frame_back': self._ChangeFrame( -1 ) + elif command == 'frame_next': self._ChangeFrame( 1 ) + elif command == 'manage_ratings': self._ManageRatings() + elif command == 'manage_tags': self._ManageTags() + elif command == 'zoom_in': self._ZoomIn() + elif command == 'zoom_out': self._ZoomOut() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + + + + + def EventMouseWheel( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if event.CmdDown(): + + if event.GetWheelRotation() > 0: self._ZoomIn() + else: self._ZoomOut() + + + + + def EventSkip( self, event ): + + if self._ShouldSkipInputDueToFlash(): event.Skip() + else: + + if self._current_media == self._GetLast(): self.EventClose( event ) + else: self._ShowNext() + + + +class RatingsFilterFrame( ClientGUICommon.Frame ): + + RATINGS_FILTER_INEQUALITY_FULL = 0 + RATINGS_FILTER_INEQUALITY_HALF = 1 + RATINGS_FILTER_INEQUALITY_QUARTER = 2 + + RATINGS_FILTER_EQUALITY_FULL = 0 + RATINGS_FILTER_EQUALITY_HALF = 1 + RATINGS_FILTER_EQUALITY_QUARTER = 2 + + def __init__( self, parent, page_key, service_identifier, media_results ): + + ClientGUICommon.Frame.__init__( self, parent, title = 'hydrus client ratings frame' ) + + self._page_key = page_key + self._service_identifier = service_identifier + self._media_still_to_rate = { ClientGUIMixins.MediaSingleton( media_result ) for media_result in media_results } + + self._file_query_result = CC.FileQueryResult( CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results ) + + if service_identifier.GetType() == HC.LOCAL_RATING_LIKE: self._score_gap = 1.0 + else: + + self._service = wx.GetApp().Read( 
'service', service_identifier ) + + ( self._lower, self._upper ) = self._service.GetExtraInfo() + + self._score_gap = 1.0 / ( self._upper - self._lower ) + + + hashes_to_min_max = wx.GetApp().Read( 'ratings_filter', service_identifier, [ media_result.GetHash() for media_result in media_results ] ) + + self._media_to_initial_scores_dict = { media : hashes_to_min_max[ media.GetHash() ] for media in self._media_still_to_rate } + + self._decision_log = [] + + self._ReinitialiseCurrentScores() + + self._inequal_accuracy = self.RATINGS_FILTER_INEQUALITY_FULL + self._equal_accuracy = self.RATINGS_FILTER_EQUALITY_FULL + + # panel + + if service_identifier.GetType() == HC.LOCAL_RATING_NUMERICAL: + + top_panel = wx.Panel( self ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + if 'ratings_filter_accuracy' not in self._options: + + self._options[ 'ratings_filter_accuracy' ] = 1 + + wx.GetApp().Write( 'save_options' ) + + + value = self._options[ 'ratings_filter_accuracy' ] + + self._accuracy_slider = wx.Slider( top_panel, value = value, minValue = 0, maxValue = 4 ) + self._accuracy_slider.Bind( wx.EVT_SLIDER, self.EventSlider ) + + self.EventSlider( None ) + + hbox.AddF( wx.StaticText( top_panel, label = 'quick' ), FLAGS_MIXED ) + hbox.AddF( self._accuracy_slider, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( wx.StaticText( top_panel, label = 'accurate' ), FLAGS_MIXED ) + + top_panel.SetSizer( hbox ) + + + # end panel + + self._statusbar = self.CreateStatusBar() + self._statusbar.SetFieldsCount( 3 ) + self._statusbar.SetStatusWidths( [ -1, 500, -1 ] ) + + self._splitter = wx.SplitterWindow( self ) + self._splitter.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + if service_identifier.GetType() == HC.LOCAL_RATING_NUMERICAL: vbox.AddF( top_panel, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._splitter, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + self._splitter.SetMinimumPaneSize( 120 ) + self._splitter.SetSashGravity( 0.0 ) + + if True: # if borderless fullscreen + + self.ShowFullScreen( True, wx.FULLSCREEN_ALL ^ wx.FULLSCREEN_NOSTATUSBAR ) + + else: + + self.Maximize() + + self.Show( True ) + + + wx.GetApp().SetTopWindow( self ) + + self._left_window = CanvasRatingsFilterPanel( self._splitter ) + self._right_window = CanvasRatingsFilterPanel( self._splitter ) + + ( my_width, my_height ) = self.GetClientSize() + + self._splitter.SplitVertically( self._left_window, self._right_window, my_width / 2 ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + self.Bind( wx.EVT_LEFT_DOWN, self.EventMouseDown ) + self.Bind( wx.EVT_RIGHT_DOWN, self.EventMouseDown ) + + self._ShowNewMedia() + + HC.pubsub.sub( self, 'ProcessContentUpdates', 'content_updates_gui' ) + HC.pubsub.sub( self, 'ProcessServiceUpdate', 'service_update_gui' ) + + + def _GoBack( self ): + + if len( self._decision_log ) > 0: + + ( action, entry ) = self._decision_log[-1] + + if action == 'external': ( min, max, self._current_media_to_rate, self._current_media_to_rate_against, self._unrated_is_on_the_left ) = entry + elif action == 'internal': ( min, max, self._current_media_to_rate, other_min, other_max, self._current_media_to_rate_against, self._unrated_is_on_the_left ) = entry + + if self._unrated_is_on_the_left: + + self._left_window.SetMedia( self._current_media_to_rate ) + self._right_window.SetMedia( self._current_media_to_rate_against ) + + else: + + self._left_window.SetMedia( self._current_media_to_rate_against ) + self._right_window.SetMedia( self._current_media_to_rate ) + + + self._decision_log = 
self._decision_log[:-1] + + self._ReinitialiseCurrentScores() + + + self._RefreshStatusBar() + + + def _RefreshStatusBar( self ): + + certain_ratings = [ ( media, ( min, max ) ) for ( media, ( min, max ) ) in self._media_to_current_scores_dict.items() if max - min < self._score_gap ] + uncertain_ratings = [ ( media, ( min, max ) ) for ( media, ( min, max ) ) in self._media_to_current_scores_dict.items() if max - min >= self._score_gap and self._media_to_current_scores_dict[ media ] != self._media_to_initial_scores_dict[ media ] ] + + service_type = self._service_identifier.GetType() + + if service_type == HC.LOCAL_RATING_LIKE: + + current_string = 'uncertain' + + if self._current_media_to_rate_against in self._media_still_to_rate: against_string = 'uncertain' + else: + + against_string = 'already rated' + + if self._current_media_to_rate_against in self._media_to_current_scores_dict: + + ( other_min, other_max ) = self._media_to_current_scores_dict[ self._current_media_to_rate_against ] + + rating = other_min + + else: + + ( local_ratings, remote_ratings ) = self._current_media_to_rate_against.GetRatings() + + rating = local_ratings.GetRating( self._service_identifier ) + + + if other_min == 0.0: against_string += ' - dislike' + else: against_string += ' - like' + + + center_string = str( len( self._media_to_initial_scores_dict ) ) + ' files being rated. after ' + str( len( self._decision_log ) ) + ' decisions, ' + str( len( certain_ratings ) ) + ' are certain' + + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + ( min, max ) = self._media_to_current_scores_dict[ self._current_media_to_rate ] + + current_string = 'between ' + HC.ConvertNumericalRatingToPrettyString( self._lower, self._upper, min, out_of = False ) + ' and ' + HC.ConvertNumericalRatingToPrettyString( self._lower, self._upper, max, out_of = False ) + + if self._current_media_to_rate_against in self._media_still_to_rate: + + ( other_min, other_max ) = self._media_to_current_scores_dict[ self._current_media_to_rate_against ] + + against_string = 'between ' + HC.ConvertNumericalRatingToPrettyString( self._lower, self._upper, other_min, out_of = False ) + ' and ' + HC.ConvertNumericalRatingToPrettyString( self._lower, self._upper, other_max, out_of = False ) + + else: + + against_string = 'already rated' + + if self._current_media_to_rate_against in self._media_to_current_scores_dict: + + ( other_min, other_max ) = self._media_to_current_scores_dict[ self._current_media_to_rate_against ] + + rating = ( other_min + other_max ) / 2.0 + + else: + + ( local_ratings, remote_ratings ) = self._current_media_to_rate_against.GetRatings() + + rating = local_ratings.GetRating( self._service_identifier ) + + + against_string += ' - ' + HC.ConvertNumericalRatingToPrettyString( self._lower, self._upper, rating ) + + + center_string = str( len( self._media_to_initial_scores_dict ) ) + ' files being rated. 
after ' + str( len( self._decision_log ) ) + ' decisions, ' + str( len( certain_ratings ) ) + ' are certain and ' + str( len( uncertain_ratings ) ) + ' are uncertain' + + + if self._unrated_is_on_the_left: + + left_string = current_string + right_string = against_string + + else: + + left_string = against_string + right_string = current_string + + + self._statusbar.SetStatusText( left_string, number = 0 ) + self._statusbar.SetStatusText( center_string, number = 1 ) + self._statusbar.SetStatusText( right_string, number = 2 ) + + + def _ReinitialiseCurrentScores( self ): + + self._media_to_current_scores_dict = dict( self._media_to_initial_scores_dict ) + + self._already_rated_pairs = collections.defaultdict( set ) + + for ( action, entry ) in self._decision_log: + + if action == 'external': ( min, max, media_rated, media_rated_against, unrated_was_on_the_left ) = entry + elif action == 'internal': + + ( min, max, media_rated, other_min, other_max, media_rated_against, unrated_was_on_the_left ) = entry + + self._media_to_current_scores_dict[ media_rated_against ] = ( other_min, other_max ) + + + self._media_to_current_scores_dict[ media_rated ] = ( min, max ) + + self._already_rated_pairs[ media_rated ].add( media_rated_against ) + self._already_rated_pairs[ media_rated_against ].add( media_rated ) + + + self._media_still_to_rate = { media for ( media, ( min, max ) ) in self._media_to_current_scores_dict.items() if max - min >= self._score_gap } + + + def _ShowNewMedia( self ): + + ( self._current_media_to_rate, ) = random.sample( self._media_still_to_rate, 1 ) + + ( min, max ) = self._media_to_current_scores_dict[ self._current_media_to_rate ] + + media_result_to_rate_against = wx.GetApp().Read( 'ratings_media_result', self._service_identifier, min, max ) + + if media_result_to_rate_against is not None: + + hash = media_result_to_rate_against.GetHash() + + if hash in self._file_query_result.GetHashes(): media_result_to_rate_against = self._file_query_result.GetMediaResult( hash ) + else: self._file_query_result.AddMediaResult( media_result_to_rate_against ) + + media_to_rate_against = ClientGUIMixins.MediaSingleton( media_result_to_rate_against ) + + else: media_to_rate_against = None + + if media_to_rate_against in self._already_rated_pairs[ self._current_media_to_rate ]: media_to_rate_against = None + + if media_to_rate_against is None: + + internal_media = list( self._media_to_current_scores_dict.keys() ) + + random.shuffle( internal_media ) + + valid_internal_media = [ media for media in internal_media if media != self._current_media_to_rate and media not in self._already_rated_pairs[ self._current_media_to_rate ] and self._current_media_to_rate not in self._already_rated_pairs[ media ] ] + + best_media_first = Queue.PriorityQueue() + + for media in valid_internal_media: + + ( other_min, other_max ) = self._media_to_current_scores_dict[ media ] + + if not ( other_max < min or other_min > max ): # i.e. there is overlap in the two pairs of min,max + + # it is best when we have + # + # ######### + # #### + # + # and better when the gaps are large (increasing the uncertainty) + + # when we must choose + # + # ##### + # ###### + # + # saying the second is better gives no change, so we want to minimise the gaps, to increase the likelyhood of a 50-50-ish situation (increasing the uncertainty) + # better we move by self._score_gap half the time than 0 most of the time. + + # the square root stuff prioritises middle-of-the-road results. 
two fives is more useful than ten and zero + # total gap value is in the range 0.0 - 1.0 + # we times by -1 to prioritise and simultaneously reverse the overlapping-on-both-ends results for the priority queue + + min_gap = abs( other_min - min ) + max_gap = abs( other_max - max ) + + total_gap_value = ( min_gap ** 0.5 + max_gap ** 0.5 ) ** 2 + + if ( other_min < min and other_max > max ) or ( other_min > min and other_max < max ): total_gap_value *= -1 + + best_media_first.put( ( total_gap_value, media ) ) + + + + if best_media_first.qsize() > 0: ( value, media_to_rate_against ) = best_media_first.get() + + + if media_to_rate_against is None: + + message = 'The client has run out of comparisons to show you, and still cannot deduce what ratings everything should have. Commit what decisions you have made, and then please either rate some more files manually, or ratings filter a larger group.' + + wx.MessageBox( message ) + + self.EventClose( None ) + + else: + + self._current_media_to_rate_against = media_to_rate_against + + if random.randint( 0, 1 ) == 0: + + self._unrated_is_on_the_left = True + + self._left_window.SetMedia( self._current_media_to_rate ) + self._right_window.SetMedia( self._current_media_to_rate_against ) + + else: + + self._unrated_is_on_the_left = False + + self._left_window.SetMedia( self._current_media_to_rate_against ) + self._right_window.SetMedia( self._current_media_to_rate ) + + + self._RefreshStatusBar() + + + + def _ProcessAction( self, action ): + + ( min, max ) = self._media_to_current_scores_dict[ self._current_media_to_rate ] + + if self._current_media_to_rate_against in self._media_to_current_scores_dict: + + ( other_min, other_max ) = self._media_to_current_scores_dict[ self._current_media_to_rate_against ] + + rate_other = self._current_media_to_rate_against in self._media_still_to_rate + + if action in ( 'left', 'right' ): + + if self._inequal_accuracy == self.RATINGS_FILTER_INEQUALITY_FULL: adjustment = self._score_gap + if self._inequal_accuracy == self.RATINGS_FILTER_INEQUALITY_HALF: adjustment = 0 + elif self._inequal_accuracy == self.RATINGS_FILTER_INEQUALITY_QUARTER: adjustment = -self._score_gap + + if ( self._unrated_is_on_the_left and action == 'left' ) or ( not self._unrated_is_on_the_left and action == 'right' ): + + # unrated is better + + if min <= other_min: + + if min < other_min + adjustment: min = other_min + adjustment + else: min = other_min + self._score_gap / 2 + + + if other_max >= max: + + if other_max > max - adjustment: other_max = max - adjustment + else: other_max = max - self._score_gap / 2 + + + if min >= max: min = max + if other_max <= other_min: other_max = other_min + + else: + + # unrated is worse + + if other_min <= min: + + if other_min < min + adjustment: other_min = min + adjustment + else: other_min = min + self._score_gap / 2 + + + if max >= other_max: + + if max > other_max - adjustment: max = other_max - adjustment + else: max = other_max - self._score_gap / 2 + + + if other_min >= other_max: other_min = other_max + if max <= min: max = min + + + elif action == 'equal': + + if self._equal_accuracy == self.RATINGS_FILTER_EQUALITY_FULL: + + if min < other_min: min = other_min + else: other_min = min + + if max > other_max: max = other_max + else: other_max = max + + elif self._equal_accuracy == self.RATINGS_FILTER_EQUALITY_HALF: + + if min < other_min: min = ( min + other_min ) / 2 + else: other_min = ( min + other_min ) / 2 + + if max > other_max: max = ( max + other_max ) / 2 + else: other_max = ( max + other_max 
) / 2 + + elif self._equal_accuracy == self.RATINGS_FILTER_EQUALITY_QUARTER: + + if min < other_min: min = ( ( 3 * min ) + other_min ) / 4 + else: other_min = ( min + ( 3 * other_min ) ) / 4 + + if max > other_max: max = ( ( 3 * max ) + other_max ) / 4 + else: other_max = ( max + ( 3 * other_max ) ) / 4 + + + + if min < 0.0: min = 0.0 + if max > 1.0: max = 1.0 + + if other_min < 0.0: other_min = 0.0 + if other_max > 1.0: other_max = 1.0 + + if max - min < self._score_gap: self._media_still_to_rate.discard( self._current_media_to_rate ) + + if rate_other: + + if other_max - other_min < self._score_gap: self._media_still_to_rate.discard( self._current_media_to_rate_against ) + + self._media_to_current_scores_dict[ self._current_media_to_rate_against ] = ( other_min, other_max ) + + + decision = ( 'internal', ( min, max, self._current_media_to_rate, other_min, other_max, self._current_media_to_rate_against, self._unrated_is_on_the_left ) ) + + else: + + ( local_ratings, remote_ratings ) = self._current_media_to_rate_against.GetRatings() + + rating = local_ratings.GetRating( self._service_identifier ) + + if action in ( 'left', 'right' ): + + if self._inequal_accuracy == self.RATINGS_FILTER_INEQUALITY_FULL: adjustment = self._score_gap + if self._inequal_accuracy == self.RATINGS_FILTER_INEQUALITY_HALF: adjustment = 0 + elif self._inequal_accuracy == self.RATINGS_FILTER_INEQUALITY_QUARTER: adjustment = -self._score_gap + + if ( self._unrated_is_on_the_left and action == 'left' ) or ( not self._unrated_is_on_the_left and action == 'right' ): + + # unrated is better, so set new min + + if min <= rating: + + if min < rating + adjustment: min = rating + adjustment + else: min = rating + self._score_gap / 2 + + + if min > max: min = max + + else: + + # unrated is worse, so set new max + + if max >= rating: + + if max > rating - adjustment: max = rating - adjustment + else: max = rating - self._score_gap / 2 + + + if max < min: max = min + + + elif action == 'equal': + + if self._equal_accuracy == self.RATINGS_FILTER_EQUALITY_FULL: + + min = rating + max = rating + + elif self._equal_accuracy == self.RATINGS_FILTER_EQUALITY_HALF: + + min = ( min + rating ) / 2 + max = ( max + rating ) / 2 + + elif self._equal_accuracy == self.RATINGS_FILTER_EQUALITY_QUARTER: + + min = ( ( 3 * min ) + rating ) / 4 + max = ( ( 3 * max ) + rating ) / 4 + + + + if min < 0.0: min = 0.0 + if max > 1.0: max = 1.0 + + decision = ( 'external', ( min, max, self._current_media_to_rate, self._current_media_to_rate_against, self._unrated_is_on_the_left ) ) + + + self._decision_log.append( decision ) + + self._already_rated_pairs[ self._current_media_to_rate ].add( self._current_media_to_rate_against ) + self._already_rated_pairs[ self._current_media_to_rate_against ].add( self._current_media_to_rate ) + + if max - min < self._score_gap: self._media_still_to_rate.discard( self._current_media_to_rate ) + + self._media_to_current_scores_dict[ self._current_media_to_rate ] = ( min, max ) + + if len( self._media_still_to_rate ) == 0: self.EventClose( None ) + else: self._ShowNewMedia() + + + def EventClose( self, event ): + + if len( self._decision_log ) > 0: + + def normalise_rating( rating ): return round( rating / self._score_gap ) * self._score_gap + + certain_ratings = [ ( media.GetHash(), normalise_rating( ( min + max ) / 2 ) ) for ( media, ( min, max ) ) in self._media_to_current_scores_dict.items() if max - min < self._score_gap ] + uncertain_ratings = [ ( media.GetHash(), min, max ) for ( media, ( min, max ) ) in 
self._media_to_current_scores_dict.items() if max - min >= self._score_gap and self._media_to_current_scores_dict[ media ] != self._media_to_initial_scores_dict[ media ] ] + + with ClientGUIDialogs.DialogFinishRatingFiltering( self, len( certain_ratings ), len( uncertain_ratings ) ) as dlg: + + modal = dlg.ShowModal() + + if modal == wx.ID_CANCEL: + + self._ShowNewMedia() + + return + + elif modal == wx.ID_YES: + + try: + + content_updates = [] + + content_updates.extend( [ CC.ContentUpdate( CC.CONTENT_UPDATE_RATING, self._service_identifier, ( hash, ), info = rating ) for ( hash, rating ) in certain_ratings ] ) + content_updates.extend( [ CC.ContentUpdate( CC.CONTENT_UPDATE_RATINGS_FILTER, self._service_identifier, ( hash, ), info = ( min, max ) ) for ( hash, min, max ) in uncertain_ratings ] ) + + wx.GetApp().Write( 'content_updates', content_updates ) + + except: wx.MessageBox( traceback.format_exc() ) + + + + + HC.pubsub.pub( 'set_focus', self._page_key, self._current_media_to_rate ) + + self.Destroy() + + + def EventKeyDown( self, event ): + + if event.KeyCode in ( wx.WXK_SPACE, wx.WXK_UP, wx.WXK_NUMPAD_UP ): self._ShowNewMedia() + elif event.KeyCode in ( wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN ): self._ProcessAction( 'equal' ) + elif event.KeyCode in ( wx.WXK_LEFT, wx.WXK_NUMPAD_LEFT ): self._ProcessAction( 'left' ) + elif event.KeyCode in ( wx.WXK_RIGHT, wx.WXK_NUMPAD_RIGHT ): self._ProcessAction( 'right' ) + elif event.KeyCode == wx.WXK_BACK: self._GoBack() + elif event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER, wx.WXK_ESCAPE ): self.EventClose( event ) + else: event.Skip() + + + def EventMouseDown( self, event ): + + if event.ButtonDown( wx.MOUSE_BTN_LEFT ): self._ProcessAction( 'left' ) + elif event.ButtonDown( wx.MOUSE_BTN_RIGHT ): self._ProcessAction( 'right' ) + elif event.ButtonDown( wx.MOUSE_BTN_MIDDLE ): self._ProcessAction( 'equal' ) + + + def EventSlider( self, event ): + + value = self._accuracy_slider.GetValue() + + if value == 0: self._equal_accuracy = self.RATINGS_FILTER_EQUALITY_FULL + elif value <= 2: self._equal_accuracy = self.RATINGS_FILTER_EQUALITY_HALF + else: self._equal_accuracy = self.RATINGS_FILTER_EQUALITY_QUARTER + + if value <= 1: self._inequal_accuracy = self.RATINGS_FILTER_INEQUALITY_FULL + elif value <= 3: self._inequal_accuracy = self.RATINGS_FILTER_INEQUALITY_HALF + else: self._inequal_accuracy = self.RATINGS_FILTER_INEQUALITY_QUARTER + + self._options[ 'ratings_filter_accuracy' ] = value + + wx.GetApp().Write( 'save_options' ) + + + def ProcessContentUpdates( self, content_updates ): + + redraw = False + + my_hashes = self._file_query_result.GetHashes() + + for content_update in content_updates: + + content_update_hashes = content_update.GetHashes() + + if len( my_hashes.intersection( content_update_hashes ) ) > 0: + + redraw = True + + break + + + + if redraw: + + self._left_window.RefreshBackground() + self._right_window.RefreshBackground() + + + + def ProcessServiceUpdate( self, update ): + + self._left_window.RefreshBackground() + self._right_window.RefreshBackground() + + +class Image( wx.Window ): + + def __init__( self, parent, media, image_cache, initial_size, initial_position ): + + wx.Window.__init__( self, parent, size = initial_size, pos = initial_position ) + + self.SetDoubleBuffered( True ) + + self._media = media + self._image_container = None + self._image_cache = image_cache + + self._current_frame_index = 0 + + ( width, height ) = initial_size + + self._canvas_bmp = wx.EmptyBitmap( 0, 0, 24 ) + + self._timer_animated = wx.Timer( 
self, id = ID_TIMER_ANIMATED ) + + self.Bind( wx.EVT_PAINT, self.EventPaint ) + self.Bind( wx.EVT_SIZE, self.EventResize ) + self.Bind( wx.EVT_TIMER, self.EventTimerAnimated, id = ID_TIMER_ANIMATED ) + self.Bind( wx.EVT_MOUSE_EVENTS, self.PropagateMouseEvent ) + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self.EventResize( None ) + + + def _Draw( self ): + + dc = wx.BufferedDC( wx.ClientDC( self ), self._canvas_bmp ) + + if self._image_container.HasFrame( self._current_frame_index ): + + current_frame = self._image_container.GetFrame( self._current_frame_index ) + + ( my_width, my_height ) = self._canvas_bmp.GetSize() + + if self._media.GetMime() == HC.IMAGE_GIF and self._media.HasDuration(): + image_width = my_width + image_height = my_height - ANIMATED_SCANBAR_HEIGHT + else: + image_width = my_width + image_height = my_height + + ( frame_width, frame_height ) = current_frame.GetSize() + + x_scale = image_width / float( frame_width ) + y_scale = image_height / float( frame_height ) + + dc.SetUserScale( x_scale, y_scale ) + + hydrus_bmp = current_frame.CreateWxBmp() + + dc.DrawBitmap( hydrus_bmp, 0, 0 ) + + hydrus_bmp.Destroy() + + dc.SetUserScale( 1.0, 1.0 ) + + if self._image_container.IsAnimated(): self._timer_animated.Start( self._image_container.GetDuration( self._current_frame_index ), wx.TIMER_ONE_SHOT ) + + else: + + dc.SetBackground( wx.Brush( wx.WHITE ) ) + + dc.Clear() + + self._timer_animated.Start( 50, wx.TIMER_ONE_SHOT ) + + + if self._media.GetMime() == HC.IMAGE_GIF and self._media.HasDuration(): + + ( my_width, my_height ) = self.GetClientSize() + + zero_y = my_height - ANIMATED_SCANBAR_HEIGHT + + num_frames = self._media.GetNumFrames() + + dc.SetPen( wx.TRANSPARENT_PEN ) + + dc.SetBrush( wx.Brush( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) ) + + dc.DrawRectangle( 0, zero_y, my_width, ANIMATED_SCANBAR_HEIGHT ) + + dc.SetBrush( wx.Brush( wx.SystemSettings.GetColour( wx.SYS_COLOUR_SCROLLBAR ) ) ) + + dc.DrawRectangle( int( float( my_width - ANIMATED_SCANBAR_CARET_WIDTH ) * float( self._current_frame_index ) / float( num_frames - 1 ) ), zero_y, ANIMATED_SCANBAR_CARET_WIDTH, ANIMATED_SCANBAR_HEIGHT ) + + + + def ChangeFrame( self, direction ): + + num_frames = self._media.GetNumFrames() + + if direction == 1: + + if self._current_frame_index == num_frames - 1: self._current_frame_index = 0 + else: self._current_frame_index += 1 + + else: + + if self._current_frame_index == 0: self._current_frame_index = num_frames - 1 + else: self._current_frame_index -= 1 + + + self._Draw() + + self._timer_animated.Stop() + + + def PropagateMouseEvent( self, event ): + + if self._media.GetMime() == HC.IMAGE_GIF and self._media.HasDuration(): + + ( my_width, my_height ) = self.GetClientSize() + + ( x, y ) = event.GetPosition() + + if y > my_height - ANIMATED_SCANBAR_HEIGHT: + + if event.Dragging() or event.ButtonDown(): + + num_frames = self._media.GetNumFrames() + + compensated_x_position = x - ( ANIMATED_SCANBAR_CARET_WIDTH / 2 ) + + proportion = float( compensated_x_position ) / float( my_width - ANIMATED_SCANBAR_CARET_WIDTH ) + + if proportion < 0: proportion = 0 + if proportion > 1: proportion = 1 + + self._current_frame_index = int( proportion * ( num_frames - 1 ) + 0.5 ) + + self._Draw() + + if event.Dragging(): self._timer_animated.Stop() + + self.GetParent().KeepCursorAlive() + + return + + + + + screen_position = self.ClientToScreen( event.GetPosition() ) + ( x, y ) = self.GetParent().ScreenToClient( screen_position ) + + event.SetX( x ) + event.SetY( y ) + + 
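# --- illustrative sketch, not part of the original diff ---
# The ratings filter above tracks a ( min, max ) score interval in [ 0.0, 1.0 ] for every file.
# Each comparison narrows it: an 'equal' decision pulls the two intervals together (fully, half
# way, or a quarter of the way, depending on accuracy), an inequality pushes one bound past the
# other by a score_gap-based adjustment, and a file is settled once max - min < score_gap, at
# which point its midpoint is snapped to the score grid.  Candidates to compare against are
# ranked by ( sqrt( min_gap ) + sqrt( max_gap ) ) ** 2, negated when one interval strictly
# contains the other, smallest value first.  The helper names below are invented for this sketch.

def total_gap_value( interval, other_interval ):
    
    ( lo, hi ) = interval
    ( other_lo, other_hi ) = other_interval
    
    min_gap = abs( other_lo - lo )
    max_gap = abs( other_hi - hi )
    
    value = ( min_gap ** 0.5 + max_gap ** 0.5 ) ** 2
    
    # intervals that overlap on both ends give the most informative comparison, so they are
    # negated and hence come out of a smallest-first ordering ahead of everything else
    if ( other_lo < lo and other_hi > hi ) or ( other_lo > lo and other_hi < hi ): value *= -1
    
    return value

def settle( interval, score_gap ):
    
    # once the interval is narrower than score_gap, round its midpoint to a multiple of score_gap
    ( lo, hi ) = interval
    
    if hi - lo < score_gap: return round( ( ( lo + hi ) / 2.0 ) / score_gap ) * score_gap
    
    return None

# picking the next file to rate against is equivalent to taking the smallest total_gap_value:
candidates = { 'tightly rated' : ( 0.45, 0.55 ), 'barely rated' : ( 0.0, 0.2 ) }
unrated = ( 0.0, 1.0 )

best = min( candidates, key = lambda name: total_gap_value( unrated, candidates[ name ] ) )
# best == 'tightly rated'; settle( ( 0.48, 0.52 ), 0.1 ) gives 0.5
# --- end of sketch ---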
event.ResumePropagation( 1 ) + event.Skip() + + + def EventKeyDown( self, event ): + + self.GetParent().ProcessEvent( event ) + + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp ) + + def EventResize( self, event ): + + ( my_width, my_height ) = self.GetClientSize() + + ( current_bmp_width, current_bmp_height ) = self._canvas_bmp.GetSize() + + if my_width != current_bmp_width or my_height != current_bmp_height: + + if my_width > 0 and my_height > 0: + + if self._image_container is None: + + if self._media.GetMime() == HC.IMAGE_GIF and self._media.HasDuration(): + image_width = my_width + image_height = my_height - ANIMATED_SCANBAR_HEIGHT + else: + image_width = my_width + image_height = my_height + + self._image_container = self._image_cache.GetImage( self._media.GetHash(), ( image_width, image_height ) ) + + else: + + ( image_width, image_height ) = self._image_container.GetSize() + + we_just_zoomed_in = my_width > image_width + + if we_just_zoomed_in and self._image_container.IsScaled(): + + full_resolution = self._image_container.GetResolution() + + self._image_container = self._image_cache.GetImage( self._media.GetHash(), full_resolution ) + + + + self._canvas_bmp = wx.EmptyBitmap( my_width, my_height, 24 ) + + self._Draw() + + + + + def EventTimerAnimated( self, event ): + + if self.IsShown(): + + if self._image_container.HasFrame( self._current_frame_index + 1 ): self._current_frame_index += 1 + elif self._image_container.IsFinishedRendering(): self._current_frame_index = 0 + + self._Draw() + + + +class Text( wx.Window ): + + def __init__( self, parent, place, background_colour = wx.WHITE ): + + wx.Window.__init__( self, parent ) + + self._place = place + self._background_colour = background_colour + + self._current_text = '' + self._canvas_bmp = wx.EmptyBitmap( 0, 0, 24 ) + + self.Bind( wx.EVT_PAINT, self.EventPaint ) + + self._can_show = False + + self.Hide() + + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp ) + + def SetText( self, text ): + + ( my_width, my_height ) = self._canvas_bmp.GetSize() + + if text != self._current_text: + + self._current_text = text + + dc = wx.BufferedDC( wx.ClientDC( self ), self._canvas_bmp ) + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( x, y ) = dc.GetTextExtent( self._current_text ) + + x += 2 + y += 2 + + if x != my_width or y != my_height: + + del dc + + self._canvas_bmp = wx.EmptyBitmap( x, y, 24 ) + + dc = wx.BufferedDC( wx.ClientDC( self ), self._canvas_bmp ) + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( my_width, my_height ) = ( x, y ) + + + dc.SetBackground( wx.Brush( self._background_colour ) ) + + dc.Clear() + + dc.DrawText( self._current_text, 1, 1 ) + + + parent = self.GetParent() + + ( parent_width, parent_height ) = parent.GetClientSize() + + if self._place == 'top': + + pos_x = ( parent_width / 2 ) - ( my_width / 2 ) + pos_y = 0 + + elif self._place == 'bottom_left': + + pos_x = 0 + pos_y = parent_height - my_height + + elif self._place == 'bottom_right': + + pos_x = parent_width - my_width + pos_y = parent_height - my_height + + + self.SetSize( ( my_width, my_height ) ) + self.SetPosition( ( pos_x, pos_y ) ) + + self._can_show = self._current_text != '' + + + def ShowIfPossible( self ): + + if self._can_show: self.Show() + else: self.Hide() + + \ No newline at end of file diff --git a/include/ClientGUICommon.py b/include/ClientGUICommon.py new file mode 100755 index 00000000..2d74891b --- /dev/null +++ 
b/include/ClientGUICommon.py @@ -0,0 +1,2706 @@ +import collections +import HydrusConstants as HC +import ClientConstants as CC +import ClientGUIMixins +import os +import random +import time +import traceback +import wx +import wx.richtext +from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin +from wx.lib.mixins.listctrl import ColumnSorterMixin + +ID_TIMER_ANIMATED = wx.NewId() +ID_TIMER_SLIDESHOW = wx.NewId() +ID_TIMER_MEDIA_INFO_DISPLAY = wx.NewId() + +# Zooms + +ZOOMINS = [ 0.01, 0.05, 0.1, 0.15, 0.2, 0.3, 0.5, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.5, 2.0, 3.0, 5.0, 10.0, 20.0 ] +ZOOMOUTS = [ 20.0, 10.0, 5.0, 3.0, 2.0, 1.5, 1.2, 1.1, 1.0, 0.9, 0.8, 0.7, 0.5, 0.3, 0.2, 0.15, 0.1, 0.05, 0.01 ] + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +class AdvancedHentaiFoundryOptions( wx.CollapsiblePane ): + + def __init__( self, parent ): + + wx.CollapsiblePane.__init__( self, parent, label = 'expand' ) + + my_panel = self.GetPane() + + def offensive_choice(): + + c = wx.Choice( my_panel ) + + c.Append( 'none', 0 ) + c.Append( 'mild', 1 ) + c.Append( 'moderate', 2 ) + c.Append( 'strong', 3 ) + + c.SetSelection( 3 ) + + return c + + + self._rating_nudity = offensive_choice() + self._rating_violence = offensive_choice() + self._rating_profanity = offensive_choice() + self._rating_racism = offensive_choice() + self._rating_sex = offensive_choice() + self._rating_spoilers = offensive_choice() + + self._rating_yaoi = wx.CheckBox( my_panel ) + self._rating_yuri = wx.CheckBox( my_panel ) + self._rating_loli = wx.CheckBox( my_panel ) + self._rating_shota = wx.CheckBox( my_panel ) + self._rating_teen = wx.CheckBox( my_panel ) + self._rating_guro = wx.CheckBox( my_panel ) + self._rating_furry = wx.CheckBox( my_panel ) + self._rating_beast = wx.CheckBox( my_panel ) + self._rating_male = wx.CheckBox( my_panel ) + self._rating_female = wx.CheckBox( my_panel ) + self._rating_futa = wx.CheckBox( my_panel ) + self._rating_other = wx.CheckBox( my_panel ) + + self._rating_yaoi.SetValue( True ) + self._rating_yuri.SetValue( True ) + self._rating_loli.SetValue( True ) + self._rating_shota.SetValue( True ) + self._rating_teen.SetValue( True ) + self._rating_guro.SetValue( True ) + self._rating_furry.SetValue( True ) + self._rating_beast.SetValue( True ) + self._rating_male.SetValue( True ) + self._rating_female.SetValue( True ) + self._rating_futa.SetValue( True ) + self._rating_other.SetValue( True ) + + self._filter_order = wx.Choice( my_panel ) + + self._filter_order.Append( 'newest first', 'date_new' ) + self._filter_order.Append( 'oldest first', 'date_old' ) + self._filter_order.Append( 'most views first', 'views most' ) # no underscore + self._filter_order.Append( 'highest rating first', 'rating highest' ) # no underscore + self._filter_order.Append( 'most favourites first', 'faves most' ) # no underscore + self._filter_order.Append( 'most popular first', 'popularity most' ) # no underscore + + self._filter_order.SetSelection( 0 ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( my_panel, label = 
'nudity' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_nudity, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'violence' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_violence, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'profanity' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_profanity, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'racism' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_racism, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'sex' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_sex, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'spoilers' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_spoilers, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'yaoi' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_yaoi, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'yuri' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_yuri, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'loli' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_loli, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'shota' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_shota, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'teen' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_teen, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'guro' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_guro, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'furry' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_furry, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'beast' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_beast, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'male' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_male, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'female' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_female, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'futa' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_futa, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'other' ), FLAGS_MIXED ) + gridbox.AddF( self._rating_other, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( my_panel, label = 'order' ), FLAGS_MIXED ) + gridbox.AddF( self._filter_order, FLAGS_EXPAND_BOTH_WAYS ) + + my_panel.SetSizer( gridbox ) + + self.Bind( wx.EVT_COLLAPSIBLEPANE_CHANGED, self.EventChanged ) + + + def GetInfo( self ): + + info = {} + + info[ 'rating_nudity' ] = self._rating_nudity.GetClientData( self._rating_nudity.GetSelection() ) + info[ 'rating_violence' ] = self._rating_violence.GetClientData( self._rating_violence.GetSelection() ) + info[ 'rating_profanity' ] = self._rating_profanity.GetClientData( self._rating_profanity.GetSelection() ) + info[ 'rating_racism' ] = self._rating_racism.GetClientData( self._rating_racism.GetSelection() ) + info[ 'rating_sex' ] = self._rating_sex.GetClientData( self._rating_sex.GetSelection() ) + info[ 'rating_spoilers' ] = self._rating_spoilers.GetClientData( self._rating_spoilers.GetSelection() ) + + info[ 'rating_yaoi' ] = int( self._rating_yaoi.GetValue() ) + info[ 'rating_yuri' ] = int( self._rating_yuri.GetValue() ) + info[ 'rating_loli' ] = int( self._rating_loli.GetValue() ) + info[ 'rating_shota' ] = int( 
self._rating_shota.GetValue() ) + info[ 'rating_teen' ] = int( self._rating_teen.GetValue() ) + info[ 'rating_guro' ] = int( self._rating_guro.GetValue() ) + info[ 'rating_furry' ] = int( self._rating_furry.GetValue() ) + info[ 'rating_beast' ] = int( self._rating_beast.GetValue() ) + info[ 'rating_male' ] = int( self._rating_male.GetValue() ) + info[ 'rating_female' ] = int( self._rating_female.GetValue() ) + info[ 'rating_futa' ] = int( self._rating_futa.GetValue() ) + info[ 'rating_other' ] = int( self._rating_other.GetValue() ) + + info[ 'filter_media' ] = 'A' + info[ 'filter_order' ] = self._filter_order.GetClientData( self._filter_order.GetSelection() ) + info[ 'filter_type' ] = 0 + + return info + + + def EventChanged( self, event ): + + self.GetParent().Layout() # make this vertical only? + + if self.IsExpanded(): self.SetLabel( 'collapse' ) + else: self.SetLabel( 'expand' ) + + +class AdvancedImportOptions( wx.CollapsiblePane ): + + def __init__( self, parent ): + + wx.CollapsiblePane.__init__( self, parent, label = 'expand' ) + + options = wx.GetApp().Read( 'options' ) + + my_panel = self.GetPane() + + self._auto_archive = wx.CheckBox( my_panel ) + self._auto_archive.SetValue( False ) + + self._exclude_deleted = wx.CheckBox( my_panel ) + self._exclude_deleted.SetValue( options[ 'exclude_deleted_files' ] ) + + self._min_size = NoneableSpinCtrl( my_panel, 'minimum size (KB): ', 5120, multiplier = 1024 ) + self._min_size.SetValue( None ) + + self._min_resolution = NoneableSpinCtrl( my_panel, 'minimum resolution: ', ( 50, 50 ), num_dimensions = 2 ) + self._min_resolution.SetValue( None ) + + hbox1 = wx.BoxSizer( wx.HORIZONTAL ) + + hbox1.AddF( self._auto_archive, FLAGS_MIXED ) + hbox1.AddF( wx.StaticText( my_panel, label = ' archive all imports' ), FLAGS_MIXED ) + + hbox2 = wx.BoxSizer( wx.HORIZONTAL ) + + hbox2.AddF( self._exclude_deleted, FLAGS_MIXED ) + hbox2.AddF( wx.StaticText( my_panel, label = ' exclude already deleted files' ), FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( hbox1, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( hbox2, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._min_size, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._min_resolution, FLAGS_EXPAND_PERPENDICULAR ) + + my_panel.SetSizer( vbox ) + + self.Bind( wx.EVT_COLLAPSIBLEPANE_CHANGED, self.EventChanged ) + + + def GetInfo( self ): + + info = {} + + if self._auto_archive.GetValue(): info[ 'auto_archive' ] = True + + if self._exclude_deleted.GetValue(): info[ 'exclude_deleted_files' ] = True + + min_size = self._min_size.GetValue() + + if min_size is not None: info[ 'min_size' ] = min_size + + min_resolution = self._min_resolution.GetValue() + + if min_resolution is not None: info[ 'min_resolution' ] = min_resolution + + return info + + + def EventChanged( self, event ): + + self.GetParent().Layout() # make this vertical only? 
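# --- illustrative sketch, not part of the original diff ---
# AdvancedImportOptions.GetInfo() above returns a sparse dict: a key is only present when the
# user has switched that option on, and min_size comes back in bytes because its
# NoneableSpinCtrl was built with multiplier = 1024.  A downstream import check could read the
# dict like this; passes_import_checks and its argument names are assumptions for the example.

def passes_import_checks( advanced_import_options, size_in_bytes, width, height ):
    
    if 'min_size' in advanced_import_options:
        
        if size_in_bytes < advanced_import_options[ 'min_size' ]: return False
        
    
    if 'min_resolution' in advanced_import_options:
        
        ( min_width, min_height ) = advanced_import_options[ 'min_resolution' ]
        
        if width < min_width or height < min_height: return False
        
    
    return True

# e.g. with the panel's size and resolution limits switched on ( 5120KB minimum, 50x50 minimum ):
example_options = { 'min_size' : 5120 * 1024, 'min_resolution' : ( 50, 50 ) }
# passes_import_checks( example_options, 10 * 1024 * 1024, 1280, 720 ) -> True
# passes_import_checks( example_options, 100 * 1024, 1280, 720 ) -> False
# --- end of sketch ---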
+ + if self.IsExpanded(): self.SetLabel( 'collapse' ) + else: self.SetLabel( 'expand' ) + + +class AdvancedTagOptions( wx.CollapsiblePane ): + + def __init__( self, parent, info_string, namespaces = [] ): + + wx.CollapsiblePane.__init__( self, parent, label = 'expand' ) + + service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ) ) + + self._checkboxes_to_service_identifiers = {} + self._service_identifiers_to_namespaces = {} + + my_panel = self.GetPane() + + vbox = wx.BoxSizer( wx.VERTICAL ) + + if len( service_identifiers ) > 0: + + for service_identifier in service_identifiers: + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + checkbox = wx.CheckBox( my_panel ) + checkbox.Bind( wx.EVT_CHECKBOX, self.EventChecked ) + + self._checkboxes_to_service_identifiers[ checkbox ] = service_identifier + + hbox.AddF( wx.StaticText( my_panel, label = service_identifier.GetName() ), FLAGS_MIXED ) + hbox.AddF( checkbox, FLAGS_MIXED ) + + if len( namespaces ) > 0: + + namespace_vbox = wx.BoxSizer( wx.VERTICAL ) + + self._service_identifiers_to_namespaces[ service_identifier ] = [] + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + for namespace in namespaces: + + if namespace == '': text = wx.StaticText( my_panel, label = 'no namespace' ) + else: text = wx.StaticText( my_panel, label = namespace ) + + namespace_checkbox = wx.CheckBox( my_panel ) + namespace_checkbox.SetValue( True ) + namespace_checkbox.Bind( wx.EVT_CHECKBOX, self.EventChecked ) + + self._service_identifiers_to_namespaces[ service_identifier ].append( ( namespace, namespace_checkbox ) ) + + gridbox.AddF( text, FLAGS_MIXED ) + gridbox.AddF( namespace_checkbox, FLAGS_EXPAND_BOTH_WAYS ) + + + hbox.AddF( gridbox, FLAGS_MIXED ) + + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( my_panel, label = info_string ), FLAGS_MIXED ) + hbox.AddF( vbox, FLAGS_EXPAND_PERPENDICULAR ) + + + my_panel.SetSizer( hbox ) + + else: + + vbox.AddF( wx.StaticText( my_panel, label = 'no tag repositories' ), FLAGS_EXPAND_BOTH_WAYS ) + + my_panel.SetSizer( vbox ) + + + self.Bind( wx.EVT_COLLAPSIBLEPANE_CHANGED, self.EventChanged ) + + + def GetInfo( self ): + + service_identifiers = [ self._checkboxes_to_service_identifiers[ checkbox ] for checkbox in self._checkboxes_to_service_identifiers.keys() if checkbox.GetValue() ] + + result = [] + + for service_identifier in service_identifiers: + + good_namespaces = [] + + if service_identifier in self._service_identifiers_to_namespaces: + + namespaces = self._service_identifiers_to_namespaces[ service_identifier ] + + for ( namespace, namespace_checkbox ) in namespaces: + + if namespace_checkbox.GetValue(): good_namespaces.append( namespace ) + + + + result.append( ( service_identifier, good_namespaces ) ) + + + return result + + + def EventChanged( self, event ): + + self.GetParent().Layout() # make this vertical only? 
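# --- illustrative sketch, not part of the original diff ---
# AdvancedTagOptions.GetInfo() above yields a list of ( service_identifier, namespaces ) pairs,
# where the empty string stands for 'no namespace'.  A typical consumer would keep only the
# parsed tags whose namespace was ticked for that service; filter_tags_by_namespaces is an
# assumed helper name, not something defined in the diff.

def filter_tags_by_namespaces( tags, namespaces ):
    
    wanted = []
    
    for tag in tags:
        
        if ':' in tag: ( namespace, subtag ) = tag.split( ':', 1 )
        else: namespace = ''
        
        if namespace in namespaces: wanted.append( tag )
        
    
    return wanted

# filter_tags_by_namespaces( [ 'creator:someone', 'page:4', 'blue sky' ], [ 'creator', '' ] )
# -> [ 'creator:someone', 'blue sky' ]
# --- end of sketch ---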
+ + if self.IsExpanded(): self.SetLabel( 'collapse' ) + else: self.SetLabel( 'expand' ) + + + def EventChecked( self, event ): + + wx.PostEvent( self, wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'advanced_tag_options_changed' ) ) ) + + event.Skip() + + +class AnimatedStaticTextTimestamp( wx.StaticText ): + + def __init__( self, parent, prefix, rendering_function, timestamp, suffix ): + + self._prefix = prefix + self._rendering_function = rendering_function + self._timestamp = timestamp + self._suffix = suffix + + self._last_tick = int( time.time() ) + + wx.StaticText.__init__( self, parent, label = self._prefix + self._rendering_function( self._timestamp ) + self._suffix ) + + HC.pubsub.sub( self, 'Tick', 'animated_tick' ) + + + def Tick( self ): + + update = False + + now = int( time.time() ) + + difference = abs( now - self._timestamp ) + + if difference < 3600: update = True + elif difference < 3600 * 24 and now - self._last_tick > 60: update = True + elif now - self._last_tick > 3600: update = True + + if update: + + self.SetLabel( self._prefix + self._rendering_function( self._timestamp ) + self._suffix ) + + wx.PostEvent( self.GetEventHandler(), wx.SizeEvent() ) + + + +# much of this is based on the excellent TexCtrlAutoComplete class by Edward Flick, Michele Petrazzo and Will Sadkin, just with plenty of simplification and integration into hydrus +class AutoCompleteDropdown( wx.TextCtrl ): + + def __init__( self, parent ): + + wx.TextCtrl.__init__( self, parent, style=wx.TE_PROCESS_ENTER ) + + self._dropdown_window = wx.PopupWindow( self, flags = wx.SIMPLE_BORDER ) + self._dropdown_window.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + #self._dropdown_window.SetDoubleBuffered( True ) + + self._first_letters = '' + self._cached_results = self._InitCachedResults() + + self._dropdown_list = self._InitDropDownList() + + self.Bind( wx.EVT_SET_FOCUS, self.EventSetFocus ) + self.Bind( wx.EVT_KILL_FOCUS, self.EventKillFocus ) + + self.Bind( wx.EVT_TEXT, lambda event: self._UpdateList(), self ) + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown, self ) + + self.Bind( wx.EVT_MOVE, self.EventShowDropdownIfFocussed ) + self.Bind( wx.EVT_SIZE, self.EventShowDropdownIfFocussed ) + + self.Bind( wx.EVT_MOUSEWHEEL, self.EventMouseWheel ) + + tlp = self.GetTopLevelParent() + + tlp.Bind( wx.EVT_MOVE, self.EventShowDropdownIfFocussed ) + + wx.CallAfter( self._UpdateList ) + + + def _BroadcastChoice( self, predicate ): pass + + def BroadcastChoice( self, predicate ): + + self._BroadcastChoice( predicate ) + + self.Clear() + + wx.CallAfter( self._UpdateList ) + + + def _HideDropdown( self ): self._dropdown_window.Show( False ) + + def _ShowDropdownIfFocussed( self ): + + if wx.Window.FindFocus() == self and len( self._dropdown_list ) > 0: + + ( my_width, my_height ) = self.GetSize() + + self._dropdown_list.Show() + + self._dropdown_window.Fit() + + self._dropdown_window.SetSize( ( my_width, -1 ) ) + + self._dropdown_window.Layout() + + self._dropdown_window.SetPosition( self.ClientToScreenXY( -2, my_height - 2 ) ) + + self._dropdown_window.Show() + + + + def _UpdateList( self ): pass + + def EventKeyDown( self, event ): + + if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ) and self.GetValue() == '' and len( self._dropdown_list ) == 0: self._BroadcastChoice( None ) + elif event.KeyCode == wx.WXK_ESCAPE: self.GetTopLevelParent().SetFocus() + elif event.KeyCode in ( wx.WXK_UP, wx.WXK_NUMPAD_UP, 
wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN ) and self.GetValue() == '' and len( self._dropdown_list ) == 0: + + if event.KeyCode in ( wx.WXK_UP, wx.WXK_NUMPAD_UP ): id = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select_up' ) + elif event.KeyCode in ( wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN ): id = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select_down' ) + + new_event = wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = id ) + + self.ProcessEvent( new_event ) + + else: self._dropdown_list.ProcessEvent( event ) + + + def EventKillFocus( self, event ): + + self._HideDropdown() + + event.Skip() + + + def EventMouseWheel( self, event ): + + if self.GetValue() == '' and len( self._dropdown_list ) == 0: + + if event.GetWheelRotation() > 0: id = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select_up' ) + else: id = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select_down' ) + + new_event = wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = id ) + + self.ProcessEvent( new_event ) + + else: + + if event.CmdDown(): + + key_event = wx.KeyEvent( wx.EVT_KEY_DOWN.typeId ) + + if event.GetWheelRotation() > 0: key_event.m_keyCode = wx.WXK_UP + else: key_event.m_keyCode = wx.WXK_DOWN + + self._dropdown_list.ProcessEvent( key_event ) + + else: + + # for some reason, the scrolledwindow list doesn't process scroll events properly when in a popupwindow + # so let's just tell it to scroll manually + + ( start_x, start_y ) = self._dropdown_list.GetViewStart() + + if event.GetWheelRotation() > 0: self._dropdown_list.Scroll( -1, start_y - 3 ) + else: self._dropdown_list.Scroll( -1, start_y + 3 ) + + + + + def EventSetFocus( self, event ): + + self._UpdateList() + + event.Skip() + + + def EventShowDropdownIfFocussed( self, event ): + + try: self._ShowDropdownIfFocussed() + except: pass + + event.Skip() + + +class AutoCompleteDropdownContacts( AutoCompleteDropdown ): + + def __init__( self, parent, compose_key, identity ): + + AutoCompleteDropdown.__init__( self, parent ) + + self._compose_key = compose_key + self._identity = identity + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._dropdown_list, FLAGS_EXPAND_BOTH_WAYS ) + + self._dropdown_window.SetSizer( vbox ) + + + def _BroadcastChoice( self, contact_name ): HC.pubsub.pub( 'add_contact', self._compose_key, contact_name ) + + def _GenerateMatches( self ): + + num_first_letters = 1 + + entry = self.GetValue() + + if entry == '': + + self._first_letters = '' + + matches = [] + + else: + + if len( entry ) >= num_first_letters: + + if entry[ : num_first_letters ] != self._first_letters: + + self._first_letters = entry[ : num_first_letters ] + + self._cached_results = wx.GetApp().Read( 'autocomplete_contacts', entry, name_to_exclude = self._identity.GetName() ) + + + matches = self._cached_results.GetMatches( entry ) + + else: matches = [] + + + return matches + + + def _InitCachedResults( self ): return CC.AutocompleteMatches( [] ) + + def _InitDropDownList( self ): return ListBoxMessagesActiveOnly( self._dropdown_window, self.BroadcastChoice ) + + def _UpdateList( self ): + + matches = self._GenerateMatches() + + # this obv needs to be SetValues or whatever + self._dropdown_list.SetTexts( matches ) + + if len( matches ) > 0: self._ShowDropdownIfFocussed() + else: self._HideDropdown() + + +class AutoCompleteDropdownMessageTerms( AutoCompleteDropdown ): + + def __init__( self, parent, page_key, identity ): + + AutoCompleteDropdown.__init__( self, parent ) + + self._page_key = page_key + self._identity = identity + + vbox = wx.BoxSizer( wx.VERTICAL 
) + + vbox.AddF( self._dropdown_list, FLAGS_EXPAND_BOTH_WAYS ) + + self._dropdown_window.SetSizer( vbox ) + + + def _BroadcastChoice( self, predicate ): HC.pubsub.pub( 'add_predicate', self._page_key, predicate ) + + def _InitCachedResults( self ): return CC.AutocompleteMatchesCounted( {} ) + + def _InitDropDownList( self ): return ListBoxMessagesActiveOnly( self._dropdown_window, self.BroadcastChoice ) + + def _GenerateMatches( self ): + + entry = self.GetValue() + + if entry.startswith( '-' ): search_term = entry[1:] + else: search_term = entry + + if search_term == '': matches = wx.GetApp().Read( 'message_system_predicates', self._identity ) + else: matches = [ ( entry, None ) ] + + return matches + + + def _UpdateList( self ): + + matches = self._GenerateMatches() + + self._dropdown_list.SetTerms( matches ) + + if len( matches ) > 0: self._ShowDropdownIfFocussed() + else: self._HideDropdown() + + +class AutoCompleteDropdownTags( AutoCompleteDropdown ): + + def __init__( self, parent, file_service_identifier, tag_service_identifier ): + + AutoCompleteDropdown.__init__( self, parent ) + + self._options = wx.GetApp().Read( 'options' ) + + self._current_namespace = '' + + self._file_service_identifier = file_service_identifier + self._tag_service_identifier = tag_service_identifier + + if self._file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known files' + else: name = self._file_service_identifier.GetName() + + self._file_repo_button = wx.Button( self._dropdown_window, label = name ) + self._file_repo_button.Bind( wx.EVT_BUTTON, self.EventFileButton ) + + if self._tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known tags' + else: name = self._tag_service_identifier.GetName() + + self._tag_repo_button = wx.Button( self._dropdown_window, label = name ) + self._tag_repo_button.Bind( wx.EVT_BUTTON, self.EventTagButton ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + + def _InitCachedResults( self ): return CC.AutocompleteMatchesCounted( {} ) + + def _InitDropDownList( self ): return TagsBoxActiveOnly( self._dropdown_window, self.BroadcastChoice ) + + def _ShowDropdownIfFocussed( self ): + + # don't know why I have to do this fit and layout rubbish manually here; I guess it is popupwindow screwing up as usual + + if wx.Window.FindFocus() == self: + + ( my_width, my_height ) = self.GetSize() + + if len( self._dropdown_list ) > 0: self._dropdown_list.Show() + else: self._dropdown_list.Hide() + + self._dropdown_window.Fit() + + self._dropdown_window.SetSize( ( my_width, -1 ) ) + + self._dropdown_window.Layout() + + self._dropdown_window.SetPosition( self.ClientToScreenXY( -2, my_height - 2 ) ) + + self._dropdown_window.Show() + + + + def _UpdateList( self ): + + matches = self._GenerateMatches() + + self._dropdown_list.SetTags( matches ) + + self._ShowDropdownIfFocussed() + + + def EventFileButton( self, event ): + + service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.FILE_REPOSITORY, ) ) + + menu = wx.Menu() + + if len( service_identifiers ) > 0: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', CC.NULL_SERVICE_IDENTIFIER ), 'all known files' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), 'local files' ) + + for service_identifier in service_identifiers: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_file_repository', service_identifier ), service_identifier.GetName() ) + + self.PopupMenu( menu ) + + + def EventMenu( self, 
event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'change_file_repository': + + service_identifier = data + + self._file_service_identifier = service_identifier + + if service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known files' + else: name = service_identifier.GetName() + + self._file_repo_button.SetLabel( name ) + + HC.pubsub.pub( 'change_file_repository', self._page_key, service_identifier ) + + elif command == 'change_tag_repository': + + service_identifier = data + + self._tag_service_identifier = service_identifier + + if service_identifier == CC.NULL_SERVICE_IDENTIFIER: name = 'all known tags' + else: name = service_identifier.GetName() + + self._tag_repo_button.SetLabel( name ) + + HC.pubsub.pub( 'change_tag_repository', self._page_key, service_identifier ) + + else: + + event.Skip() + + return # this is about select_up and select_down + + + self._first_letters = '' + self._current_namespace = '' + + self._UpdateList() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + wx.MessageBox( traceback.format_exc() ) + + + + + def EventTagButton( self, event ): + + service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.TAG_REPOSITORY, ) ) + + menu = wx.Menu() + + if len( service_identifiers ) > 0: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', CC.NULL_SERVICE_IDENTIFIER ), 'all known tags' ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', CC.LOCAL_TAG_SERVICE_IDENTIFIER ), 'local tags' ) + + for service_identifier in service_identifiers: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'change_tag_repository', service_identifier ), service_identifier.GetName() ) + + self.PopupMenu( menu ) + + +class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ): + + def __init__( self, parent, page_key, file_service_identifier, tag_service_identifier, media_callable ): + + AutoCompleteDropdownTags.__init__( self, parent, file_service_identifier, tag_service_identifier ) + + self._media_callable = media_callable + self._page_key = page_key + + self._include_current = True + self._include_pending = True + + self._include_current_tags = OnOffButton( self._dropdown_window, self._page_key, 'notify_include_current', on_label = 'include current tags', off_label = 'exclude current tags' ) + self._include_current_tags.SetToolTipString( 'select whether to include current tags in the search' ) + self._include_pending_tags = OnOffButton( self._dropdown_window, self._page_key, 'notify_include_pending', on_label = 'include pending tags', off_label = 'exclude pending tags' ) + self._include_pending_tags.SetToolTipString( 'select whether to include pending tags in the search' ) + + self._synchronised = OnOffButton( self._dropdown_window, self._page_key, 'notify_search_immediately', on_label = 'searching immediately', off_label = 'waiting' ) + self._synchronised.SetToolTipString( 'select whether to renew the search as soon as a new predicate is entered' ) + + button_hbox_1 = wx.BoxSizer( wx.HORIZONTAL ) + + button_hbox_1.AddF( self._include_current_tags, FLAGS_EXPAND_BOTH_WAYS ) + button_hbox_1.AddF( self._include_pending_tags, FLAGS_EXPAND_BOTH_WAYS ) + + button_hbox_2 = wx.BoxSizer( wx.HORIZONTAL ) + + button_hbox_2.AddF( self._file_repo_button, FLAGS_EXPAND_BOTH_WAYS ) + button_hbox_2.AddF( self._tag_repo_button, FLAGS_EXPAND_BOTH_WAYS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( 
button_hbox_1, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._synchronised, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( button_hbox_2, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._dropdown_list, FLAGS_EXPAND_BOTH_WAYS ) + + self._dropdown_window.SetSizer( vbox ) + + HC.pubsub.sub( self, 'SetSynchronisedWait', 'synchronised_wait_switch' ) + + HC.pubsub.sub( self, 'IncludeCurrent', 'notify_include_current' ) + HC.pubsub.sub( self, 'IncludePending', 'notify_include_pending' ) + + + def _BroadcastChoice( self, predicate ): HC.pubsub.pub( 'add_predicate', self._page_key, predicate ) + + def _GenerateMatches( self ): + + num_first_letters = self._options[ 'num_autocomplete_chars' ] + + raw_entry = self.GetValue() + + if raw_entry.startswith( '-' ): search_text = raw_entry[1:] + else: search_text = raw_entry + + search_text = HC.CleanTag( search_text ) + + if search_text == '': + + self._first_letters = '' + self._current_namespace = '' + + if self._file_service_identifier == CC.NULL_SERVICE_IDENTIFIER: s_i = self._tag_service_identifier + else: s_i = self._file_service_identifier + + matches = wx.GetApp().Read( 'file_system_predicates', s_i ) + + else: + + must_do_a_search = False + + if ':' in search_text: + + ( namespace, half_complete_tag ) = search_text.split( ':' ) + + if namespace != self._current_namespace: + + self._current_namespace = namespace # do a new search, no matter what half_complete tag is + + must_do_a_search = True + + + else: + + self._current_namespace = '' + + half_complete_tag = search_text + + + if len( half_complete_tag ) >= num_first_letters: + + if must_do_a_search or half_complete_tag[ : num_first_letters ] != self._first_letters: + + self._first_letters = half_complete_tag[ : num_first_letters ] + + media = self._media_callable() + + if media is None: self._cached_results = wx.GetApp().Read( 'autocomplete_tags', file_service_identifier = self._file_service_identifier, tag_service_identifier = self._tag_service_identifier, half_complete_tag = search_text, include_current = self._include_current, include_pending = self._include_pending ) + else: + + all_tags = [] + + for m in media: + + if m.IsCollection(): all_tags.extend( m.GetSingletonsTags() ) + else: all_tags.append( m.GetTags() ) + + + absolutely_all_tags = [] + + if self._tag_service_identifier == CC.NULL_SERVICE_IDENTIFIER: + + if self._include_current: absolutely_all_tags += [ list( current ) for ( current, deleted, pending, petitioned ) in [ tags.GetUnionCDPP() for tags in all_tags ] ] + if self._include_pending: absolutely_all_tags += [ list( pending ) for ( current, deleted, pending, petitioned ) in [ tags.GetUnionCDPP() for tags in all_tags ] ] + + else: + + if self._include_current: absolutely_all_tags += [ list( current ) for ( current, deleted, pending, petitioned ) in [ tags.GetCDPP( self._tag_service_identifier ) for tags in all_tags ] ] + if self._include_pending: absolutely_all_tags += [ list( pending ) for ( current, deleted, pending, petitioned ) in [ tags.GetCDPP( self._tag_service_identifier ) for tags in all_tags ] ] + + + absolutely_all_tags_flat = [ tag for tags in absolutely_all_tags for tag in tags if HC.SearchEntryMatchesTag( half_complete_tag, tag ) ] + + if self._current_namespace != '': absolutely_all_tags_flat = [ tag for tag in absolutely_all_tags_flat if tag.startswith( self._current_namespace + ':' ) ] + + tags_to_count = collections.Counter( absolutely_all_tags_flat ) + + self._cached_results = CC.AutocompleteMatchesCounted( tags_to_count ) + + + + matches = 
self._cached_results.GetMatches( half_complete_tag ) + + if raw_entry.startswith( '-' ): matches = [ ( '-' + tag, count ) for ( tag, count ) in matches ] + + else: matches = [] + + + return matches + + + def IncludeCurrent( self, page_key, value ): + + if page_key == self._page_key: self._include_current = value + + self._first_letters = '' + self._current_namespace = '' + + + def IncludePending( self, page_key, value ): + + if page_key == self._page_key: self._include_pending = value + + self._first_letters = '' + self._current_namespace = '' + + + def SetSynchronisedWait( self, page_key ): + + if page_key == self._page_key: self._synchronised.EventButton( None ) + + +class AutoCompleteDropdownTagsWrite( AutoCompleteDropdownTags ): + + def __init__( self, parent, chosen_tag_callable, file_service_identifier, tag_service_identifier ): + + self._chosen_tag_callable = chosen_tag_callable + + self._page_key = None # this makes the parent's eventmenu pubsubs with page_key simpler! + + self._options = wx.GetApp().Read( 'options' ) + + if self._options[ 'show_all_tags_in_autocomplete' ]: file_service_identifier = CC.NULL_SERVICE_IDENTIFIER + + AutoCompleteDropdownTags.__init__( self, parent, file_service_identifier, tag_service_identifier ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._file_repo_button, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( self._tag_repo_button, FLAGS_EXPAND_BOTH_WAYS ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._dropdown_list, FLAGS_EXPAND_BOTH_WAYS ) + + self._dropdown_window.SetSizer( vbox ) + + + def _BroadcastChoice( self, predicate ): self._chosen_tag_callable( predicate ) + + def _GenerateMatches( self ): + + num_first_letters = self._options[ 'num_autocomplete_chars' ] + + raw_entry = self.GetValue() + + search_text = HC.CleanTag( raw_entry ) + + if search_text == '': + + self._first_letters = '' + self._current_namespace = '' + + matches = [] + + else: + + must_do_a_search = False + + if ':' in search_text: + + ( namespace, half_complete_tag ) = search_text.split( ':' ) + + if namespace != self._current_namespace: + + self._current_namespace = namespace # do a new search, no matter what half_complete tag is + + must_do_a_search = True + + + else: + + self._current_namespace = '' + + half_complete_tag = search_text + + + # this bit obviously now needs an overhaul; we want to change to broader search domains automatically, based on what the user has selected + # (and hopefully show that in the buttons, temporarily) + + if len( half_complete_tag ) >= num_first_letters: + + if must_do_a_search or half_complete_tag[ : num_first_letters ] != self._first_letters: + + self._first_letters = half_complete_tag[ : num_first_letters ] + + self._cached_results = wx.GetApp().Read( 'autocomplete_tags', file_service_identifier = self._file_service_identifier, tag_service_identifier = self._tag_service_identifier, half_complete_tag = search_text ) + + + matches = self._cached_results.GetMatches( half_complete_tag ) + + else: matches = [] + + try: + + tags_in_order = [ tag for ( tag, count ) in matches ] + + index = tags_in_order.index( search_text ) + + match = matches[ index ] + + matches.remove( match ) + + matches.insert( 0, match ) + + except: matches.insert( 0, ( search_text, 0 ) ) + + + return matches + + +class BufferedWindow( wx.Window ): + + def __init__( self, *args, **kwargs ): + + wx.Window.__init__( self, *args, **kwargs ) + + if 'size' in kwargs: + + ( x, y ) = kwargs[ 'size' ] + + 
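# --- illustrative sketch, not part of the original diff ---
# The AutoCompleteDropdownTags classes above only hit the database when the first
# num_autocomplete_chars characters of the typed tag (or its namespace) change; every other
# keystroke is answered from the cached result set.  This is the same idea in miniature, with a
# simple startswith match standing in for the real matching; FirstLettersCache and
# fetch_callable are names invented for the sketch.

class FirstLettersCache( object ):
    
    def __init__( self, fetch_callable, num_first_letters = 2 ):
        
        self._fetch_callable = fetch_callable
        self._num_first_letters = num_first_letters
        
        self._first_letters = ''
        self._cached_results = []
        
    
    def GetMatches( self, entry ):
        
        if len( entry ) < self._num_first_letters: return []
        
        first_letters = entry[ : self._num_first_letters ]
        
        if first_letters != self._first_letters:
            
            # the expensive lookup, only done when the prefix changes
            self._first_letters = first_letters
            self._cached_results = self._fetch_callable( first_letters )
            
        
        # the cheap local filter, done on every keystroke
        return [ tag for tag in self._cached_results if tag.startswith( entry ) ]
        
    

# cache = FirstLettersCache( lambda prefix: [ 'blue sky', 'blue eyes', 'blonde hair' ] )
# cache.GetMatches( 'blu' ) and cache.GetMatches( 'blue e' ) share one fetch for the prefix 'bl'
# --- end of sketch ---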
self._canvas_bmp = wx.EmptyBitmap( x, y, 24 ) + + else: self._canvas_bmp = wx.EmptyBitmap( 0, 0, 24 ) + + self.Bind( wx.EVT_PAINT, self.EventPaint ) + self.Bind( wx.EVT_SIZE, self.EventResize ) + + + + def GetDC( self ): return wx.BufferedDC( wx.ClientDC( self ), self._canvas_bmp ) + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp ) + + def EventResize( self, event ): + + ( my_width, my_height ) = self.GetClientSize() + + ( current_bmp_width, current_bmp_height ) = self._canvas_bmp.GetSize() + + if my_width != current_bmp_width or my_height != current_bmp_height: self._canvas_bmp = wx.EmptyBitmap( my_width, my_height, 24 ) + + +class BetterChoice( wx.Choice ): + + def GetChoice( self ): + + selection = self.GetSelection() + + if selection != wx.NOT_FOUND: return self.GetClientData( selection ) + else: raise Exception( 'choice not chosen' ) + + +class ChoiceCollect( BetterChoice ): + + def __init__( self, parent, page_key = None, sort_by = None ): + + BetterChoice.__init__( self, parent ) + + self._page_key = page_key + + options = wx.GetApp().Read( 'options' ) + + if sort_by is None: sort_by = options[ 'sort_by' ] + + collect_choices = CC.GenerateCollectByChoices( sort_by ) + + for ( string, data ) in collect_choices: self.Append( string, data ) + + self.SetSelection( options[ 'default_collect' ] ) + + self.Bind( wx.EVT_CHOICE, self.EventChoice ) + + + def EventChoice( self, event ): + + if self._page_key is not None: + + selection = self.GetSelection() + + if selection != wx.NOT_FOUND: + + collect_by = self.GetClientData( selection ) + + HC.pubsub.pub( 'collect_media', self._page_key, collect_by ) + + + + +class ChoiceSort( BetterChoice ): + + def __init__( self, parent, page_key = None, sort_by = None ): + + BetterChoice.__init__( self, parent ) + + self._page_key = page_key + + options = wx.GetApp().Read( 'options' ) + + if sort_by is None: sort_by = options[ 'sort_by' ] + + sort_choices = CC.SORT_CHOICES + sort_by + + ratings_service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ) + + for ratings_service_identifier in ratings_service_identifiers: + + sort_choices.append( ( 'rating_descend', ratings_service_identifier ) ) + sort_choices.append( ( 'rating_ascend', ratings_service_identifier ) ) + + + for ( sort_by_type, sort_by_data ) in sort_choices: + + if sort_by_type == 'system': string = CC.sort_string_lookup[ sort_by_data ] + elif sort_by_type == 'namespaces': string = '-'.join( sort_by_data ) + elif sort_by_type == 'rating_descend': string = sort_by_data.GetName() + ' rating highest first' + elif sort_by_type == 'rating_ascend': string = sort_by_data.GetName() + ' rating lowest first' + + self.Append( 'sort by ' + string, ( sort_by_type, sort_by_data ) ) + + + try: self.SetSelection( options[ 'default_sort' ] ) + except: pass + + self.Bind( wx.EVT_CHOICE, self.EventChoice ) + + HC.pubsub.sub( self, 'ACollectHappened', 'collect_media' ) + + + def _BroadcastSort( self ): + + selection = self.GetSelection() + + if selection != wx.NOT_FOUND: + + sort_by = self.GetClientData( selection ) + + HC.pubsub.pub( 'sort_media', self._page_key, sort_by ) + + + + def ACollectHappened( self, page_key, collect_by ): + + if page_key == self._page_key: self._BroadcastSort() + + + def EventChoice( self, event ): + + if self._page_key is not None: self._BroadcastSort() + + +class FileDropTarget( wx.FileDropTarget ): + + def __init__( self, callable ): + + wx.FileDropTarget.__init__( self ) + + self._callable = 
callable + + + def OnDropFiles( self, x, y, paths ): wx.CallAfter( self._callable, paths ) + +class Frame( wx.Frame ): + + def __init__( self, *args, **kwargs ): + + wx.Frame.__init__( self, *args, **kwargs ) + + self._options = wx.GetApp().Read( 'options' ) + + #self.SetDoubleBuffered( True ) + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + +class Gauge( wx.Gauge ): + + def __init__( self, *args, **kwargs ): + + wx.Gauge.__init__( self, *args, **kwargs ) + + self._actual_max = None + + + def SetRange( self, max ): + + if max > 1000: + + self._actual_max = max + wx.Gauge.SetRange( self, 1000 ) + + else: + + self._actual_max = None + wx.Gauge.SetRange( self, max ) + + + + def SetValue( self, value ): + + if self._actual_max is None: wx.Gauge.SetValue( self, value ) + else: wx.Gauge.SetValue( self, min( int( 1000 * ( float( value ) / self._actual_max ) ), 1000 ) ) + + +class ListBook( wx.Panel ): + + def __init__( self, *args, **kwargs ): + + wx.Panel.__init__( self, *args, **kwargs ) + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._list_box = wx.ListBox( self, style = wx.LB_SINGLE | wx.LB_SORT ) + + self._empty_panel = wx.Panel( self ) + + self._empty_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._current_name = None + + self._current_panel = self._empty_panel + + self._panel_sizer = wx.BoxSizer( wx.VERTICAL ) + + self._panel_sizer.AddF( self._empty_panel, FLAGS_EXPAND_BOTH_WAYS ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._list_box, FLAGS_EXPAND_PERPENDICULAR ) + hbox.AddF( self._panel_sizer, FLAGS_EXPAND_BOTH_WAYS ) + + self._list_box.Bind( wx.EVT_LISTBOX, self.EventSelection ) + + self.SetSizer( hbox ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + + def _RecalcListBoxWidth( self ): self.Layout() + + def _Select( self, selection ): + + with wx.FrozenWindow( self ): + + if selection == wx.NOT_FOUND: self._current_name = None + else: self._current_name = self._list_box.GetString( selection ) + + self._current_panel.Hide() + + self._list_box.SetSelection( selection ) + + if selection == wx.NOT_FOUND: self._current_panel = self._empty_panel + else: + + panel_info = self._list_box.GetClientData( selection ) + + if type( panel_info ) == tuple: + + ( classname, args, kwargs ) = panel_info + + page = classname( *args, **kwargs ) + + page.Hide() + + self._panel_sizer.AddF( page, FLAGS_EXPAND_BOTH_WAYS ) + + self._list_box.SetClientData( selection, page ) + + self._RecalcListBoxWidth() + + + self._current_panel = self._list_box.GetClientData( selection ) + + + + self._current_panel.Show() + + self.Layout() + + event = wx.NotifyEvent( wx.wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGED, -1 ) + + self.ProcessEvent( event ) + + + def AddPage( self, page, name, select = False ): + + if type( page ) != tuple: + + page.Hide() + + self._panel_sizer.AddF( page, FLAGS_EXPAND_BOTH_WAYS ) + + + self._list_box.Append( name, page ) + + self._RecalcListBoxWidth() + + if self._list_box.GetCount() == 1: self._Select( 0 ) + elif select: self._Select( self._list_box.FindString( name ) ) + + + def DeleteAllPages( self ): + + self._panel_sizer.Detach( self._empty_panel ) + + self._panel_sizer.Clear( deleteWindows = True ) + + self._panel_sizer.AddF( self._empty_panel, FLAGS_EXPAND_BOTH_WAYS ) + + self._current_name = None + + self._current_panel = self._empty_panel + + self._list_box.Clear() + + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( 
event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'select_down': self.SelectDown() + elif command == 'select_up': self.SelectUp() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + wx.MessageBox( traceback.format_exc() ) + + + + + def EventSelection( self, event ): + + if self._list_box.GetSelection() != self._list_box.FindString( self._current_name ): + + event = wx.NotifyEvent( wx.wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGING, -1 ) + + self.GetEventHandler().ProcessEvent( event ) + + if event.IsAllowed(): self._Select( self._list_box.GetSelection() ) + else: self._list_box.SetSelection( self._list_box.FindString( self._current_name ) ) + + + + def GetCurrentName( self ): return self._current_name + + def GetCurrentPage( self ): + + if self._current_panel == self._empty_panel: return None + else: return self._current_panel + + + def GetNameToPageDict( self ): return { self._list_box.GetString( i ) : self._list_box.GetClientData( i ) for i in range( self._list_box.GetCount() ) if type( self._list_box.GetClientData( i ) ) != tuple } + + def NameExists( self, name, panel = None ): return self._list_box.FindString( name ) != wx.NOT_FOUND + + def DeleteCurrentPage( self ): + + selection = self._list_box.GetSelection() + + if selection != wx.NOT_FOUND: + + next_selection = selection + 1 + previous_selection = selection - 1 + + if next_selection < self._list_box.GetCount(): self._Select( next_selection ) + elif previous_selection >= 0: self._Select( previous_selection ) + else: self._Select( wx.NOT_FOUND ) + + panel_info = self._list_box.GetClientData( selection ) + + if type( panel_info ) != tuple: self._panel_sizer.Remove( panel_info ) + + self._list_box.Delete( selection ) + + self._RecalcListBoxWidth() + + + + def RenamePage( self, name, new_name ): + + if self._list_box.FindString( new_name ) != wx.NOT_FOUND: raise Exception( 'That name is already in use!' 
) + + if self._current_name == name: self._current_name = new_name + + self._list_box.SetString( self._list_box.FindString( name ), new_name ) + + self._RecalcListBoxWidth() + + + def Select( self, name ): + + selection = self._list_box.FindString( name ) + + if selection != wx.NOT_FOUND and selection != self._list_box.GetSelection(): + + event = wx.NotifyEvent( wx.wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGING, -1 ) + + self.GetEventHandler().ProcessEvent( event ) + + if event.IsAllowed(): self._Select( selection ) + + + + def SelectDown( self ): + + current_selection = self._list_box.FindString( self._current_name ) + + if current_selection != wx.NOT_FOUND: + + num_entries = self._list_box.GetCount() + + if current_selection == num_entries - 1: selection = 0 + else: selection = current_selection + 1 + + if selection != current_selection: self._Select( selection ) + + + + def SelectPage( self, page ): + + for i in range( self._list_box.GetCount() ): + + if self._list_box.GetClientData( i ) == page: + + self._Select( i ) + + return + + + + + def SelectUp( self ): + + current_selection = self._list_box.FindString( self._current_name ) + + if current_selection != wx.NOT_FOUND: + + num_entries = self._list_box.GetCount() + + if current_selection == 0: selection = num_entries - 1 + else: selection = current_selection - 1 + + if selection != current_selection: self._Select( selection ) + + + +class ListCtrlAutoWidth( wx.ListCtrl, ListCtrlAutoWidthMixin ): + + def __init__( self, parent, height ): + + wx.ListCtrl.__init__( self, parent, size=( -1, height ), style=wx.LC_REPORT ) + ListCtrlAutoWidthMixin.__init__( self ) + + + def GetAllSelected( self ): + + indices = [] + + i = self.GetFirstSelected() + + while i != -1: + + indices.append( i ) + + i = self.GetNextSelected( i ) + + + return indices + + + def RemoveAllSelected( self ): + + indices = self.GetAllSelected() + + indices.reverse() # so we don't screw with the indices of deletees below + + for index in indices: self.DeleteItem( index ) + + +class OnOffButton( wx.Button ): + + def __init__( self, parent, page_key, topic, on_label, off_label = None, start_on = True ): + + if start_on: label = on_label + else: label = off_label + + wx.Button.__init__( self, parent, label = label ) + + self._page_key = page_key + self._topic = topic + self._on_label = on_label + + if off_label is None: self._off_label = on_label + else: self._off_label = off_label + + self._on = start_on + + if self._on: self.SetForegroundColour( ( 0, 128, 0 ) ) + else: self.SetForegroundColour( ( 128, 0, 0 ) ) + + self.Bind( wx.EVT_BUTTON, self.EventButton ) + + HC.pubsub.sub( self, 'HitButton', 'hit_on_off_button' ) + + + def EventButton( self, event ): + + if self._on: + + self._on = False + + self.SetLabel( self._off_label ) + + self.SetForegroundColour( ( 128, 0, 0 ) ) + + HC.pubsub.pub( self._topic, self._page_key, False ) + + else: + + self._on = True + + self.SetLabel( self._on_label ) + + self.SetForegroundColour( ( 0, 128, 0 ) ) + + HC.pubsub.pub( self._topic, self._page_key, True ) + + + + def IsOn( self ): return self._on + +class NoneableSpinCtrl( wx.Panel ): + + def __init__( self, parent, message, value, none_phrase = 'no limit', max = 1000000, multiplier = 1, num_dimensions = 1 ): + + wx.Panel.__init__( self, parent ) + + self._num_dimensions = num_dimensions + self._multiplier = multiplier + + self._checkbox = wx.CheckBox( self, label = none_phrase ) + self._checkbox.Bind( wx.EVT_CHECKBOX, self.EventCheckBox ) + + if value is None: + + self._one = wx.SpinCtrl( self, 
initial = 0, max = max, size = ( 80, -1 ) ) + self._one.Disable() + + if num_dimensions == 2: + + self._two = wx.SpinCtrl( self, initial = 0, max = max, size = ( 80, -1 ) ) + self._two.Disable() + + + self._checkbox.SetValue( True ) + + else: + + if num_dimensions == 2: + + ( value, value_2 ) = value + + self._two = wx.SpinCtrl( self, initial = value_2 / multiplier, max = max, size = ( 80, -1 ) ) + + + self._one = wx.SpinCtrl( self, initial = value / multiplier, max = max, size = ( 80, -1 ) ) + + self._checkbox.SetValue( False ) + + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + hbox.AddF( wx.StaticText( self, label=message + ': ' ), FLAGS_MIXED ) + hbox.AddF( self._one, FLAGS_MIXED ) + + if self._num_dimensions == 2: + + hbox.AddF( wx.StaticText( self, label = 'x' ), FLAGS_MIXED ) + hbox.AddF( self._two, FLAGS_MIXED ) + + + hbox.AddF( self._checkbox, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + + def EventCheckBox( self, event ): + + if self._checkbox.GetValue(): + + self._one.Disable() + if self._num_dimensions == 2: self._two.Disable() + + else: + + self._one.Enable() + if self._num_dimensions == 2: self._two.Enable() + + + + def GetValue( self ): + + if self._checkbox.GetValue(): return None + else: + + if self._num_dimensions == 2: return ( self._one.GetValue() * self._multiplier, self._two.GetValue() * self._multiplier ) + else: return self._one.GetValue() * self._multiplier + + + + def SetValue( self, value ): + + if value is None: + + self._checkbox.SetValue( True ) + + self._one.Disable() + if self._num_dimensions == 2: self._two.Disable() + + else: + + self._checkbox.SetValue( False ) + + self._one.Enable() + if self._num_dimensions == 2: self._two.Enable() + + if self._num_dimensions == 2: + + ( value, y ) = value + + self._two.SetValue( y / self._multiplier ) + + + self._one.SetValue( value / self._multiplier ) + + + +class ListBox( wx.ScrolledWindow ): + + def __init__( self, parent, min_height = 250 ): + + wx.ScrolledWindow.__init__( self, parent, style = wx.VSCROLL | wx.SIMPLE_BORDER ) + + self._ordered_strings = [] + self._strings_to_terms = {} + + self._options = wx.GetApp().Read( 'options' ) + + self._canvas_bmp = wx.EmptyBitmap( 0, 0, 24 ) + + self._current_selected_index = None + + dc = self._GetScrolledDC() + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( text_x, self._text_y ) = dc.GetTextExtent( 'abcdefghijklmnopqrstuvwxyz' ) + + self._num_rows_per_page = 0 + + self.SetScrollRate( 0, self._text_y ) + + self.SetMinSize( ( 50, min_height ) ) + + self.Bind( wx.EVT_PAINT, self.EventPaint ) + self.Bind( wx.EVT_SIZE, self.EventResize ) + + self.Bind( wx.EVT_LEFT_DOWN, self.EventMouseSelect ) + self.Bind( wx.EVT_LEFT_DCLICK, self.EventDClick ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + + def __len__( self ): return len( self._ordered_strings ) + + def _Activate( self, tag ): pass + + def _DrawTexts( self ): + + ( my_width, my_height ) = self.GetClientSize() + + dc = self._GetScrolledDC() + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + i = 0 + + dc.SetBackground( wx.Brush( wx.Colour( 255, 255, 255 ) ) ) + + dc.Clear() + + for ( i, text ) in enumerate( self._ordered_strings ): + + ( r, g, b ) = self._GetTextColour( text ) + + text_colour = wx.Colour( r, g, b ) + + if self._current_selected_index is not None and i == self._current_selected_index: + + dc.SetBrush( wx.Brush( text_colour ) ) + + dc.SetPen( wx.TRANSPARENT_PEN ) + + dc.DrawRectangle( 0, i * self._text_y, my_width, self._text_y ) + + text_colour = wx.WHITE 
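+ # note: the selected row has just been painted with its text colour as the row background, so the text itself is switched to white to stay readable against it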
+ + + dc.SetTextForeground( text_colour ) + + ( x, y ) = ( 3, i * self._text_y ) + + dc.DrawText( text, x, y ) + + + + def _GetIndexUnderMouse( self, mouse_event ): + + ( xUnit, yUnit ) = self.GetScrollPixelsPerUnit() + + ( x_scroll, y_scroll ) = self.GetViewStart() + + y_offset = y_scroll * yUnit + + y = mouse_event.GetY() + y_offset + + row_index = ( y / self._text_y ) + + if row_index >= len( self._ordered_strings ): return None + + return row_index + + + def _GetScrolledDC( self ): + + cdc = wx.ClientDC( self ) + + self.DoPrepareDC( cdc ) # because this is a scrolled window + + return wx.BufferedDC( cdc, self._canvas_bmp ) + + + def _GetTextColour( self, text ): return ( 0, 111, 250 ) + + def _Select( self, index ): + + if index is not None: + + if index == -1 or index > len( self._ordered_strings ): index = len( self._ordered_strings ) - 1 + elif index == len( self._ordered_strings ) or index < -1: index = 0 + + + self._current_selected_index = index + + self._DrawTexts() + + if self._current_selected_index is not None: + + # scroll to index, if needed + + y = self._text_y * self._current_selected_index + + ( start_x, start_y ) = self.GetViewStart() + + ( x_unit, y_unit ) = self.GetScrollPixelsPerUnit() + + ( width, height ) = self.GetClientSize() + + if y < start_y * y_unit: + + y_to_scroll_to = y / y_unit + + self.Scroll( -1, y_to_scroll_to ) + + wx.PostEvent( self, wx.ScrollWinEvent( wx.wxEVT_SCROLLWIN_THUMBRELEASE ) ) + + elif y > ( start_y * y_unit ) + height: + + y_to_scroll_to = ( y - height ) / y_unit + + self.Scroll( -1, y_to_scroll_to + 3 ) + + wx.PostEvent( self, wx.ScrollWinEvent( wx.wxEVT_SCROLLWIN_THUMBRELEASE ) ) + + + + + def _TextsHaveChanged( self ): + + self._current_selected_index = None + + total_height = self._text_y * len( self._ordered_strings ) + + ( my_x, my_y ) = self._canvas_bmp.GetSize() + + if my_y != total_height: wx.PostEvent( self, wx.SizeEvent() ) + else: self._DrawTexts() + + + def EventDClick( self, event ): + + index = self._GetIndexUnderMouse( event ) + + if index is not None and index == self._current_selected_index: self._Activate( self._strings_to_terms[ self._ordered_strings[ self._current_selected_index ] ] ) + + + def EventKeyDown( self, event ): + + key_code = event.GetKeyCode() + + if self._current_selected_index is not None: + + if key_code in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ): self._Activate( self._strings_to_terms[ self._ordered_strings[ self._current_selected_index ] ] ) + elif key_code in ( wx.WXK_UP, wx.WXK_NUMPAD_UP ): self._Select( self._current_selected_index - 1 ) + elif key_code in ( wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN ): self._Select( self._current_selected_index + 1 ) + elif key_code == wx.WXK_PAGEUP: self._Select( self._current_selected_index - self._num_rows_per_page ) + elif key_code == wx.WXK_PAGEDOWN: self._Select( self._current_selected_index + self._num_rows_per_page ) + else: event.Skip() + + else: event.Skip() + + + def EventMouseSelect( self, event ): + + index = self._GetIndexUnderMouse( event ) + + self._Select( index ) + + event.Skip() + + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp, wx.BUFFER_VIRTUAL_AREA ) + + def EventResize( self, event ): + + ( client_x, client_y ) = self.GetClientSize() + + ( my_x, my_y ) = self._canvas_bmp.GetSize() + + self._num_rows_per_page = client_y / self._text_y + + total_height = self._text_y * len( self._ordered_strings ) + + if my_x != client_x or my_y != total_height: + + new_y = max( client_y, total_height ) + + self.SetVirtualSize( ( client_x, 
new_y ) ) + + self._canvas_bmp = wx.EmptyBitmap( client_x, new_y, 24 ) + + self._DrawTexts() + + + + def SetTexts( self, ordered_strings ): + + if ordered_strings != self._ordered_strings: + + self._ordered_strings = ordered_strings + self._strings_to_terms = { s : s for s in ordered_strings } + + self._TextsHaveChanged() + + if len( ordered_strings ) > 0: self._Select( 0 ) + + + +class ListBoxMessages( ListBox ): + + def _GetTextColour( self, predicate_string ): + + if predicate_string.startswith( 'system:' ): ( r, g, b ) = ( 153, 101, 21 ) + else: ( r, g, b ) = ( 0, 111, 250 ) + + return ( r, g, b ) + + +class ListBoxMessagesActiveOnly( ListBoxMessages ): + + def __init__( self, parent, callable ): + + ListBoxMessages.__init__( self, parent ) + + self._callable = callable + + self._matches = {} + + + def _Activate( self, tag ): self._callable( tag ) + + def SetTerms( self, matches ): + + if matches != self._matches: + + self._matches = matches + + self._ordered_strings = [] + self._strings_to_terms = {} + + for ( term, count ) in matches: + + if count is None: term_string = term + else: term_string = term + ' (' + HC.ConvertIntToPrettyString( count ) + ')' + + self._ordered_strings.append( term_string ) + self._strings_to_terms[ term_string ] = term + + + self._TextsHaveChanged() + + if len( matches ) > 0: self._Select( 0 ) + + + +class ListBoxMessagesPredicates( ListBoxMessages ): + + def __init__( self, parent, page_key, initial_predicates = [] ): + + ListBoxMessages.__init__( self, parent ) + + self._page_key = page_key + + if len( initial_predicates ) > 0: + + for predicate in initial_predicates: + + self._ordered_strings.append( predicate ) + self._strings_to_terms[ predicate ] = predicate + + + self._TextsHaveChanged() + + + + def _Activate( self, term ): HC.pubsub.pub( 'remove_predicate', self._page_key, term ) + + def ActivatePredicate( self, term ): + + if term in self._ordered_strings: + + self._ordered_strings.remove( term ) + del self._strings_to_terms[ term ] + + else: + + if term == 'system:inbox' and 'system:archive' in self._ordered_strings: self._ordered_strings.remove( 'system:archive' ) + elif term == 'system:archive' and 'system:inbox' in self._ordered_strings: self._ordered_strings.remove( 'system:inbox' ) + + self._ordered_strings.append( term ) + self._strings_to_terms[ term ] = term + + self._ordered_strings.sort() + + + self._TextsHaveChanged() + + + def AddPredicate( self, predicate ): + + self._ordered_strings.append( predicate ) + self._strings_to_terms[ predicate ] = predicate + + self._ordered_strings.sort() + + self._TextsHaveChanged() + + + def GetPredicates( self ): return self._ordered_strings + + def HasPredicate( self, predicate ): return predicate in self._ordered_strings + + def RemovePredicate( self, predicate ): + + self._ordered_strings.remove( predicate ) + del self._strings_to_terms[ predicate ] + + self._TextsHaveChanged() + + +class SaneListCtrl( wx.ListCtrl, ListCtrlAutoWidthMixin, ColumnSorterMixin ): + + def __init__( self, parent, height, columns ): + + num_columns = len( columns ) + + wx.ListCtrl.__init__( self, parent, size=( -1, height ), style=wx.LC_REPORT ) + ListCtrlAutoWidthMixin.__init__( self ) + ColumnSorterMixin.__init__( self, num_columns ) + + self.GetTopLevelParent().SetDoubleBuffered( False ) # windows double buffer makes listctrls refresh and bug out + + self.itemDataMap = {} + self._next_data_index = 0 + + resize_column = 1 + + for ( i, ( name, width ) ) in enumerate( columns ): + + self.InsertColumn( i, name, width = 
width ) + + if width == -1: resize_column = i + 1 + + + self.setResizeColumn( resize_column ) + + + def Append( self, display_tuple, data_tuple ): + + index = wx.ListCtrl.Append( self, display_tuple ) + + self.SetItemData( index, self._next_data_index ) + + self.itemDataMap[ self._next_data_index ] = list( data_tuple ) + + self._next_data_index += 1 + + + def GetAllSelected( self ): + + indices = [] + + i = self.GetFirstSelected() + + while i != -1: + + indices.append( i ) + + i = self.GetNextSelected( i ) + + + return indices + + + def GetClientData( self, index = None ): + + if index is None: + + data_indicies = [ self.GetItemData( index ) for index in range( self.GetItemCount() ) ] + + datas = [ self.itemDataMap[ data_index ] for data_index in data_indicies ] + + return datas + + else: + + data_index = self.GetItemData( index ) + + return self.itemDataMap[ data_index ] + + + + def GetListCtrl( self ): return self + + def RemoveAllSelected( self ): + + indices = self.GetAllSelected() + + indices.reverse() # so we don't screw with the indices of deletees below + + for index in indices: self.DeleteItem( index ) + + + def UpdateValue( self, index, column, display_value, data_value ): + + self.SetStringItem( index, column, display_value ) + + data_index = self.GetItemData( index ) + + self.itemDataMap[ data_index ][ column ] = data_value + + + def UpdateRow( self, index, display_tuple, data_tuple ): + + column = 0 + + for value in display_tuple: + + self.SetStringItem( index, column, value ) + + column += 1 + + + data_index = self.GetItemData( index ) + + self.itemDataMap[ data_index ] = data_tuple + + +class Shortcut( wx.TextCtrl ): + + def __init__( self, parent, modifier = wx.ACCEL_NORMAL, key = wx.WXK_F7 ): + + self._modifier = modifier + self._key = key + + wx.TextCtrl.__init__( self, parent, style = wx.TE_PROCESS_ENTER ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self._SetShortcutString() + + + def _SetShortcutString( self ): + + display_string = '' + + if self._modifier == wx.ACCEL_ALT: display_string += 'alt + ' + elif self._modifier == wx.ACCEL_CTRL: display_string += 'ctrl + ' + elif self._modifier == wx.ACCEL_SHIFT: display_string += 'shift + ' + + if self._key in range( 65, 91 ): display_string += chr( self._key + 32 ) # + 32 for converting ascii A -> a + elif self._key in range( 97, 123 ): display_string += chr( self._key ) + else: display_string += HC.wxk_code_string_lookup[ self._key ] + + wx.TextCtrl.SetValue( self, display_string ) + + + def EventKeyDown( self, event ): + + if event.KeyCode in range( 65, 91 ) or event.KeyCode in HC.wxk_code_string_lookup.keys(): + + modifier = wx.ACCEL_NORMAL + + if event.AltDown(): modifier = wx.ACCEL_ALT + elif event.ControlDown(): modifier = wx.ACCEL_CTRL + elif event.ShiftDown(): modifier = wx.ACCEL_SHIFT + + ( self._modifier, self._key ) = HC.GetShortcutFromEvent( event ) + + self._SetShortcutString() + + + def GetValue( self ): return ( self._modifier, self._key ) + + def SetValue( self, modifier, key ): + + ( self._modifier, self._key ) = ( modifier, key ) + + self._SetShortcutString() + + +class TagsBox( ListBox ): + + def _GetNamespaceColours( self ): return self._options[ 'namespace_colours' ] + + def _GetTextColour( self, tag_string ): + + namespace_colours = self._GetNamespaceColours() + + if ':' in tag_string: + + ( namespace, sub_tag ) = tag_string.split( ':', 1 ) + + if namespace.startswith( '-' ): namespace = namespace[1:] + if namespace.startswith( '(+) ' ): namespace = namespace[4:] + if namespace.startswith( 
'(-) ' ): namespace = namespace[4:] + if namespace.startswith( '(X) ' ): namespace = namespace[4:] + + if namespace in namespace_colours: ( r, g, b ) = namespace_colours[ namespace ] + else: ( r, g, b ) = namespace_colours[ None ] + + else: ( r, g, b ) = namespace_colours[ '' ] + + return ( r, g, b ) + + +class TagsBoxActiveOnly( TagsBox ): + + def __init__( self, parent, callable ): + + TagsBox.__init__( self, parent ) + + self._callable = callable + + self._matches = {} + + + def _Activate( self, tag ): self._callable( tag ) + + def SetTags( self, matches ): + + if matches != self._matches: + + self._matches = matches + + self._ordered_strings = [] + self._strings_to_terms = {} + + for ( tag, count ) in matches: + + if count is None: tag_string = tag + else: tag_string = tag + ' (' + HC.ConvertIntToPrettyString( count ) + ')' + + self._ordered_strings.append( tag_string ) + self._strings_to_terms[ tag_string ] = tag + + + self._TextsHaveChanged() + + if len( matches ) > 0: self._Select( 0 ) + + + +class TagsBoxCPP( TagsBox ): + + def __init__( self, parent, page_key ): + + TagsBox.__init__( self, parent, min_height = 200 ) + + self._sort = self._options[ 'default_tag_sort' ] + + self._page_key = page_key + + self._tag_service_identifier = CC.NULL_SERVICE_IDENTIFIER + self._last_media = None + + self._current_tags_to_count = {} + self._pending_tags_to_count = {} + self._petitioned_tags_to_count = {} + + HC.pubsub.sub( self, 'SetTagsByMedia', 'new_tags_selection' ) + HC.pubsub.sub( self, 'ChangeTagRepository', 'change_tag_repository' ) + + + def _Activate( self, tag ): HC.pubsub.pub( 'add_predicate', self._page_key, tag ) + + def _SortTags( self ): + + if self._sort == CC.SORT_BY_LEXICOGRAPHIC_ASC: compare_function = lambda a, b: cmp( a, b ) + elif self._sort == CC.SORT_BY_LEXICOGRAPHIC_DESC: compare_function = lambda a, b: cmp( b, a ) + elif self._sort in ( CC.SORT_BY_INCIDENCE_ASC, CC.SORT_BY_INCIDENCE_DESC ): + + tags_to_count = collections.defaultdict( lambda: 0 ) + + tags_to_count.update( self._current_tags_to_count ) + + if self._sort == CC.SORT_BY_INCIDENCE_ASC: compare_function = lambda a, b: cmp( tags_to_count[ self._strings_to_terms[ a ] ], tags_to_count[ self._strings_to_terms[ b ] ] ) + elif self._sort == CC.SORT_BY_INCIDENCE_DESC: compare_function = lambda a, b: cmp( tags_to_count[ self._strings_to_terms[ b ] ], tags_to_count[ self._strings_to_terms[ a ] ] ) + + + self._ordered_strings.sort( compare_function ) + + self._TextsHaveChanged() + + + def ChangeTagRepository( self, page_key, service_identifier ): + + if page_key == self._page_key: + + self._tag_service_identifier = service_identifier + + if self._last_media is not None: self.SetTagsByMedia( self._page_key, self._last_media ) + + + + def SetSort( self, sort ): + + self._sort = sort + + self._SortTags() + + + def SetTags( self, current_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ): + + if current_tags_to_count != self._current_tags_to_count or pending_tags_to_count != self._pending_tags_to_count or petitioned_tags_to_count != self._petitioned_tags_to_count: + + self._current_tags_to_count = current_tags_to_count + self._pending_tags_to_count = pending_tags_to_count + self._petitioned_tags_to_count = petitioned_tags_to_count + + all_tags = { tag for tag in self._current_tags_to_count.keys() + self._pending_tags_to_count.keys() + self._petitioned_tags_to_count.keys() } + + self._ordered_strings = [] + self._strings_to_terms = {} + + for tag in all_tags: + + tag_string = tag + + if tag in 
self._current_tags_to_count: tag_string += ' (' + HC.ConvertIntToPrettyString( self._current_tags_to_count[ tag ] ) + ')' + if tag in self._pending_tags_to_count: tag_string += ' (+' + HC.ConvertIntToPrettyString( self._pending_tags_to_count[ tag ] ) + ')' + if tag in self._petitioned_tags_to_count: tag_string += ' (-' + HC.ConvertIntToPrettyString( self._petitioned_tags_to_count[ tag ] ) + ')' + + self._ordered_strings.append( tag_string ) + self._strings_to_terms[ tag_string ] = tag + + + self._SortTags() + + + + def SetTagsByMedia( self, page_key, media ): + + if page_key == self._page_key: + + self._last_media = media + + ( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = CC.GetMediasTagCount( media, self._tag_service_identifier ) + + self.SetTags( current_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) + + + +class TagsBoxCPPWithSorter( wx.Panel ): + + def __init__( self, parent, page_key ): + + wx.Panel.__init__( self, parent ) + + self._options = wx.GetApp().Read( 'options' ) + + self._sorter = wx.Choice( self ) + + self._sorter.Append( 'lexicographic (a-z)', CC.SORT_BY_LEXICOGRAPHIC_ASC ) + self._sorter.Append( 'lexicographic (z-a)', CC.SORT_BY_LEXICOGRAPHIC_DESC ) + self._sorter.Append( 'incidence (desc)', CC.SORT_BY_INCIDENCE_DESC ) + self._sorter.Append( 'incidence (asc)', CC.SORT_BY_INCIDENCE_ASC ) + + if self._options[ 'default_tag_sort' ] == CC.SORT_BY_LEXICOGRAPHIC_ASC: self._sorter.Select( 0 ) + elif self._options[ 'default_tag_sort' ] == CC.SORT_BY_LEXICOGRAPHIC_DESC: self._sorter.Select( 1 ) + elif self._options[ 'default_tag_sort' ] == CC.SORT_BY_INCIDENCE_DESC: self._sorter.Select( 2 ) + elif self._options[ 'default_tag_sort' ] == CC.SORT_BY_INCIDENCE_ASC: self._sorter.Select( 3 ) + + self._sorter.Bind( wx.EVT_CHOICE, self.EventSort ) + + self._tags_box = TagsBoxCPP( self, page_key ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._sorter, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._tags_box, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + + def EventSort( self, event ): + + selection = self._sorter.GetSelection() + + if selection != wx.NOT_FOUND: + + sort = self._sorter.GetClientData( selection ) + + self._tags_box.SetSort( sort ) + + + +class TagsBoxFlat( TagsBox ): + + def __init__( self, parent, removed_callable ): + + TagsBox.__init__( self, parent ) + + self._removed_callable = removed_callable + + + def _RecalcTags( self ): + + self._ordered_strings = self._strings_to_terms.values() + + self._ordered_strings.sort() + + self._TextsHaveChanged() + + + def _Activate( self, tag ): + + del self._strings_to_terms[ tag ] + + self._RecalcTags() + + self._removed_callable() + + + def AddTag( self, tag ): + + self._strings_to_terms[ tag ] = tag + + self._RecalcTags() + + + def GetTags( self ): return self._strings_to_terms.values() + + def SetTags( self, tags ): + + self._strings_to_terms = { t : t for t in tags } + + self._RecalcTags() + + +class TagsBoxManage( TagsBox ): + + def __init__( self, parent, callable, current_tags, deleted_tags, pending_tags, petitioned_tags ): + + TagsBox.__init__( self, parent ) + + self._callable = callable + + self._show_deleted_tags = False + + self._current_tags = set( current_tags ) + self._deleted_tags = set( deleted_tags ) + self._pending_tags = set( pending_tags ) + self._petitioned_tags = set( petitioned_tags ) + + self._RebuildTagStrings() + + + def _Activate( self, tag ): self._callable( tag ) + + def _RebuildTagStrings( self ): + + if 
self._show_deleted_tags: all_tags = self._current_tags | self._deleted_tags | self._pending_tags | self._petitioned_tags + else: all_tags = self._current_tags | self._pending_tags | self._petitioned_tags + + self._ordered_strings = [] + self._strings_to_terms = {} + + for tag in all_tags: + + if tag in self._petitioned_tags: tag_string = '(-) ' + tag + elif tag in self._current_tags: tag_string = tag + elif tag in self._pending_tags: tag_string = '(+) ' + tag + else: tag_string = '(X) ' + tag + + self._ordered_strings.append( tag_string ) + self._strings_to_terms[ tag_string ] = tag + + + self._ordered_strings.sort() + + self._TextsHaveChanged() + + + def PetitionTag( self, tag ): + + self._petitioned_tags.add( tag ) + + self._RebuildTagStrings() + + + def PendTag( self, tag ): + + self._pending_tags.add( tag ) + + self._RebuildTagStrings() + + + def RescindPetition( self, tag ): + + self._petitioned_tags.discard( tag ) + + self._RebuildTagStrings() + + + def RescindPend( self, tag ): + + self._pending_tags.discard( tag ) + + self._RebuildTagStrings() + + + def SetShowDeletedTags( self, value ): + + self._show_deleted_tags = value + + self._RebuildTagStrings() + + +class TagsBoxOptions( TagsBox ): + + def __init__( self, parent, initial_namespace_colours ): + + TagsBox.__init__( self, parent ) + + self._namespace_colours = dict( initial_namespace_colours ) + + for namespace in self._namespace_colours: + + if namespace is None: namespace_string = 'default namespace:tag' + elif namespace == '': namespace_string = 'unnamespaced tag' + else: namespace_string = namespace + ':tag' + + self._ordered_strings.append( namespace_string ) + self._strings_to_terms[ namespace_string ] = namespace + + + self._TextsHaveChanged() + + + def _Activate( self, tag ): self.RemoveNamespace( tag ) + + def _GetNamespaceColours( self ): return self._namespace_colours + + def SetNamespaceColour( self, namespace, colour ): + + if namespace not in self._namespace_colours: + + namespace_string = namespace + ':tag' + + self._ordered_strings.append( namespace_string ) + self._strings_to_terms[ namespace_string ] = namespace + + self._ordered_strings.sort() + + + self._namespace_colours[ namespace ] = colour.Get() + + self._TextsHaveChanged() + + + def GetNamespaceColours( self ): return self._namespace_colours + + def GetSelectedNamespaceColour( self ): + + if self._current_selected_index is not None: + + namespace_string = self._ordered_strings[ self._current_selected_index ] + + namespace = self._strings_to_terms[ namespace_string ] + + ( r, g, b ) = self._namespace_colours[ namespace ] + + colour = wx.Colour( r, g, b ) + + return ( namespace, colour ) + + + return None + + + def RemoveNamespace( self, namespace ): + + if namespace is not None and namespace != '': + + namespace_string = namespace + ':tag' + + self._ordered_strings.remove( namespace_string ) + + del self._strings_to_terms[ namespace_string ] + + del self._namespace_colours[ namespace ] + + self._TextsHaveChanged() + + + +class TagsBoxPredicates( TagsBox ): + + def __init__( self, parent, page_key, initial_predicates = [] ): + + TagsBox.__init__( self, parent, min_height = 100 ) + + self._page_key = page_key + + if len( initial_predicates ) > 0: + + for predicate in initial_predicates: + + self._ordered_strings.append( predicate ) + self._strings_to_terms[ predicate ] = predicate + + + self._TextsHaveChanged() + + + + def _Activate( self, tag ): HC.pubsub.pub( 'remove_predicate', self._page_key, tag ) + + def ActivatePredicate( self, tag ): + + if tag in 
self._ordered_strings: + + self._ordered_strings.remove( tag ) + del self._strings_to_terms[ tag ] + + else: + + if tag == 'system:inbox' and 'system:archive' in self._ordered_strings: self._ordered_strings.remove( 'system:archive' ) + elif tag == 'system:archive' and 'system:inbox' in self._ordered_strings: self._ordered_strings.remove( 'system:inbox' ) + elif tag == 'system:local' and 'system:not local' in self._ordered_strings: self._ordered_strings.remove( 'system:not local' ) + elif tag == 'system:not local' and 'system:local' in self._ordered_strings: self._ordered_strings.remove( 'system:local' ) + + self._ordered_strings.append( tag ) + self._strings_to_terms[ tag ] = tag + + self._ordered_strings.sort() + + + self._TextsHaveChanged() + + + def AddPredicate( self, predicate ): + + self._ordered_strings.append( predicate ) + self._strings_to_terms[ predicate ] = predicate + + self._ordered_strings.sort() + + self._TextsHaveChanged() + + + def GetPredicates( self ): return self._ordered_strings + + def HasPredicate( self, predicate ): return predicate in self._ordered_strings + + def RemovePredicate( self, predicate ): + + self._ordered_strings.remove( predicate ) + del self._strings_to_terms[ predicate ] + + self._TextsHaveChanged() + + \ No newline at end of file diff --git a/include/ClientGUIDialogs.py b/include/ClientGUIDialogs.py new file mode 100755 index 00000000..1057fefe --- /dev/null +++ b/include/ClientGUIDialogs.py @@ -0,0 +1,8088 @@ +import Crypto.PublicKey.RSA +import HydrusConstants as HC +import HydrusMessageHandling +import ClientConstants as CC +import ClientConstantsMessages +import ClientGUICommon +import collections +import os +import random +import re +import time +import traceback +import wx +import yaml + +# Option Enums + +ID_NULL = wx.NewId() + +ID_TIMER_UPDATE = wx.NewId() + +# Hue is generally 200, Sat and Lum changes based on need + +COLOUR_SELECTED = wx.Colour( 217, 242, 255 ) +COLOUR_SELECTED_DARK = wx.Colour( 1, 17, 26 ) +COLOUR_UNSELECTED = wx.Colour( 223, 227, 230 ) + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) +FLAGS_BIG_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 8 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_DEPTH_ONLY = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +def SelectServiceIdentifier( permission = None, service_types = HC.ALL_SERVICES, service_identifiers = None, unallowed = None ): + + if service_identifiers is None: + + services = wx.GetApp().Read( 'services', service_types ) + + if permission is not None: services = [ service for service in services if service.GetAccount().HasPermission( permission ) ] + + service_identifiers = [ service.GetServiceIdentifier() for service in services ] + + + if unallowed is not None: service_identifiers.difference_update( unallowed ) + + if len( service_identifiers ) == 0: return None + elif len( service_identifiers ) == 1: + + ( service_identifier, ) = service_identifiers + + return service_identifier + + else: + + names_to_service_identifiers = { service_identifier.GetName() : service_identifier for service_identifier in service_identifiers } 
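+ # several candidate services: ask the user to pick one by name in a list dialog below, then map the chosen name back to its service identifier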
+ + with DialogSelectFromListOfStrings( wx.GetApp().GetGUI(), 'select service', [ service_identifier.GetName() for service_identifier in service_identifiers ] ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: return names_to_service_identifiers[ dlg.GetString() ] + else: return None + + + +def ShowMessage( parent, message ): + + with DialogMessage( parent, message ) as dlg: dlg.ShowModal() + +class Dialog( wx.Dialog ): + + def __init__( self, parent, title, style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER, position = 'topleft' ): + + self._options = wx.GetApp().Read( 'options' ) + + if position == 'topleft': + + ( pos_x, pos_y ) = wx.GetApp().GetGUI().GetPositionTuple() + + pos = ( pos_x + 50, pos_y + 100 ) + + else: pos = ( -1, -1 ) + + wx.Dialog.__init__( self, parent, title = title, style = style, pos = pos ) + + self.SetDoubleBuffered( True ) + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self.SetIcon( wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) ) + + if position == 'center': wx.CallAfter( self.Center ) + + +class DialogFinishFiltering( Dialog ): + + def __init__( self, parent, num_kept, num_deleted ): + + def InitialiseControls(): + + self._commit = wx.Button( self, label = 'commit' ) + self._commit.Bind( wx.EVT_BUTTON, self.EventCommit ) + self._commit.SetForegroundColour( ( 0, 128, 0 ) ) + + self._forget = wx.Button( self, label = 'forget' ) + self._forget.Bind( wx.EVT_BUTTON, self.EventForget ) + self._forget.SetForegroundColour( ( 128, 0, 0 ) ) + + self._back = wx.Button( self, id = wx.ID_CANCEL, label = 'back to filtering' ) + self._back.Bind( wx.EVT_BUTTON, self.EventBack ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._commit, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( self._forget, FLAGS_EXPAND_BOTH_WAYS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + label = 'Keep ' + HC.ConvertIntToPrettyString( num_kept ) + ' and delete ' + HC.ConvertIntToPrettyString( num_deleted ) + ' files?' 
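+ # e.g. 'Keep 150 and delete 12 files?' (illustrative counts)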
+ + vbox.AddF( wx.StaticText( self, label = label, style = wx.ALIGN_CENTER ), FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( wx.StaticText( self, label = '-or-', style = wx.ALIGN_CENTER ), FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._back, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'are you sure?', position = 'center' ) + + InitialiseControls() + + InitialisePanel() + + + def EventBack( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventCommit( self, event ): self.EndModal( wx.ID_YES ) + + def EventForget( self, event ): self.EndModal( wx.ID_NO ) + +class DialogFinishRatingFiltering( Dialog ): + + def __init__( self, parent, num_certain_ratings, num_uncertain_ratings ): + + def InitialiseControls(): + + self._commit = wx.Button( self, label = 'commit' ) + self._commit.Bind( wx.EVT_BUTTON, self.EventCommit ) + self._commit.SetForegroundColour( ( 0, 128, 0 ) ) + + self._forget = wx.Button( self, label = 'forget' ) + self._forget.Bind( wx.EVT_BUTTON, self.EventForget ) + self._forget.SetForegroundColour( ( 128, 0, 0 ) ) + + self._back = wx.Button( self, id = wx.ID_CANCEL, label = 'back to filtering' ) + self._back.Bind( wx.EVT_BUTTON, self.EventBack ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._commit, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( self._forget, FLAGS_EXPAND_BOTH_WAYS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + info_strings = [] + + if num_certain_ratings > 0: info_strings.append( HC.ConvertIntToPrettyString( num_certain_ratings ) + ' ratings' ) + if num_uncertain_ratings > 0: info_strings.append( HC.ConvertIntToPrettyString( num_uncertain_ratings ) + ' uncertain changes' ) + + label = 'Apply ' + ' and '.join( info_strings ) + '?' 
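+ # e.g. 'Apply 7 ratings and 2 uncertain changes?' (illustrative counts)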
+ + vbox.AddF( wx.StaticText( self, label = label, style = wx.ALIGN_CENTER ), FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( wx.StaticText( self, label = '-or-', style = wx.ALIGN_CENTER ), FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._back, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'are you sure?', position = 'center' ) + + InitialiseControls() + + InitialisePanel() + + + def EventBack( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventCommit( self, event ): self.EndModal( wx.ID_YES ) + + def EventForget( self, event ): self.EndModal( wx.ID_NO ) + +class DialogInputCustomFilterAction( Dialog ): + + def __init__( self, parent, modifier = wx.ACCEL_NORMAL, key = wx.WXK_F7, service_identifier = None, action = 'archive' ): + + self._service_identifier = service_identifier + self._action = action + + self._current_ratings_like_service = None + self._current_ratings_numerical_service = None + + def InitialiseControls(): + + service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.LOCAL_TAG, HC.TAG_REPOSITORY, HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ) + + self._shortcut = ClientGUICommon.Shortcut( self, modifier, key ) + + self._none_actions = wx.Choice( self, choices = [ 'manage_tags', 'manage_ratings', 'archive', 'delete', 'frame_back', 'frame_next', 'previous', 'next', 'first', 'last' ] ) + + self._ok_none = wx.Button( self, label = 'ok' ) + self._ok_none.Bind( wx.EVT_BUTTON, self.EventOKNone ) + self._ok_none.SetForegroundColour( ( 0, 128, 0 ) ) + + self._tag_service_identifiers = wx.Choice( self ) + self._tag_value = wx.TextCtrl( self, style = wx.TE_READONLY ) + self._tag_input = ClientGUICommon.AutoCompleteDropdownTagsWrite( self, self.SetTag, CC.LOCAL_FILE_SERVICE_IDENTIFIER, CC.NULL_SERVICE_IDENTIFIER ) + + self._ok_tag = wx.Button( self, label = 'ok' ) + self._ok_tag.Bind( wx.EVT_BUTTON, self.EventOKTag ) + self._ok_tag.SetForegroundColour( ( 0, 128, 0 ) ) + + self._ratings_like_service_identifiers = wx.Choice( self ) + self._ratings_like_service_identifiers.Bind( wx.EVT_CHOICE, self.EventRecalcActions ) + self._ratings_like_like = wx.RadioButton( self, style = wx.RB_GROUP, label = 'like' ) + self._ratings_like_dislike = wx.RadioButton( self, label = 'dislike' ) + self._ratings_like_remove = wx.RadioButton( self, label = 'remove rating' ) + + self._ok_ratings_like = wx.Button( self, label = 'ok' ) + self._ok_ratings_like.Bind( wx.EVT_BUTTON, self.EventOKRatingsLike ) + self._ok_ratings_like.SetForegroundColour( ( 0, 128, 0 ) ) + + self._ratings_numerical_service_identifiers = wx.Choice( self ) + self._ratings_numerical_service_identifiers.Bind( wx.EVT_CHOICE, self.EventRecalcActions ) + self._ratings_numerical_slider = wx.Slider( self, style = wx.SL_AUTOTICKS | wx.SL_LABELS ) + self._ratings_numerical_remove = wx.CheckBox( self, label = 'remove rating' ) + + self._ok_ratings_numerical = wx.Button( self, label = 'ok' ) + self._ok_ratings_numerical.Bind( wx.EVT_BUTTON, self.EventOKRatingsNumerical ) + self._ok_ratings_numerical.SetForegroundColour( ( 0, 128, 0 ) ) + + for service_identifier in service_identifiers: + + service_type = service_identifier.GetType() + + if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): choice = self._tag_service_identifiers + elif service_type == HC.LOCAL_RATING_LIKE: choice = self._ratings_like_service_identifiers + elif service_type == 
HC.LOCAL_RATING_NUMERICAL: choice = self._ratings_numerical_service_identifiers + + choice.Append( service_identifier.GetName(), service_identifier ) + + + self._SetActions() + + if self._service_identifier is None: + + self._none_actions.SetStringSelection( self._action ) + + else: + + service_name = self._service_identifier.GetName() + service_type = self._service_identifier.GetType() + + if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): + + self._tag_service_identifiers.SetStringSelection( service_name ) + + self._tag_value.SetValue( self._action ) + + elif service_type == HC.LOCAL_RATING_LIKE: + + self._ratings_like_service_identifiers.SetStringSelection( service_name ) + + self._SetActions() + + if self._action is None: self._ratings_like_remove.SetValue( True ) + elif self._action == True: self._ratings_like_like.SetValue( True ) + elif self._action == False: self._ratings_like_dislike.SetValue( True ) + + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + self._ratings_numerical_service_identifiers.SetStringSelection( service_name ) + + self._SetActions() + + if self._action is None: self._ratings_numerical_remove.SetValue( True ) + else: + + ( lower, upper ) = self._current_ratings_numerical_service.GetExtraInfo() + + slider_value = int( self._action * ( upper - lower ) ) + lower + + self._ratings_numerical_slider.SetValue( slider_value ) + + + + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetSize( ( 0, 0 ) ) + + + def InitialisePanel(): + + shortcut_vbox = wx.BoxSizer( wx.VERTICAL ) + + shortcut_vbox.AddF( wx.StaticText( self, label = '- shortcut -' ), FLAGS_EXPAND_PERPENDICULAR ) + shortcut_vbox.AddF( self._shortcut, FLAGS_EXPAND_PERPENDICULAR ) + + none_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + none_hbox.AddF( self._none_actions, FLAGS_EXPAND_DEPTH_ONLY ) + none_hbox.AddF( self._ok_none, FLAGS_MIXED ) + + none_vbox = wx.BoxSizer( wx.VERTICAL ) + + none_vbox.AddF( wx.StaticText( self, label = '- non-service actions -' ), FLAGS_EXPAND_PERPENDICULAR ) + none_vbox.AddF( none_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + tag_sub_vbox = wx.BoxSizer( wx.VERTICAL ) + + tag_sub_vbox.AddF( self._tag_value, FLAGS_EXPAND_BOTH_WAYS ) + tag_sub_vbox.AddF( self._tag_input, FLAGS_EXPAND_BOTH_WAYS ) + + tag_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + tag_hbox.AddF( self._tag_service_identifiers, FLAGS_EXPAND_DEPTH_ONLY ) + tag_hbox.AddF( tag_sub_vbox, FLAGS_EXPAND_BOTH_WAYS ) + tag_hbox.AddF( self._ok_tag, FLAGS_MIXED ) + + tag_vbox = wx.BoxSizer( wx.VERTICAL ) + + tag_vbox.AddF( wx.StaticText( self, label = '- tag service actions -' ), FLAGS_EXPAND_PERPENDICULAR ) + tag_vbox.AddF( tag_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + ratings_like_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + ratings_like_hbox.AddF( self._ratings_like_service_identifiers, FLAGS_EXPAND_DEPTH_ONLY ) + ratings_like_hbox.AddF( self._ratings_like_like, FLAGS_MIXED ) + ratings_like_hbox.AddF( self._ratings_like_dislike, FLAGS_MIXED ) + ratings_like_hbox.AddF( self._ratings_like_remove, FLAGS_MIXED ) + ratings_like_hbox.AddF( self._ok_ratings_like, FLAGS_MIXED ) + + ratings_like_vbox = wx.BoxSizer( wx.VERTICAL ) + + ratings_like_vbox.AddF( wx.StaticText( self, label = '- ratings like service actions -' ), FLAGS_EXPAND_PERPENDICULAR ) + ratings_like_vbox.AddF( ratings_like_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + ratings_numerical_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + ratings_numerical_hbox.AddF( self._ratings_numerical_service_identifiers, 
FLAGS_EXPAND_DEPTH_ONLY ) + ratings_numerical_hbox.AddF( self._ratings_numerical_slider, FLAGS_MIXED ) + ratings_numerical_hbox.AddF( self._ratings_numerical_remove, FLAGS_MIXED ) + ratings_numerical_hbox.AddF( self._ok_ratings_numerical, FLAGS_MIXED ) + + ratings_numerical_vbox = wx.BoxSizer( wx.VERTICAL ) + + ratings_numerical_vbox.AddF( wx.StaticText( self, label = '- ratings numerical service actions -' ), FLAGS_EXPAND_PERPENDICULAR ) + ratings_numerical_vbox.AddF( ratings_numerical_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( none_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( tag_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( ratings_like_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( ratings_numerical_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( shortcut_vbox, FLAGS_MIXED ) + hbox.AddF( vbox, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 680, y ) ) + + + Dialog.__init__( self, parent, 'input custom filter action' ) + + InitialiseControls() + + InitialisePanel() + + + def _SetActions( self ): + + if self._ratings_like_service_identifiers.GetCount() > 0: + + selection = self._ratings_like_service_identifiers.GetSelection() + + if selection != wx.NOT_FOUND: + + service_identifier = self._ratings_like_service_identifiers.GetClientData( selection ) + + service = wx.GetApp().Read( 'service', service_identifier ) + + self._current_ratings_like_service = service + + ( like, dislike ) = service.GetExtraInfo() + + self._ratings_like_like.SetLabel( like ) + self._ratings_like_dislike.SetLabel( dislike ) + + else: + + self._ratings_like_like.SetLabel( 'like' ) + self._ratings_like_dislike.SetLabel( 'dislike' ) + + + + if self._ratings_numerical_service_identifiers.GetCount() > 0: + + selection = self._ratings_numerical_service_identifiers.GetSelection() + + if selection != wx.NOT_FOUND: + + service_identifier = self._ratings_numerical_service_identifiers.GetClientData( selection ) + + service = wx.GetApp().Read( 'service', service_identifier ) + + self._current_ratings_numerical_service = service + + ( lower, upper ) = service.GetExtraInfo() + + self._ratings_numerical_slider.SetRange( lower, upper ) + + else: self._ratings_numerical_slider.SetRange( 0, 5 ) + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOKNone( self, event ): + + self._service_identifier = None + self._action = self._none_actions.GetStringSelection() + self._pretty_action = self._action + + self.EndModal( wx.ID_OK ) + + + def EventOKRatingsLike( self, event ): + + selection = self._ratings_like_service_identifiers.GetSelection() + + if selection != wx.NOT_FOUND: + + self._service_identifier = self._ratings_like_service_identifiers.GetClientData( selection ) + + ( like, dislike ) = self._current_ratings_like_service.GetExtraInfo() + + if self._ratings_like_like.GetValue(): + + self._action = 1.0 + self._pretty_action = like + + elif self._ratings_like_dislike.GetValue(): + + self._action = 0.0 + self._pretty_action = dislike + + else: + + self._action = None + self._pretty_action = 'remove' + + + self.EndModal( wx.ID_OK ) + + else: self.EndModal( wx.ID_CANCEL ) + + + def EventOKRatingsNumerical( self, event ): + + selection = self._ratings_numerical_service_identifiers.GetSelection() + + if selection != wx.NOT_FOUND: + + self._service_identifier = self._ratings_numerical_service_identifiers.GetClientData( selection ) + + if 
self._ratings_numerical_remove.GetValue(): + + self._action = None + self._pretty_action = 'remove' + + else: + + self._pretty_action = str( self._ratings_numerical_slider.GetValue() ) + + ( lower, upper ) = self._current_ratings_numerical_service.GetExtraInfo() + + self._action = ( float( self._pretty_action ) - float( lower ) ) / ( upper - lower ) + + + self.EndModal( wx.ID_OK ) + + else: self.EndModal( wx.ID_CANCEL ) + + + def EventOKTag( self, event ): + + selection = self._tag_service_identifiers.GetSelection() + + if selection != wx.NOT_FOUND: + + self._service_identifier = self._tag_service_identifiers.GetClientData( selection ) + + self._action = self._tag_value.GetValue() + self._pretty_action = self._action + + self.EndModal( wx.ID_OK ) + + else: self.EndModal( wx.ID_CANCEL ) + + + def EventRecalcActions( self, event ): + + self._SetActions() + + event.Skip() + + + def GetInfo( self ): + + ( modifier, key ) = self._shortcut.GetValue() + + if self._service_identifier is None: pretty_service_identifier = '' + else: pretty_service_identifier = self._service_identifier.GetName() + + # ignore this pretty_action + ( pretty_modifier, pretty_key, pretty_action ) = HC.ConvertShortcutToPrettyShortcut( modifier, key, self._action ) + + return ( ( pretty_modifier, pretty_key, pretty_service_identifier, self._pretty_action ), ( modifier, key, self._service_identifier, self._action ) ) + + + def SetTag( self, tag ): self._tag_value.SetValue( tag ) + +class DialogInputNewAccounts( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + self._num = wx.SpinCtrl( self, min=1, max=10000, initial=1 ) + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + account_types = connection.Get( 'accounttypes' ) + + self._account_types = wx.Choice( self, size = ( 400, -1 ) ) + + for account_type in account_types: self._account_types.Append( account_type.ConvertToString(), account_type ) + + self._account_types.SetSelection( 0 ) # admin + + self._expiration = wx.Choice( self ) + for ( str, value ) in HC.expirations: self._expiration.Append( str, value ) + self._expiration.SetSelection( 3 ) # one year + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + ctrl_box = wx.BoxSizer( wx.HORIZONTAL ) + ctrl_box.AddF( self._num, FLAGS_SMALL_INDENT ) + ctrl_box.AddF( self._account_types, FLAGS_SMALL_INDENT ) + ctrl_box.AddF( self._expiration, FLAGS_SMALL_INDENT ) + + b_box = wx.BoxSizer( wx.HORIZONTAL ) + b_box.AddF( self._ok, FLAGS_MIXED ) + b_box.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( ctrl_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( b_box, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'configure new accounts' ) + + self._service_identifier = service_identifier + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOk( self, event ): + + num = self._num.GetValue() + + account_type = self._account_types.GetClientData( self._account_types.GetSelection() ) + + title = account_type.GetTitle() + + 
expiration = self._expiration.GetClientData( self._expiration.GetSelection() ) + + service = wx.GetApp().Read( 'service', self._service_identifier ) + + try: + + connection = service.GetConnection() + + if expiration is None: access_keys = connection.Get( 'accesskeys', num = num, title = title ) + else: access_keys = connection.Get( 'accesskeys', num = num, title = title, expiration = expiration ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + +class DialogInputNewAccountType( Dialog ): + + def __init__( self, parent, account_type = None ): + + def InitialiseControls(): + + self._title = wx.TextCtrl( self, value = title ) + + self._permissions = wx.ListBox( self ) + + for permission in permissions: self._permissions.Append( HC.permissions_string_lookup[ permission ], permission ) + + self._permission_choice = wx.Choice( self ) + + for permission in HC.CREATABLE_PERMISSIONS: self._permission_choice.Append( HC.permissions_string_lookup[ permission ], permission ) + + self._permission_choice.SetSelection( 0 ) + + self._add_permission = wx.Button( self, label = 'add' ) + self._add_permission.Bind( wx.EVT_BUTTON, self.EventAddPermission ) + + self._remove_permission = wx.Button( self, label = 'remove' ) + self._remove_permission.Bind( wx.EVT_BUTTON, self.EventRemovePermission ) + + self._max_num_mb = ClientGUICommon.NoneableSpinCtrl( self, 'max monthly data (MB)', max_num_bytes, multiplier = 1048576 ) + self._max_num_requests = ClientGUICommon.NoneableSpinCtrl( self, 'max monthly requests', max_num_requests ) + + self._apply = wx.Button( self, label='apply' ) + self._apply.Bind( wx.EVT_BUTTON, self.EventOk ) + self._apply.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + t_box = wx.BoxSizer( wx.HORIZONTAL ) + t_box.AddF( wx.StaticText( self, label='title: ' ), FLAGS_SMALL_INDENT ) + t_box.AddF( self._title, FLAGS_EXPAND_BOTH_WAYS ) + + perm_buttons_box = wx.BoxSizer( wx.HORIZONTAL ) + perm_buttons_box.AddF( self._permission_choice, FLAGS_MIXED ) + perm_buttons_box.AddF( self._add_permission, FLAGS_MIXED ) + perm_buttons_box.AddF( self._remove_permission, FLAGS_MIXED ) + + p_box = wx.BoxSizer( wx.VERTICAL ) + + p_box.AddF( wx.StaticText( self, label = '- permissions -' ), FLAGS_SMALL_INDENT ) + p_box.AddF( self._permissions, FLAGS_EXPAND_BOTH_WAYS ) + p_box.AddF( perm_buttons_box, FLAGS_EXPAND_PERPENDICULAR ) + + b_box = wx.BoxSizer( wx.HORIZONTAL ) + + b_box.AddF( self._apply, FLAGS_MIXED ) + b_box.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( t_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( p_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._max_num_mb, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._max_num_requests, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( b_box, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 800, y ) ) + + + if account_type is None: + + title = '' + permissions = [ HC.GET_DATA ] + max_num_bytes = 104857600 + max_num_requests = 1000 + + else: + + title = account_type.GetTitle() + permissions = account_type.GetPermissions() + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + + + Dialog.__init__( self, parent, 'edit account type' ) + + InitialiseControls() + + InitialisePanel() + + + def 
EventAddPermission( self, event ): + + selection = self._permission_choice.GetSelection() + + if selection != wx.NOT_FOUND: + + permission = self._permission_choice.GetClientData( selection ) + + existing_permissions = [ self._permissions.GetClientData( i ) for i in range( self._permissions.GetCount() ) ] + + if permission not in existing_permissions: self._permissions.Append( HC.permissions_string_lookup[ permission ], permission ) + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventCheckBox( self, event ): + + if self._max_num_requests_checkbox.GetValue(): self._max_num_requests.Disable() + else: self._max_num_requests.Enable() + + + def EventOk( self, event ): self.EndModal( wx.ID_OK ) + + def EventRemovePermission( self, event ): + + selection = self._permissions.GetSelection() + + if selection != wx.NOT_FOUND: self._permissions.Delete( selection ) + + + def GetAccountType( self ): + + title = self._title.GetValue() + + permissions = [ self._permissions.GetClientData( i ) for i in range( self._permissions.GetCount() ) ] + + max_num_bytes = self._max_num_mb.GetValue() + + max_num_requests = self._max_num_requests.GetValue() + + return HC.AccountType( title, permissions, ( max_num_bytes, max_num_requests ) ) + + +class DialogInputNewFormField( Dialog ): + + def __init__( self, parent, form_field = None ): + + if form_field is None: ( name, type, default, editable ) = ( '', CC.FIELD_TEXT, '', True ) + else: ( name, type, default, editable ) = form_field + + def InitialiseControls(): + + self._name = wx.TextCtrl( self, value = name ) + + self._type = wx.Choice( self ) + + for temp_type in CC.FIELDS: self._type.Append( CC.field_string_lookup[ temp_type ], temp_type ) + + self._type.Select( type ) + + self._default = wx.TextCtrl( self, value = default ) + + self._editable = wx.CheckBox( self ) + + self._editable.SetValue( editable ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='name' ), FLAGS_MIXED ) + gridbox.AddF( self._name, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='type' ), FLAGS_MIXED ) + gridbox.AddF( self._type, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='default' ), FLAGS_MIXED ) + gridbox.AddF( self._default, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='editable' ), FLAGS_MIXED ) + gridbox.AddF( self._editable, FLAGS_EXPAND_BOTH_WAYS ) + + b_box = wx.BoxSizer( wx.HORIZONTAL ) + b_box.AddF( self._ok, FLAGS_MIXED ) + b_box.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( b_box, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'configure form field' ) + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOk( self, event ): self.EndModal( wx.ID_OK ) + + def GetFormField( self ): + + name = self._name.GetValue() + + type = self._type.GetClientData( self._type.GetSelection() ) + + default = 
self._default.GetValue() + + editable = self._editable.GetValue() + + return ( name, type, default, editable ) + + +class DialogInputFileSystemPredicate( Dialog ): + + def __init__( self, parent, type ): + + def Age(): + + def InitialiseControls(): + + ( sign, years, months, days ) = system_predicates[ 'age' ] + + self._sign = wx.Choice( self, choices=[ '<', u'\u2248', '>' ] ) + self._sign.SetSelection( sign ) + + self._years = wx.SpinCtrl( self, initial = years, max = 30 ) + self._months = wx.SpinCtrl( self, initial = months, max = 60 ) + self._days = wx.SpinCtrl( self, initial = days, max = 90 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:age' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._years, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='years' ), FLAGS_MIXED ) + hbox.AddF( self._months, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='months' ), FLAGS_MIXED ) + hbox.AddF( self._days, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='days' ), FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter age predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Duration(): + + def InitialiseControls(): + + ( sign, s, ms ) = system_predicates[ 'duration' ] + + self._sign = wx.Choice( self, choices=[ '<', u'\u2248', '=', '>' ] ) + self._sign.SetSelection( sign ) + + self._duration_s = wx.SpinCtrl( self, initial = s, max = 3599 ) + self._duration_ms = wx.SpinCtrl( self, initial = ms, max = 999 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:duration' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._duration_s, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='s' ), FLAGS_MIXED ) + hbox.AddF( self._duration_ms, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='ms' ), FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter duration predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Hash(): + + def InitialiseControls(): + + self._hash = wx.TextCtrl( self ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:hash=' ), FLAGS_MIXED ) + hbox.AddF( self._hash, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter hash predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Height(): + + def InitialiseControls(): + + ( sign, height ) = system_predicates[ 'height' ] + + self._sign = wx.Choice( self, choices=[ '<', u'\u2248', '=', '>' ] ) + self._sign.SetSelection( sign ) + + self._height = wx.SpinCtrl( self, initial = height, 
max = 200000 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:height' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._height, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter height predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Limit(): + + def InitialiseControls(): + + limit = system_predicates[ 'limit' ] + + self._limit = wx.SpinCtrl( self, initial = limit, max = 1000000 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:limit=' ), FLAGS_MIXED ) + hbox.AddF( self._limit, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter limit predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Mime(): + + def InitialiseControls(): + + ( media, type ) = system_predicates[ 'mime' ] + + self._mime_media = wx.Choice( self, choices=[ 'image', 'application', 'video' ] ) + self._mime_media.SetSelection( media ) + self._mime_media.Bind( wx.EVT_CHOICE, self.EventMime ) + + self._mime_type = wx.Choice( self, choices=[], size = ( 120, -1 ) ) + + self.EventMime( None ) + + self._mime_type.SetSelection( type ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:mime' ), FLAGS_MIXED ) + hbox.AddF( self._mime_media, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='/' ), FLAGS_MIXED ) + hbox.AddF( self._mime_type, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter mime predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def NumTags(): + + def InitialiseControls(): + + ( sign, num_tags ) = system_predicates[ 'num_tags' ] + + self._sign = wx.Choice( self, choices=[ '<', '=', '>' ] ) + self._sign.SetSelection( sign ) + + self._num_tags = wx.SpinCtrl( self, initial = num_tags, max = 2000 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:numtags' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._num_tags, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter number of tags predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Rating(): + + def InitialiseControls(): + + self._service_numerical = wx.Choice( self ) + for service in self._local_numericals: self._service_numerical.Append( 
service.GetServiceIdentifier().GetName(), service ) + self._service_numerical.Bind( wx.EVT_CHOICE, self.EventRatingsService ) + + ( sign, value ) = system_predicates[ 'local_rating_numerical' ] + + self._sign_numerical = wx.Choice( self, choices=[ '>', '<', '=', u'\u2248', '=rated', '=not rated', '=uncertain' ] ) + self._sign_numerical.SetSelection( sign ) + + self._value_numerical = wx.SpinCtrl( self, initial = value, min = 0, max = 50000 ) # set bounds based on current service + + self._first_ok = wx.Button( self, label='Ok', id = HC.LOCAL_RATING_NUMERICAL ) + self._first_ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._first_ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._service_like = wx.Choice( self ) + for service in self._local_likes: self._service_like.Append( service.GetServiceIdentifier().GetName(), service ) + self._service_like.Bind( wx.EVT_CHOICE, self.EventRatingsService ) + + value = system_predicates[ 'local_rating_like' ] + + self._value_like = wx.Choice( self, choices=[ 'like', 'dislike', 'rated', 'not rated' ] ) # set words based on current service + self._value_like.SetSelection( value ) + + self._second_ok = wx.Button( self, label='Ok', id = HC.LOCAL_RATING_LIKE ) + self._second_ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._second_ok.SetForegroundColour( ( 0, 128, 0 ) ) + + if len( self._local_numericals ) > 0: self._service_numerical.SetSelection( 0 ) + if len( self._local_likes ) > 0: self._service_like.SetSelection( 0 ) + + self.EventRatingsService( None ) + + + def InitialisePanel(): + + vbox = wx.BoxSizer( wx.VERTICAL ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:rating:' ), FLAGS_MIXED ) + hbox.AddF( self._service_numerical, FLAGS_MIXED ) + hbox.AddF( self._sign_numerical, FLAGS_MIXED ) + hbox.AddF( self._value_numerical, FLAGS_MIXED ) + hbox.AddF( self._first_ok, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:rating:' ), FLAGS_MIXED ) + hbox.AddF( self._service_like, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='=' ), FLAGS_MIXED ) + hbox.AddF( self._value_like, FLAGS_MIXED ) + hbox.AddF( self._second_ok, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter rating predicate' ) + + self._local_numericals = wx.GetApp().Read( 'services', ( HC.LOCAL_RATING_NUMERICAL, ) ) + self._local_likes = wx.GetApp().Read( 'services', ( HC.LOCAL_RATING_LIKE, ) ) + + InitialiseControls() + + InitialisePanel() + + + def Ratio(): + + def InitialiseControls(): + + ( sign, width, height ) = system_predicates[ 'ratio' ] + + self._sign = wx.Choice( self, choices=[ '=', u'\u2248' ] ) + self._sign.SetSelection( sign ) + + self._width = wx.SpinCtrl( self, initial = width, max = 50000 ) + + self._height = wx.SpinCtrl( self, initial = height, max = 50000 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:ratio' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._width, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label=':' ), FLAGS_MIXED ) + hbox.AddF( self._height, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) 
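# illustrative sketch: each predicate sub-dialog above pairs a sign wx.Choice with value
# controls, and GetString further below flattens those controls into a plain 'system:...'
# query string; a rough standalone rendering of that serialisation, using hypothetical
# plain arguments rather than live wx controls:

def render_ratio_predicate( sign, width, height ):
    
    # mirrors 'system:ratio' + sign + str( width ) + ':' + str( height ) in GetString
    return 'system:ratio' + sign + str( width ) + ':' + str( height )
    

def render_size_predicate( sign, size, unit ):
    
    # mirrors 'system:size' + sign + str( size ) + unit in GetString
    return 'system:size' + sign + str( size ) + unit
    

# e.g. render_ratio_predicate( '=', 16, 9 ) -> 'system:ratio=16:9'
# e.g. render_size_predicate( '>', 5, 'MB' ) -> 'system:size>5MB'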
+ + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter ratio predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Size(): + + def InitialiseControls(): + + ( sign, size, unit ) = system_predicates[ 'size' ] + + self._sign = wx.Choice( self, choices=[ '<', u'\u2248', '=', '>' ] ) + self._sign.SetSelection( sign ) + + self._size = wx.SpinCtrl( self, initial = size, max = 1048576 ) + + self._unit = wx.Choice( self, choices=[ 'b', 'B', 'Kb', 'KB', 'Mb', 'MB', 'Gb', 'GB' ] ) + self._unit.SetSelection( unit ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:size' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._size, FLAGS_MIXED ) + hbox.AddF( self._unit, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter size predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def Width(): + + def InitialiseControls(): + + ( sign, width ) = system_predicates[ 'width' ] + + self._sign = wx.Choice( self, choices=[ '<', u'\u2248', '=', '>' ] ) + self._sign.SetSelection( sign ) + + self._width = wx.SpinCtrl( self, initial = width, max = 200000 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:width' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._width, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter width predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def SimilarTo(): + + def InitialiseControls(): + + self._hash = wx.TextCtrl( self ) + self._hash.SetValue( 'enter hash' ) + + self._max_hamming = wx.SpinCtrl( self, initial = 5, max = 256 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:similar_to' ), FLAGS_MIXED ) + hbox.AddF( self._hash, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label=u'\u2248' ), FLAGS_MIXED ) + hbox.AddF( self._max_hamming, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter duration predicate' ) + + InitialiseControls() + + InitialisePanel() + + + options = wx.GetApp().Read( 'options' ) + + system_predicates = options[ 'file_system_predicates' ] + + self._type = type + + if self._type == 'system:age': Age() + elif self._type == 'system:duration': Duration() + elif self._type == 'system:hash': Hash() + elif self._type == 'system:height': Height() + elif self._type == 'system:limit': Limit() + elif self._type == 'system:mime': Mime() + elif self._type == 'system:numtags': NumTags() + elif self._type == 'system:rating': Rating() + elif self._type == 'system:ratio': 
Ratio() + elif self._type == 'system:size': Size() + elif self._type == 'system:width': Width() + elif self._type == 'system:similar_to': SimilarTo() + + self._hidden_cancel_button = wx.Button( self, id = wx.ID_CANCEL, label = 'cancel', size = ( 0, 0 ) ) + self._hidden_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + # hide doesn't keep the escape hotkey, so say size = ( 0, 0 ) + # self._hidden_cancel_button.Hide() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventMime( self, event ): + + media = self._mime_media.GetStringSelection() + + self._mime_type.Clear() + + if media == 'image': + + self._mime_type.Append( 'any', HC.IMAGES ) + self._mime_type.Append( 'jpeg', HC.IMAGE_JPEG ) + self._mime_type.Append( 'png', HC.IMAGE_PNG ) + self._mime_type.Append( 'gif', HC.IMAGE_GIF ) + + elif media == 'application': + + self._mime_type.Append( 'x-shockwave-flash', HC.APPLICATION_FLASH ) + + elif media == 'video': + + self._mime_type.Append( 'x-flv', HC.VIDEO_FLV ) + + + self._mime_type.SetSelection( 0 ) + + + def EventOk( self, event ): + + if self._type == 'system:rating': + + id = event.GetId() + + if id == HC.LOCAL_RATING_LIKE: self._type = 'system:rating_like' + elif id == HC.LOCAL_RATING_NUMERICAL: self._type = 'system:rating_numerical' + + + self.EndModal( wx.ID_OK ) + + + def EventRatingsService( self, event ): + + try: + + service = self._service_numerical.GetClientData( self._service_numerical.GetSelection() ) + + ( min, max ) = service.GetExtraInfo() + + self._value_numerical.SetRange( min, max ) + + service = self._service_like.GetClientData( self._service_like.GetSelection() ) + + except: pass + + try: + + ( like, dislike ) = service.GetExtraInfo() + + selection = self._value_like.GetSelection() + + self._value_like.SetString( 0, like ) + self._value_like.SetString( 1, dislike ) + + self._value_like.SetSelection( selection ) + + except: pass + + + def GetString( self ): + + if self._type == 'system:age': return 'system:age' + self._sign.GetStringSelection() + str( self._years.GetValue() ) + 'y' + str( self._months.GetValue() ) + 'm' + str( self._days.GetValue() ) + 'd' + elif self._type == 'system:duration': return 'system:duration' + self._sign.GetStringSelection() + str( self._duration_s.GetValue() * 1000 + self._duration_ms.GetValue() ) + elif self._type == 'system:hash': + + hex_filter = lambda c: c in '0123456789abcdef' + + hash = filter( hex_filter, self._hash.GetValue() ) + + if len( hash ) == 0: hash == '00' + elif len( hash ) % 2 == 1: hash += '0' # since we are later decoding to byte + + return 'system:hash=' + hash + + elif self._type == 'system:height': return 'system:height' + self._sign.GetStringSelection() + str( self._height.GetValue() ) + elif self._type == 'system:limit': return 'system:limit=' + str( self._limit.GetValue() ) + elif self._type == 'system:mime': return 'system:mime=' + HC.mime_string_lookup[ self._mime_type.GetClientData( self._mime_type.GetSelection() ) ] + elif self._type == 'system:numtags': return 'system:numtags' + self._sign.GetStringSelection() + str( self._num_tags.GetValue() ) + elif self._type == 'system:rating_like': + + s = 'system:rating:' + self._service_like.GetClientData( self._service_like.GetSelection() ).GetServiceIdentifier().GetName() + '=' + + selection = self._value_like.GetSelection() + + if selection == 0: s += '1' + elif selection == 1: s += '0' + elif selection == 2: s += 'rated' + elif selection == 3: s += 'not rated' + + return s + + elif self._type == 'system:rating_numerical': + + 
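# the 'system:rating_numerical' branch that follows normalises the raw spinner value into
# the service's own ( lower, upper ) range from GetExtraInfo before serialising:
# value_normalised = ( value - lower ) / ( upper - lower ). a worked illustration with a
# hypothetical 0-5 star service:

def normalise_rating( value, lower, upper ):
    
    return float( value - lower ) / float( upper - lower )
    

# normalise_rating( 4, 0, 5 ) -> 0.8, so a '>' search on that service serialises to
# 'system:rating:<service name>>0.8'

# also note, in the 'system:hash' branch above, the empty-input case compares
# ( hash == '00' ) instead of assigning ( hash = '00' ), so an empty box falls through
# unchanged; the assignment appears to be the intent. a standalone sketch of that cleaning
# step (python 2 string semantics assumed, as in the surrounding code):

def clean_hash_input( raw ):
    
    cleaned = ''.join( c for c in raw if c in '0123456789abcdef' )
    
    if len( cleaned ) == 0: cleaned = '00'
    elif len( cleaned ) % 2 == 1: cleaned += '0' # pad so the hex decodes to whole bytes
    
    return 'system:hash=' + cleaned
    

# e.g. clean_hash_input( 'abc' ) -> 'system:hash=abc0'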
service = self._service_numerical.GetClientData( self._service_numerical.GetSelection() ) + + s = 'system:rating:' + service.GetServiceIdentifier().GetName() + self._sign_numerical.GetStringSelection() + + if self._sign_numerical.GetStringSelection() not in ( '=rated', '=not rated', '=uncertain' ): + + ( lower, upper ) = service.GetExtraInfo() + + value = self._value_numerical.GetValue() + + value_normalised = float( value - lower ) / float( upper - lower ) + + s += str( value_normalised ) + + + return s + + elif self._type == 'system:ratio': return 'system:ratio' + self._sign.GetStringSelection() + str( self._width.GetValue() ) + ':' + str( self._height.GetValue() ) + elif self._type == 'system:size': return 'system:size' + self._sign.GetStringSelection() + str( self._size.GetValue() ) + self._unit.GetStringSelection() + elif self._type == 'system:width': return 'system:width' + self._sign.GetStringSelection() + str( self._width.GetValue() ) + elif self._type == 'system:similar_to': return 'system:similar_to=' + self._hash.GetValue() + u'\u2248' + str( self._max_hamming.GetValue() ) + + +class DialogInputMessageSystemPredicate( Dialog ): + + def __init__( self, parent, type ): + + def Age(): + + def InitialiseControls(): + + self._sign = wx.Choice( self, choices=[ '<', u'\u2248', '>' ] ) + self._sign.SetSelection( 0 ) + + self._years = wx.SpinCtrl( self, initial = 0, max = 30 ) + self._months = wx.SpinCtrl( self, initial = 0, max = 60 ) + self._days = wx.SpinCtrl( self, initial = 7, max = 90 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:age' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._years, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='years' ), FLAGS_MIXED ) + hbox.AddF( self._months, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='months' ), FLAGS_MIXED ) + hbox.AddF( self._days, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label='days' ), FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter age predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def From(): + + def InitialiseControls(): + + contact_names = wx.GetApp().Read( 'contact_names' ) + + self._contact = wx.Choice( self, choices=contact_names ) + self._contact.SetSelection( 0 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:from' ), FLAGS_MIXED ) + hbox.AddF( self._contact, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter from predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def StartedBy(): + + def InitialiseControls(): + + contact_names = wx.GetApp().Read( 'contact_names' ) + + self._contact = wx.Choice( self, choices=contact_names ) + self._contact.SetSelection( 0 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + 
hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:started_by' ), FLAGS_MIXED ) + hbox.AddF( self._contact, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter started by predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def To(): + + def InitialiseControls(): + + contact_names = [ name for name in wx.GetApp().Read( 'contact_names' ) if name != 'Anonymous' ] + + self._contact = wx.Choice( self, choices=contact_names ) + self._contact.SetSelection( 0 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:to' ), FLAGS_MIXED ) + hbox.AddF( self._contact, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter to predicate' ) + + InitialiseControls() + + InitialisePanel() + + + def NumAttachments(): + + def InitialiseControls(): + + self._sign = wx.Choice( self, choices=[ '<', '=', '>' ] ) + self._sign.SetSelection( 0 ) + + self._num_attachments = wx.SpinCtrl( self, initial = 4, max = 2000 ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label='system:numattachments' ), FLAGS_MIXED ) + hbox.AddF( self._sign, FLAGS_MIXED ) + hbox.AddF( self._num_attachments, FLAGS_MIXED ) + hbox.AddF( self._ok, FLAGS_MIXED ) + + self.SetSizer( hbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'enter number of attachments predicate' ) + + InitialiseControls() + + InitialisePanel() + + + self._type = type + + if self._type == 'system:age': Age() + elif self._type == 'system:started_by': StartedBy() + elif self._type == 'system:from': From() + elif self._type == 'system:to': To() + elif self._type == 'system:numattachments': NumAttachments() + + self._hidden_cancel_button = wx.Button( self, id = wx.ID_CANCEL, label = 'cancel', size = ( 0, 0 ) ) + self._hidden_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + # hide doesn't keep the escape hotkey, so say size = ( 0, 0 ) + # self._hidden_cancel_button.Hide() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOk( self, event ): self.EndModal( wx.ID_OK ) + + def GetString( self ): + + if self._type == 'system:age': return 'system:age' + self._sign.GetStringSelection() + str( self._years.GetValue() ) + 'y' + str( self._months.GetValue() ) + 'm' + str( self._days.GetValue() ) + 'd' + elif self._type == 'system:started_by': return 'system:started_by=' + self._contact.GetStringSelection() + elif self._type == 'system:from': return 'system:from=' + self._contact.GetStringSelection() + elif self._type == 'system:to': return 'system:to=' + self._contact.GetStringSelection() + elif self._type == 'system:numattachments': return 'system:numattachments' + self._sign.GetStringSelection() + str( self._num_attachments.GetValue() ) + + +class DialogInputShortcut( Dialog ): + + def __init__( self, parent, modifier = wx.ACCEL_NORMAL, key = 
wx.WXK_F7, action = 'new_page' ): + + self._action = action + + def InitialiseControls(): + + self._shortcut = ClientGUICommon.Shortcut( self, modifier, key ) + + self._actions = wx.Choice( self, choices = [ 'archive', 'close_page', 'filter', 'ratings_filter', 'frame_back', 'frame_next', 'manage_ratings', 'manage_tags', 'new_page', 'refresh', 'set_search_focus', 'show_hide_splitters', 'synchronised_wait_switch', 'previous', 'next', 'first', 'last' ] ) + self._actions.SetSelection( self._actions.FindString( action ) ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._shortcut, FLAGS_MIXED ) + hbox.AddF( self._actions, FLAGS_EXPAND_PERPENDICULAR ) + + b_box = wx.BoxSizer( wx.HORIZONTAL ) + b_box.AddF( self._ok, FLAGS_MIXED ) + b_box.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( b_box, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'configure shortcut' ) + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOk( self, event ): self.EndModal( wx.ID_OK ) + + def GetInfo( self ): + + ( modifier, key ) = self._shortcut.GetValue() + + return ( modifier, key, self._actions.GetStringSelection() ) + + +class DialogManageAccountTypes( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + service = wx.GetApp().Read( 'service', service_identifier ) + + connection = service.GetConnection() + + account_types = connection.Get( 'accounttypes' ) + + self._titles_to_account_types = {} + + self._ctrl_account_types = ClientGUICommon.SaneListCtrl( self, 350, [ ( 'title', 120 ), ( 'permissions', -1 ), ( 'max monthly bytes', 120 ), ( 'max monthly requests', 120 ) ] ) + + for account_type in account_types: + + title = account_type.GetTitle() + + self._titles_to_account_types[ title ] = account_type + + permissions = account_type.GetPermissions() + + permissions_string = ', '.join( [ HC.permissions_string_lookup[ permission ] for permission in permissions ] ) + + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + + ( max_num_bytes_string, max_num_requests_string ) = account_type.GetMaxMonthlyDataString() + + self._ctrl_account_types.Append( ( title, permissions_string, max_num_bytes_string, max_num_requests_string ), ( title, len( permissions ), max_num_bytes, max_num_requests ) ) + + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + + self._edit = wx.Button( self, label='edit' ) + self._edit.Bind( wx.EVT_BUTTON, self.EventEdit ) + + self._delete = wx.Button( self, label='delete' ) + self._delete.Bind( wx.EVT_BUTTON, self.EventDelete ) + + self._apply = wx.Button( self, label='apply' ) + self._apply.Bind( wx.EVT_BUTTON, self.EventOk ) + self._apply.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def 
InitialisePanel(): + + h_b_box = wx.BoxSizer( wx.HORIZONTAL ) + h_b_box.AddF( self._add, FLAGS_MIXED ) + h_b_box.AddF( self._edit, FLAGS_MIXED ) + h_b_box.AddF( self._delete, FLAGS_MIXED ) + + a_t_vbox = wx.BoxSizer( wx.VERTICAL ) + + a_t_vbox.AddF( wx.StaticText( self, label = '- account types -' ), FLAGS_SMALL_INDENT ) + a_t_vbox.AddF( self._ctrl_account_types, FLAGS_EXPAND_BOTH_WAYS ) + a_t_vbox.AddF( h_b_box, FLAGS_BUTTON_SIZERS ) + + b_box = wx.BoxSizer( wx.HORIZONTAL ) + b_box.AddF( self._apply, FLAGS_MIXED ) + b_box.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( a_t_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( b_box, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 980, y ) ) + + + Dialog.__init__( self, parent, 'manage account types' ) + + self._service_identifier = service_identifier + + self._edit_log = [] + + try: + + InitialiseControls() + + InitialisePanel() + + except: raise + + + def EventAdd( self, event ): + + try: + + with DialogInputNewAccountType( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + account_type = dlg.GetAccountType() + + title = account_type.GetTitle() + + permissions = account_type.GetPermissions() + + permissions_string = ', '.join( [ HC.permissions_string_lookup[ permission ] for permission in permissions ] ) + + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + + ( max_num_bytes_string, max_num_requests_string ) = account_type.GetMaxMonthlyDataString() + + if title in self._titles_to_account_types: raise Exception( 'You already have an account type called ' + title + '; delete or edit that one first' ) + + self._titles_to_account_types[ title ] = account_type + + self._edit_log.append( ( 'add', account_type ) ) + + self._ctrl_account_types.Append( ( title, permissions_string, max_num_bytes_string, max_num_requests_string ), ( title, len( permissions ), max_num_bytes, max_num_requests ) ) + + + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventDelete( self, event ): + + indices = self._ctrl_account_types.GetAllSelected() + + titles_about_to_delete = { self._ctrl_account_types.GetClientData( index )[0] for index in indices } + + all_titles = set( self._titles_to_account_types.keys() ) + + titles_can_move_to = list( all_titles - titles_about_to_delete ) + + if len( titles_can_move_to ) == 0: + + wx.MessageBox( 'You cannot delete every account type!' 
) + + return + + + for title in titles_about_to_delete: + + with DialogSelectFromListOfStrings( self, 'what should deleted ' + title + ' accounts become?', titles_can_move_to ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: title_to_move_to = dlg.GetString() + else: return + + + self._edit_log.append( ( 'delete', ( title, title_to_move_to ) ) ) + + + self._ctrl_account_types.RemoveAllSelected() + + + def EventEdit( self, event ): + + indices = self._ctrl_account_types.GetAllSelected() + + for index in indices: + + title = self._ctrl_account_types.GetClientData( index )[0] + + account_type = self._titles_to_account_types[ title ] + + try: + + with DialogInputNewAccountType( self, account_type ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + old_title = title + + account_type = dlg.GetAccountType() + + title = account_type.GetTitle() + + permissions = account_type.GetPermissions() + + permissions_string = ', '.join( [ HC.permissions_string_lookup[ permission ] for permission in permissions ] ) + + ( max_num_bytes, max_num_requests ) = account_type.GetMaxMonthlyData() + + ( max_num_bytes_string, max_num_requests_string ) = account_type.GetMaxMonthlyDataString() + + if old_title != title: + + if title in self._titles_to_account_types: raise Exception( 'You already have an account type called ' + title + '; delete or edit that one first' ) + + del self._titles_to_account_types[ old_title ] + + + self._titles_to_account_types[ title ] = account_type + + self._edit_log.append( ( 'edit', ( old_title, account_type ) ) ) + + self._ctrl_account_types.UpdateRow( index, ( title, permissions_string, max_num_bytes_string, max_num_requests_string ), ( title, len( permissions ), max_num_bytes, max_num_requests ) ) + + + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + def EventOk( self, event ): + + try: + + service = wx.GetApp().Read( 'service', self._service_identifier ) + + connection = service.GetConnection() + + connection.Post( 'accounttypesmodification', edit_log = self._edit_log ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + +class DialogManageBoorus( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._edit_log = [] + + self._boorus = ClientGUICommon.ListBook( self ) + + boorus = wx.GetApp().Read( 'boorus' ) + + for booru in boorus: + + name = booru.GetName() + + page_info = ( DialogManageBoorusBooruPanel, ( self._boorus, booru ), {} ) + + self._boorus.AddPage( page_info, name ) + + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + self._add.SetForegroundColour( ( 0, 128, 0 ) ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._export = wx.Button( self, label='export' ) + self._export.Bind( wx.EVT_BUTTON, self.EventExport ) + + self._ok = wx.Button( self, label='ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL ) + add_remove_hbox.AddF( self._add, FLAGS_MIXED ) + add_remove_hbox.AddF( self._remove, FLAGS_MIXED ) + add_remove_hbox.AddF( self._export, FLAGS_MIXED ) + + ok_hbox = wx.BoxSizer( wx.HORIZONTAL ) + ok_hbox.AddF( self._ok, FLAGS_MIXED ) + 
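# the account type manager above and this booru manager follow the same edit-log pattern:
# add/edit/delete actions in the dialog only append ( action, payload ) tuples to
# self._edit_log, and nothing is committed until ok/apply, when the whole batch goes out
# in a single call ( connection.Post( 'accounttypesmodification', ... ) or
# wx.GetApp().Write( 'update_boorus', ... ) ). a minimal standalone sketch of that
# accumulation, with hypothetical payloads:

edit_log = []

edit_log.append( ( 'add', 'new booru name' ) )
edit_log.append( ( 'edit', ( 'existing booru name', 'replacement booru object' ) ) )
edit_log.append( ( 'delete', 'unwanted booru name' ) )

# cancelling the dialog simply discards the list, so no partial changes are ever applied.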
ok_hbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._boorus, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT ) + vbox.AddF( ok_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 980, y ) ) + + + Dialog.__init__( self, parent, 'manage boorus' ) + + InitialiseControls() + + InitialisePanel() + + self.SetDropTarget( ClientGUICommon.FileDropTarget( self.Import ) ) + + + def EventAdd( self, event ): + + with wx.TextEntryDialog( self, 'Enter new booru\'s name' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + name = dlg.GetValue() + + if self._boorus.NameExists( name ): raise Exception( 'That name is already in use!' ) + + if name == '': raise Exception( 'Please enter a nickname for the service.' ) + + booru = CC.Booru( name, 'search_url', '+', 1, 'thumbnail', '', 'original image', {} ) + + self._edit_log.append( ( 'add', name ) ) + + page = DialogManageBoorusBooruPanel( self._boorus, booru ) + + self._boorus.AddPage( page, name, select = True ) + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + self.EventAdd( event ) + + + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventExport( self, event ): + + booru_panel = self._boorus.GetCurrentPage() + + if booru_panel is not None: + + name = self._boorus.GetCurrentName() + + booru = booru_panel.GetBooru() + + with wx.FileDialog( self, 'select where to export booru', defaultFile = 'booru.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( booru ) ) + + + + + + def EventOk( self, event ): + + for ( name, page ) in self._boorus.GetNameToPageDict().items(): + + if page.HasChanges(): self._edit_log.append( ( 'edit', ( name, page.GetBooru() ) ) ) + + + try: + + if len( self._edit_log ) > 0: wx.GetApp().Write( 'update_boorus', self._edit_log ) + + except Exception as e: wx.MessageBox( 'Saving boorus to DB raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def EventRemove( self, event ): + + booru_panel = self._boorus.GetCurrentPage() + + if booru_panel is not None: + + name = self._boorus.GetCurrentName() + + self._edit_log.append( ( 'delete', name ) ) + + self._boorus.DeleteCurrentPage() + + + + def Import( self, paths ): + + for path in paths: + + try: + + with open( path, 'rb' ) as f: file = f.read() + + thing = yaml.safe_load( file ) + + if type( thing ) == CC.Booru: + + booru = thing + + name = booru.GetName() + + if not self._boorus.NameExists( name ): + + new_booru = CC.Booru( name, 'search_url', '+', 1, 'thumbnail', '', 'original image', {} ) + + self._edit_log.append( ( 'add', name ) ) + + page = DialogManageBoorusBooruPanel( self._boorus, new_booru ) + + self._boorus.AddPage( page, name, select = True ) + + + page = self._boorus.GetNameToPageDict()[ name ] + + page.Update( booru ) + + + except: + + wx.MessageBox( traceback.format_exc() ) + + + + +class DialogManageBoorusBooruPanel( wx.Panel ): + + def __init__( self, parent, booru ): + + wx.Panel.__init__( self, parent ) + + self._booru = booru + + ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData() + + def InitialiseControls(): + + self._search_url = wx.TextCtrl( self, value = search_url ) + self._search_url.Bind( wx.EVT_TEXT, self.EventHTML ) + + self._search_separator = wx.Choice( self, choices = [ '+', 
'&', '%20' ] ) + self._search_separator.Select( self._search_separator.FindString( search_separator ) ) + self._search_separator.Bind( wx.EVT_CHOICE, self.EventHTML ) + + self._gallery_advance_num = wx.SpinCtrl( self, min = 1, max = 1000, initial = gallery_advance_num ) + self._gallery_advance_num.Bind( wx.EVT_SPIN, self.EventHTML ) + + self._thumb_classname = wx.TextCtrl( self, value = thumb_classname ) + self._thumb_classname.Bind( wx.EVT_TEXT, self.EventHTML ) + + self._example_html_search = wx.StaticText( self, style = wx.ST_NO_AUTORESIZE ) + + self._image_info = wx.TextCtrl( self ) + self._image_info.Bind( wx.EVT_TEXT, self.EventHTML ) + + self._image_id = wx.RadioButton( self, style = wx.RB_GROUP ) + self._image_id.Bind( wx.EVT_RADIOBUTTON, self.EventHTML ) + + self._image_data = wx.RadioButton( self ) + self._image_data.Bind( wx.EVT_RADIOBUTTON, self.EventHTML ) + + if image_id is None: + + self._image_info.SetValue( image_data ) + self._image_data.SetValue( True ) + + else: + + self._image_info.SetValue( image_id ) + self._image_id.SetValue( True ) + + + self._example_html_image = wx.StaticText( self, style = wx.ST_NO_AUTORESIZE ) + + self._tag_classnames_to_namespaces = wx.ListBox( self, style = wx.LB_SORT ) + self._tag_classnames_to_namespaces.Bind( wx.EVT_LEFT_DCLICK, self.EventRemove ) + + for ( tag_classname, namespace ) in tag_classnames_to_namespaces.items(): self._tag_classnames_to_namespaces.Append( tag_classname + ' : ' + namespace, ( tag_classname, namespace ) ) + + self._tag_classname = wx.TextCtrl( self ) + self._namespace = wx.TextCtrl( self ) + + self._add = wx.Button( self, label = 'add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + + self._example_html_tags = wx.StaticText( self, style = wx.ST_NO_AUTORESIZE ) + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox1 = wx.BoxSizer( wx.VERTICAL ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='search url' ), FLAGS_MIXED ) + gridbox.AddF( self._search_url, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='search tag separator' ), FLAGS_MIXED ) + gridbox.AddF( self._search_separator, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='gallery page advance' ), FLAGS_MIXED ) + gridbox.AddF( self._gallery_advance_num, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='thumbnail classname' ), FLAGS_MIXED ) + gridbox.AddF( self._thumb_classname, FLAGS_EXPAND_BOTH_WAYS ) + + vbox1.AddF( wx.StaticText( self, label = '- search -' ), FLAGS_SMALL_INDENT ) + vbox1.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox1.AddF( self._example_html_search, FLAGS_EXPAND_PERPENDICULAR ) + + vbox2 = wx.BoxSizer( wx.VERTICAL ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='text' ), FLAGS_MIXED ) + gridbox.AddF( self._image_info, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='id of ' ), FLAGS_MIXED ) + gridbox.AddF( self._image_id, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='text of ' ), FLAGS_MIXED ) + gridbox.AddF( self._image_data, FLAGS_EXPAND_BOTH_WAYS ) + + vbox2.AddF( wx.StaticText( self, label = '- image -' ), FLAGS_SMALL_INDENT ) + vbox2.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox2.AddF( self._example_html_image, FLAGS_EXPAND_PERPENDICULAR ) + + vbox3 = wx.BoxSizer( wx.VERTICAL 
) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._tag_classname, FLAGS_MIXED ) + hbox.AddF( self._namespace, FLAGS_MIXED ) + hbox.AddF( self._add, FLAGS_MIXED ) + + vbox3.AddF( wx.StaticText( self, label = '- tags -' ), FLAGS_SMALL_INDENT ) + vbox3.AddF( self._tag_classnames_to_namespaces, FLAGS_EXPAND_BOTH_WAYS ) + vbox3.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox3.AddF( self._example_html_tags, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( wx.StaticText( self, label = '- booru -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( vbox1, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( vbox2, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( vbox3, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + + def _GetInfo( self ): + + booru_name = self._booru.GetName() + + search_url = self._search_url.GetValue() + + search_separator = self._search_separator.GetStringSelection() + + gallery_advance_num = self._gallery_advance_num.GetValue() + + thumb_classname = self._thumb_classname.GetValue() + + if self._image_id.GetValue(): + + image_id = self._image_info.GetValue() + image_data = None + + else: + + image_id = None + image_data = self._image_info.GetValue() + + + tag_classnames_to_namespaces = { tag_classname : namespace for ( tag_classname, namespace ) in [ self._tag_classnames_to_namespaces.GetClientData( i ) for i in range( self._tag_classnames_to_namespaces.GetCount() ) ] } + + return ( booru_name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + + def EventAdd( self, event ): + + tag_classname = self._tag_classname.GetValue() + namespace = self._namespace.GetValue() + + if tag_classname != '': + + self._tag_classnames_to_namespaces.Append( tag_classname + ' : ' + namespace, ( tag_classname, namespace ) ) + + self._tag_classname.SetValue( '' ) + self._namespace.SetValue( '' ) + + self.EventHTML( event ) + + + + def EventHTML( self, event ): + + pass + + + def EventRemove( self, event ): + + selection = self._tag_classnames_to_namespaces.GetSelection() + + if selection != wx.NOT_FOUND: + + self._tag_classnames_to_namespaces.Delete( selection ) + + self.EventHTML( event ) + + + + def GetBooru( self ): + + ( booru_name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = self._GetInfo() + + return CC.Booru( booru_name, search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) + + + def HasChanges( self ): + + ( booru_name, my_search_url, my_search_separator, my_gallery_advance_num, my_thumb_classname, my_image_id, my_image_data, my_tag_classnames_to_namespaces ) = self._GetInfo() + + ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = self._booru.GetData() + + if search_url != my_search_url: return True + + if search_separator != my_search_separator: return True + + if gallery_advance_num != my_gallery_advance_num: return True + + if thumb_classname != my_thumb_classname: return True + + if image_id != my_image_id: return True + + if image_data != my_image_data: return True + + if tag_classnames_to_namespaces != my_tag_classnames_to_namespaces: return True + + return False + + + def Update( self, booru ): + + ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = booru.GetData() + + 
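# HasChanges above is a straight field-by-field dirty check: the panel re-reads its
# controls via _GetInfo and compares them against the tuple the booru was built from.
# a compact sketch of the same test over plain tuples, with the field order of
# booru.GetData() above:

def booru_has_changes( stored, current ):
    
    # each is ( search_url, search_separator, gallery_advance_num, thumb_classname,
    # image_id, image_data, tag_classnames_to_namespaces )
    return stored != current
    

# the dialog spells this out one field at a time, which gives the same answer.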
self._search_url.SetValue( search_url ) + + self._search_separator.Select( self._search_separator.FindString( search_separator ) ) + + self._gallery_advance_num.SetValue( gallery_advance_num ) + + self._thumb_classname.SetValue( thumb_classname ) + + if image_id is None: + + self._image_info.SetValue( image_data ) + self._image_data.SetValue( True ) + + else: + + self._image_info.SetValue( image_id ) + self._image_id.SetValue( True ) + + + self._tag_classnames_to_namespaces.Clear() + + for ( tag_classname, namespace ) in tag_classnames_to_namespaces.items(): self._tag_classnames_to_namespaces.Append( tag_classname + ' : ' + namespace, ( tag_classname, namespace ) ) + + +class DialogManageContacts( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._edit_log = [] + + self._contacts = ClientGUICommon.ListBook( self ) + + ( identities, contacts, deletable_names ) = wx.GetApp().Read( 'identities_and_contacts' ) + + self._deletable_names = deletable_names + + for identity in identities: + + name = identity.GetName() + + page_info = ( DialogManageContactsContactPanel, ( self._contacts, identity ), { 'is_identity' : True } ) + + self._contacts.AddPage( page_info, ' identity - ' + name ) + + + for contact in contacts: + + name = contact.GetName() + + page_info = ( DialogManageContactsContactPanel, ( self._contacts, contact ), { 'is_identity' : False } ) + + self._contacts.AddPage( page_info, name ) + + + # bind events after population + self._contacts.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventContactChanging ) + self._contacts.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventContactChanged ) + + self._add_contact_address = wx.Button( self, label='add by contact address' ) + self._add_contact_address.Bind( wx.EVT_BUTTON, self.EventAddByContactAddress ) + self._add_contact_address.SetForegroundColour( ( 0, 128, 0 ) ) + + self._add_manually = wx.Button( self, label='add manually' ) + self._add_manually.Bind( wx.EVT_BUTTON, self.EventAddManually ) + self._add_manually.SetForegroundColour( ( 0, 128, 0 ) ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._export = wx.Button( self, label='export' ) + self._export.Bind( wx.EVT_BUTTON, self.EventExport ) + + self._ok = wx.Button( self, label='ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL ) + add_remove_hbox.AddF( self._add_manually, FLAGS_MIXED ) + add_remove_hbox.AddF( self._add_contact_address, FLAGS_MIXED ) + add_remove_hbox.AddF( self._remove, FLAGS_MIXED ) + add_remove_hbox.AddF( self._export, FLAGS_MIXED ) + + ok_hbox = wx.BoxSizer( wx.HORIZONTAL ) + ok_hbox.AddF( self._ok, FLAGS_MIXED ) + ok_hbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._contacts, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT ) + vbox.AddF( ok_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 980, y ) ) + + + Dialog.__init__( self, parent, 'manage contacts' ) + + InitialiseControls() + + InitialisePanel() + + self.SetDropTarget( ClientGUICommon.FileDropTarget( self.Import ) ) + 
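# the contact address format used in EventAddByContactAddress below (and re-parsed in the
# contact panel's _GetInfo) is 'contact_key@hostname:port', with the contact key
# hex-encoded. a standalone sketch of that parse (python 2 'hex' codec assumed, as in the
# surrounding code):

def parse_contact_address( contact_address ):
    
    ( contact_key_encoded, address ) = contact_address.split( '@' )
    
    contact_key = contact_key_encoded.decode( 'hex' ) # raw key bytes
    
    ( host, port ) = address.split( ':' )
    
    return ( contact_key, host, int( port ) )
    

# e.g. parse_contact_address( 'ff00@example.com:45871' ) -> ( '\xff\x00', 'example.com', 45871 )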
+ self.EventContactChanged( None ) + + + def _CheckCurrentContactIsValid( self ): + + contact_panel = self._contacts.GetCurrentPage() + + if contact_panel is not None: + + contact = contact_panel.GetContact() + + old_name = self._contacts.GetCurrentName() + name = contact.GetName() + + if name != old_name and ' identity - ' + name != old_name: + + if self._contacts.NameExists( name ) or self._contacts.NameExists( ' identity - ' + name ) or name == 'Anonymous': raise Exception( 'That name is already in use!' ) + + if old_name.startswith( ' identity - ' ): self._contacts.RenamePage( old_name, ' identity - ' + name ) + else: self._contacts.RenamePage( old_name, name ) + + + + + def EventAddByContactAddress( self, event ): + + try: self._CheckCurrentContactIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + return + + + with wx.TextEntryDialog( self, 'Enter contact\'s address in the form contact_key@hostname:port' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + contact_address = dlg.GetValue() + + try: + + ( contact_key_encoded, address ) = contact_address.split( '@' ) + + contact_key = contact_key_encoded.decode( 'hex' ) + + ( host, port ) = address.split( ':' ) + + port = int( port ) + + except: raise Exception( 'Could not parse the address!' ) + + name = contact_key_encoded + + contact = ClientConstantsMessages.Contact( None, name, host, port ) + + try: + + connection = contact.GetConnection() + + public_key = connection.Get( 'publickey', contact_key = contact_key.encode( 'hex' ) ) + + except: raise Exception( 'Could not fetch the contact\'s public key from the address:' + os.linesep + traceback.format_exc() ) + + contact = ClientConstantsMessages.Contact( public_key, name, host, port ) + + self._edit_log.append( ( 'add', contact ) ) + + page = DialogManageContactsContactPanel( self._contacts, contact, is_identity = False ) + + self._deletable_names.add( name ) + + self._contacts.AddPage( page, name, select = True ) + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + self.EventAddByContactAddress( event ) + + + + + + def EventAddManually( self, event ): + + try: self._CheckCurrentContactIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + return + + + with wx.TextEntryDialog( self, 'Enter new contact\'s name' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + name = dlg.GetValue() + + if self._contacts.NameExists( name ) or self._contacts.NameExists( ' identity - ' + name ) or name == 'Anonymous': raise Exception( 'That name is already in use!' ) + + if name == '': raise Exception( 'Please enter a nickname for the service.' 
) + + public_key = None + host = 'hostname' + port = 45871 + + contact = ClientConstantsMessages.Contact( public_key, name, host, port ) + + self._edit_log.append( ( 'add', contact ) ) + + page = DialogManageContactsContactPanel( self._contacts, contact, is_identity = False ) + + self._deletable_names.add( name ) + + self._contacts.AddPage( page, name, select = True ) + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + self.EventAddManually( event ) + + + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventContactChanged( self, event ): + + contact_panel = self._contacts.GetCurrentPage() + + if contact_panel is not None: + + old_name = contact_panel.GetOriginalName() + + if old_name in self._deletable_names: self._remove.Enable() + else: self._remove.Disable() + + + + def EventContactChanging( self, event ): + + try: self._CheckCurrentContactIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + event.Veto() + + + + def EventExport( self, event ): + + contact_panel = self._contacts.GetCurrentPage() + + if contact_panel is not None: + + name = self._contacts.GetCurrentName() + + contact = contact_panel.GetContact() + + try: + + with wx.FileDialog( self, 'select where to export contact', defaultFile = name + '.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( contact ) ) + + + + except: + + with wx.FileDialog( self, 'select where to export contact', defaultFile = 'contact.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( contact ) ) + + + + + + + def EventOk( self, event ): + + try: self._CheckCurrentContactIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + return + + + for ( name, page ) in self._contacts.GetNameToPageDict().items(): + + if page.HasChanges(): self._edit_log.append( ( 'edit', ( page.GetOriginalName(), page.GetContact() ) ) ) + + + try: + + if len( self._edit_log ) > 0: wx.GetApp().Write( 'update_contacts', self._edit_log ) + + except Exception as e: wx.MessageBox( 'Saving contacts to DB raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + # this isn't used yet! + def EventRemove( self, event ): + + contact_panel = self._contacts.GetCurrentPage() + + if contact_panel is not None: + + name = contact_panel.GetOriginalName() + + self._edit_log.append( ( 'delete', name ) ) + + self._contacts.DeleteCurrentPage() + + self._deletable_names.discard( name ) + + + + def Import( self, paths ): + + try: self._CheckCurrentContactIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + return + + + for path in paths: + + try: + + with open( path, 'rb' ) as f: file = f.read() + + obj = yaml.safe_load( file ) + + if type( obj ) == ClientConstantsMessages.Contact: + + contact = obj + + name = contact.GetName() + + if self._contacts.NameExists( name ) or self._contacts.NameExists( ' identities - ' + name ) or name == 'Anonymous': + + message = 'There already exists a contact or identity with the name ' + name + '. Do you want to overwrite, or make a new contact?' 
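# in the DialogYesNo block that follows, the dialog is constructed but the branch is taken
# with 'if True:', so the overwrite path always runs; presumably the result of
# dlg.ShowModal() was meant to be checked here. the 'make new' path renames the import by
# appending random digits until the name is unused; a standalone sketch of that renaming
# rule, over a plain set of names (hypothetical helper):

import random

def make_unused_name( name, existing_names ):
    
    while name in existing_names or name == 'Anonymous': name = name + str( random.randint( 0, 9 ) )
    
    return name
    

# e.g. make_unused_name( 'alice', { 'alice', 'alice3' } ) might return 'alice7'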
+ + with DialogYesNo( self, message, yes_label = 'overwrite', no_label = 'make new' ) as dlg: + + if True: + + name_to_page_dict = self._contacts.GetNameToPageDict() + + if name in name_to_page_dict: page = name_to_page_dict[ name ] + else: page = name_to_page_dict[ ' identities - ' + name ] + + page.Update( contact ) + + else: + + while self._contacts.NameExists( name ) or self._contacts.NameExists( ' identities - ' + name ) or name == 'Anonymous': name = name + str( random.randint( 0, 9 ) ) + + ( public_key, old_name, host, port ) = contact.GetInfo() + + new_contact = ClientConstantsMessages.Contact( public_key, name, host, port ) + + self._edit_log.append( ( 'add', contact ) ) + + self._deletable_names.add( name ) + + page = DialogManageContactsContactPanel( self._contacts, contact, False ) + + self._contacts.AddPage( page, name, select = True ) + + + + else: + + ( public_key, old_name, host, port ) = contact.GetInfo() + + new_contact = ClientConstantsMessages.Contact( public_key, name, host, port ) + + self._edit_log.append( ( 'add', contact ) ) + + self._deletable_names.add( name ) + + page = DialogManageContactsContactPanel( self._contacts, contact, False ) + + self._contacts.AddPage( page, name, select = True ) + + + + except: + + wx.MessageBox( traceback.format_exc() ) + + + + +class DialogManageContactsContactPanel( wx.Panel ): + + def __init__( self, parent, contact, is_identity ): + + wx.Panel.__init__( self, parent ) + + self._contact = contact + self._is_identity = is_identity + + ( public_key, name, host, port ) = contact.GetInfo() + + contact_key = contact.GetContactKey() + + def InitialiseControls(): + + self._name = wx.TextCtrl( self, value = name ) + + contact_address = host + ':' + str( port ) + + if contact_key is not None: contact_address = contact_key.encode( 'hex' ) + '@' + contact_address + + self._contact_address = wx.TextCtrl( self, value = contact_address ) + + self._public_key = wx.TextCtrl( self, style = wx.TE_MULTILINE ) + + if public_key is not None: self._public_key.SetValue( public_key ) + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='name' ), FLAGS_MIXED ) + gridbox.AddF( self._name, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='contact address' ), FLAGS_MIXED ) + gridbox.AddF( self._contact_address, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label = 'public key' ), FLAGS_MIXED ) + gridbox.AddF( self._public_key, FLAGS_EXPAND_BOTH_WAYS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self, label = '- contact -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + + def _GetInfo( self ): + + public_key = self._public_key.GetValue() + + if public_key == '': public_key = None + + name = self._name.GetValue() + + contact_address = self._contact_address.GetValue() + + try: + + if '@' in contact_address: ( contact_key, address ) = contact_address.split( '@' ) + else: address = contact_address + + ( host, port ) = address.split( ':' ) + + try: port = int( port ) + except: + + port = 45871 + + wx.MessageBox( 'Could not parse the port!' ) + + + except: + + host = 'hostname' + port = 45871 + + wx.MessageBox( 'Could not parse the contact\'s address!' 
) + + + return [ public_key, name, host, port ] + + + def GetContact( self ): + + [ public_key, name, host, port ] = self._GetInfo() + + return ClientConstantsMessages.Contact( public_key, name, host, port ) + + + def GetOriginalName( self ): return self._contact.GetName() + + def HasChanges( self ): + + [ my_public_key, my_name, my_host, my_port ] = self._GetInfo() + + [ public_key, name, host, port ] = self._contact.GetInfo() + + if my_public_key != public_key: return True + + if my_name != name: return True + + if my_host != host: return True + + if my_port != port: return True + + return False + + + def Update( self, contact ): + + ( public_key, name, host, port ) = contact.GetInfo() + + contact_key = contact.GetContactKey() + + self._name.SetValue( name ) + + contact_address = host + ':' + str( port ) + + if contact_key is not None: contact_address = contact_key.encode( 'hex' ) + '@' + contact_address + + self._contact_address.SetValue( contact_address ) + + if public_key is None: public_key = '' + + self._public_key.SetValue( public_key ) + + +class DialogManage4chanPass( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._token = wx.TextCtrl( self, value = token ) + self._pin = wx.TextCtrl( self, value = pin ) + + self._status = wx.StaticText( self ) + + self._SetStatus() + + self._reauthenticate = wx.Button( self, label = 'reauthenticate' ) + self._reauthenticate.Bind( wx.EVT_BUTTON, self.EventReauthenticate ) + + self._ok = wx.Button( self, label='Ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='token' ), FLAGS_MIXED ) + gridbox.AddF( self._token, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='pin' ), FLAGS_MIXED ) + gridbox.AddF( self._pin, FLAGS_EXPAND_BOTH_WAYS ) + + b_box = wx.BoxSizer( wx.HORIZONTAL ) + b_box.AddF( self._ok, FLAGS_MIXED ) + b_box.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._status, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._reauthenticate, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( b_box, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + x = max( x, 240 ) + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'manage 4chan pass' ) + + ( token, pin, self._timeout ) = wx.GetApp().Read( '4chan_pass' ) + + InitialiseControls() + + InitialisePanel() + + + def _SetStatus( self ): + + if self._timeout == 0: label = 'not authenticated' + elif self._timeout < int( time.time() ): label = 'timed out' + else: label = 'authenticated - ' + HC.ConvertTimestampToPrettyExpires( self._timeout ) + + self._status.SetLabel( label ) + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOk( self, event ): + + token = self._token.GetValue() + pin = self._pin.GetValue() + + wx.GetApp().Write( '4chan_pass', token, pin, self._timeout ) + + self.EndModal( wx.ID_OK ) + + + def EventReauthenticate( self, event ): + + try: + + token = self._token.GetValue() + pin = self._pin.GetValue() + + form_fields = {} + + form_fields[ 'act' ] = 'do_login' + form_fields[ 'id' ] = token + 
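+ # the pin and long_login fields are filled in below, then the form is encoded as multipart/form-data and POSTed to sys.4chan.org/auth; cookies from a successful login are kept by the connection and the pass is treated as valid for roughly a year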
form_fields[ 'pin' ] = pin + form_fields[ 'long_login' ] = 'yes' + + ( ct, body ) = CC.GenerateMultipartFormDataCTAndBodyFromDict( form_fields ) + + headers = {} + headers[ 'Content-Type' ] = ct + + connection = CC.AdvancedHTTPConnection( url = 'https://sys.4chan.org/', accept_cookies = True ) + + connection.request( 'POST', '/auth', headers = headers, body = body ) + + cookies = connection.GetCookies() + + self._timeout = int( time.time() ) + 365 * 24 * 3600 + + wx.GetApp().Write( '4chan_pass', token, pin, self._timeout ) + + self._SetStatus() + + except Exception as e: + wx.MessageBox( traceback.format_exc() ) + wx.MessageBox( unicode( e ) ) + + +class DialogManageImageboards( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._edit_log = [] + + self._sites = ClientGUICommon.ListBook( self ) + + sites = wx.GetApp().Read( 'imageboards' ) + + for ( name, imageboards ) in sites: + + page_info = ( DialogManageImageboardsSitePanel, ( self._sites, imageboards ), {} ) + + self._sites.AddPage( page_info, name ) + + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + self._add.SetForegroundColour( ( 0, 128, 0 ) ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._export = wx.Button( self, label='export' ) + self._export.Bind( wx.EVT_BUTTON, self.EventExport ) + + self._ok = wx.Button( self, label='ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL ) + add_remove_hbox.AddF( self._add, FLAGS_MIXED ) + add_remove_hbox.AddF( self._remove, FLAGS_MIXED ) + add_remove_hbox.AddF( self._export, FLAGS_MIXED ) + + ok_hbox = wx.BoxSizer( wx.HORIZONTAL ) + ok_hbox.AddF( self._ok, FLAGS_MIXED ) + ok_hbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._sites, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT ) + vbox.AddF( ok_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 980, y ) ) + + + Dialog.__init__( self, parent, 'manage imageboards' ) + + InitialiseControls() + + InitialisePanel() + + self.SetDropTarget( ClientGUICommon.FileDropTarget( self.Import ) ) + + + def EventAdd( self, event ): + + with wx.TextEntryDialog( self, 'Enter new site\'s name' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + name = dlg.GetValue() + + if self._sites.NameExists( name ): raise Exception( 'That name is already in use!' ) + + if name == '': raise Exception( 'Please enter a nickname for the service.' 
) + + self._edit_log.append( ( 'add', name ) ) + + page = DialogManageImageboardsSitePanel( self._sites, [] ) + + self._sites.AddPage( page, name, select = True ) + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + self.EventAdd( event ) + + + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventExport( self, event ): + + site_panel = self._sites.GetCurrentPage() + + if site_panel is not None: + + name = self._sites.GetCurrentName() + + imageboards = site_panel.GetImageboards() + + dict = { name : imageboards } + + with wx.FileDialog( self, 'select where to export site', defaultFile = 'site.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( dict ) ) + + + + + + def EventOk( self, event ): + + for ( name, page ) in self._sites.GetNameToPageDict().items(): + + if page.HasChanges(): self._edit_log.append( ( 'edit', ( name, page.GetChanges() ) ) ) + + + try: + + if len( self._edit_log ) > 0: wx.GetApp().Write( 'update_imageboards', self._edit_log ) + + except Exception as e: wx.MessageBox( 'Saving imageboards to DB raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def EventRemove( self, event ): + + site_panel = self._sites.GetCurrentPage() + + if site_panel is not None: + + name = self._sites.GetCurrentName() + + self._edit_log.append( ( 'delete', name ) ) + + self._sites.DeleteCurrentPage() + + + + def Import( self, paths ): + + for path in paths: + + try: + + with open( path, 'rb' ) as f: file = f.read() + + thing = yaml.safe_load( file ) + + if type( thing ) == dict: + + ( name, imageboards ) = thing.items()[0] + + if not self._sites.NameExists( name ): + + self._edit_log.append( ( 'add', name ) ) + + page = DialogManageImageboardsSitePanel( self._sites, [] ) + + self._sites.AddPage( page, name, select = True ) + + + page = self._sites.GetNameToPageDict()[ name ] + + for imageboard in imageboards: + + if type( imageboard ) == CC.Imageboard: page.UpdateImageboard( imageboard ) + + + elif type( thing ) == CC.Imageboard: + + imageboard = thing + + page = self._sites.GetCurrentPage() + + page.UpdateImageboard( imageboard ) + + + except: + + wx.MessageBox( traceback.format_exc() ) + + + + +class DialogManageImageboardsSitePanel( wx.Panel ): + + def __init__( self, parent, imageboards ): + + wx.Panel.__init__( self, parent ) + + def InitialiseControls(): + + self._edit_log = [] + + self._imageboards = ClientGUICommon.ListBook( self ) + + for imageboard in imageboards: + + name = imageboard.GetName() + + page_info = ( DialogManageImageboardsImageboardPanel, ( self._imageboards, imageboard ), {} ) + + self._imageboards.AddPage( page_info, name ) + + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + self._add.SetForegroundColour( ( 0, 128, 0 ) ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._export = wx.Button( self, label='export' ) + self._export.Bind( wx.EVT_BUTTON, self.EventExport ) + + + def InitialisePanel(): + + add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL ) + add_remove_hbox.AddF( self._add, FLAGS_MIXED ) + add_remove_hbox.AddF( self._remove, FLAGS_MIXED ) + add_remove_hbox.AddF( self._export, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self, label = '- site -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( self._imageboards, 
FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 980, y ) ) + + + InitialiseControls() + + InitialisePanel() + + + def EventAdd( self, event ): + + with wx.TextEntryDialog( self, 'Enter new imageboard\'s name' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + name = dlg.GetValue() + + if self._imageboards.NameExists( name ): raise Exception( 'That name is already in use!' ) + + if name == '': raise Exception( 'Please enter a nickname for the service.' ) + + imageboard = CC.Imageboard( name, '', 60, [], {} ) + + self._edit_log.append( ( 'add', name ) ) + + page = DialogManageImageboardsImageboardPanel( self._imageboards, imageboard ) + + self._imageboards.AddPage( page, name, select = True ) + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + self.EventAdd( event ) + + + + + + def EventExport( self, event ): + + imageboard_panel = self._imageboards.GetCurrentPage() + + if imageboard_panel is not None: + + imageboard = imageboard_panel.GetImageboard() + + with wx.FileDialog( self, 'select where to export imageboard', defaultFile = 'imageboard.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( imageboard ) ) + + + + + + def EventRemove( self, event ): + + imageboard_panel = self._imageboards.GetCurrentPage() + + if imageboard_panel is not None: + + name = self._imageboards.GetCurrentName() + + self._edit_log.append( ( 'delete', name ) ) + + self._imageboards.DeleteCurrentPage() + + + + def GetChanges( self ): + + for page in self._imageboards.GetNameToPageDict().values(): + + if page.HasChanges(): self._edit_log.append( ( 'edit', page.GetImageboard() ) ) + + + return self._edit_log + + + def GetImageboards( self ): return [ page.GetImageboard() for page in self._imageboards.GetNameToPageDict().values() ] + + def HasChanges( self ): return len( self._edit_log ) > 0 or True in ( page.HasChanges() for page in self._imageboards.GetNameToPageDict().values() ) + + def UpdateImageboard( self, imageboard ): + + name = imageboard.GetName() + + if not self._imageboards.NameExists( name ): + + new_imageboard = CC.Imageboard( name, '', 60, [], {} ) + + self._edit_log.append( ( 'add', name ) ) + + page = DialogManageImageboardsImageboardPanel( self._imageboards, new_imageboard ) + + self._imageboards.AddPage( page, name, select = True ) + + + page = self._imageboards.GetNameToPageDict()[ name ] + + page.Update( imageboard ) + + +class DialogManageImageboardsImageboardPanel( wx.Panel ): + + def __init__( self, parent, imageboard ): + + wx.Panel.__init__( self, parent ) + + self._imageboard = imageboard + + ( post_url, flood_time, form_fields, restrictions ) = self._imageboard.GetBoardInfo() + + def InitialiseControls(): + + self._post_url = wx.TextCtrl( self, value = post_url ) + + self._flood_time = wx.SpinCtrl( self, min = 5, max = 1200, initial = flood_time ) + + self._form_fields = ClientGUICommon.SaneListCtrl( self, 350, [ ( 'name', 120 ), ( 'type', 120 ), ( 'default', -1 ), ( 'editable', 120 ) ] ) + + for ( name, type, default, editable ) in form_fields: + + self._form_fields.Append( ( name, CC.field_string_lookup[ type ], str( default ), str( editable ) ), ( name, type, default, editable ) ) + + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + + self._edit = wx.Button( self, label='edit' ) + self._edit.Bind( wx.EVT_BUTTON, 
self.EventEdit ) + + self._delete = wx.Button( self, label='delete' ) + self._delete.Bind( wx.EVT_BUTTON, self.EventDelete ) + + if CC.RESTRICTION_MIN_RESOLUTION in restrictions: value = restrictions[ CC.RESTRICTION_MIN_RESOLUTION ] + else: value = None + + self._min_resolution = ClientGUICommon.NoneableSpinCtrl( self, 'min resolution', value, num_dimensions = 2 ) + + if CC.RESTRICTION_MAX_RESOLUTION in restrictions: value = restrictions[ CC.RESTRICTION_MAX_RESOLUTION ] + else: value = None + + self._max_resolution = ClientGUICommon.NoneableSpinCtrl( self, 'max resolution', value, num_dimensions = 2 ) + + if CC.RESTRICTION_MAX_FILE_SIZE in restrictions: value = restrictions[ CC.RESTRICTION_MAX_FILE_SIZE ] + else: value = None + + self._max_file_size = ClientGUICommon.NoneableSpinCtrl( self, 'max file size (KB)', value, multiplier = 1024 ) + + self._mimes = wx.ListBox( self ) + + if CC.RESTRICTION_ALLOWED_MIMES in restrictions: mimes = restrictions[ CC.RESTRICTION_ALLOWED_MIMES ] + else: mimes = [] + + for mime in mimes: self._mimes.Append( HC.mime_string_lookup[ mime ], mime ) + + self._mime_choice = wx.Choice( self ) + + for mime in HC.ALLOWED_MIMES: self._mime_choice.Append( HC.mime_string_lookup[ mime ], mime ) + + self._mime_choice.SetSelection( 0 ) + + self._add_mime = wx.Button( self, label = 'add' ) + self._add_mime.Bind( wx.EVT_BUTTON, self.EventAddMime ) + + self._remove_mime = wx.Button( self, label = 'remove' ) + self._remove_mime.Bind( wx.EVT_BUTTON, self.EventRemoveMime ) + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='POST URL' ), FLAGS_MIXED ) + gridbox.AddF( self._post_url, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='flood time' ), FLAGS_MIXED ) + gridbox.AddF( self._flood_time, FLAGS_EXPAND_BOTH_WAYS ) + + field_box = wx.BoxSizer( wx.VERTICAL ) + + field_box.AddF( wx.StaticText( self, label = '- form fields -' ), FLAGS_SMALL_INDENT ) + field_box.AddF( self._form_fields, FLAGS_EXPAND_BOTH_WAYS ) + + h_b_box = wx.BoxSizer( wx.HORIZONTAL ) + h_b_box.AddF( self._add, FLAGS_MIXED ) + h_b_box.AddF( self._edit, FLAGS_MIXED ) + h_b_box.AddF( self._delete, FLAGS_MIXED ) + + mime_buttons_box = wx.BoxSizer( wx.HORIZONTAL ) + mime_buttons_box.AddF( self._mime_choice, FLAGS_MIXED ) + mime_buttons_box.AddF( self._add_mime, FLAGS_MIXED ) + mime_buttons_box.AddF( self._remove_mime, FLAGS_MIXED ) + + m_box = wx.BoxSizer( wx.VERTICAL ) + + m_box.AddF( wx.StaticText( self, label = '- allowed mimes -' ), FLAGS_SMALL_INDENT ) + m_box.AddF( self._mimes, FLAGS_EXPAND_BOTH_WAYS ) + m_box.AddF( mime_buttons_box, FLAGS_EXPAND_PERPENDICULAR ) + + restrictions_box = wx.BoxSizer( wx.VERTICAL ) + + restrictions_box.AddF( wx.StaticText( self, label = '- restrictions -' ), FLAGS_SMALL_INDENT ) + restrictions_box.AddF( self._min_resolution, FLAGS_EXPAND_PERPENDICULAR ) + restrictions_box.AddF( self._max_resolution, FLAGS_EXPAND_PERPENDICULAR ) + restrictions_box.AddF( self._max_file_size, FLAGS_EXPAND_PERPENDICULAR ) + restrictions_box.AddF( m_box, FLAGS_EXPAND_BOTH_WAYS ) + + vbox.AddF( wx.StaticText( self, label = '- imageboard -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( field_box, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( h_b_box, FLAGS_BUTTON_SIZERS ) + vbox.AddF( restrictions_box, FLAGS_EXPAND_PERPENDICULAR ) + + 
self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + + def _GetInfo( self ): + + imageboard_name = self._imageboard.GetName() + + post_url = self._post_url.GetValue() + + flood_time = self._flood_time.GetValue() + + # list instead of tuple because of yaml comparisons + form_fields = self._form_fields.GetClientData() + + restrictions = {} + + # yaml list again + value = self._min_resolution.GetValue() + if value is not None: restrictions[ CC.RESTRICTION_MIN_RESOLUTION ] = list( value ) + + # yaml list again + value = self._max_resolution.GetValue() + if value is not None: restrictions[ CC.RESTRICTION_MAX_RESOLUTION ] = list( value ) + + value = self._max_file_size.GetValue() + if value is not None: restrictions[ CC.RESTRICTION_MAX_FILE_SIZE ] = value + + mimes = [ self._mimes.GetClientData( i ) for i in range( self._mimes.GetCount() ) ] + + if len( mimes ) > 0: restrictions[ CC.RESTRICTION_ALLOWED_MIMES ] = mimes + + return ( imageboard_name, post_url, flood_time, form_fields, restrictions ) + + + def EventAdd( self, event ): + + try: + + with DialogInputNewFormField( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + ( name, type, default, editable ) = dlg.GetFormField() + + if name in [ form_field[0] for form_field in self._form_fields.GetClientData() ]: + + wx.MessageBox( 'There is already a field named ' + name ) + + self.EventAdd( event ) + + return + + + self._form_fields.Append( ( name, CC.field_string_lookup[ type ], str( default ), str( editable ) ), ( name, type, default, editable ) ) + + + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def EventAddMime( self, event ): + + selection = self._mime_choice.GetSelection() + + if selection != wx.NOT_FOUND: + + mime = self._mime_choice.GetClientData( selection ) + + existing_mimes = [ self._mimes.GetClientData( i ) for i in range( self._mimes.GetCount() ) ] + + if mime not in existing_mimes: self._mimes.Append( HC.mime_string_lookup[ mime ], mime ) + + + + def EventDelete( self, event ): self._form_fields.RemoveAllSelected() + + def EventRemoveMime( self, event ): + + selection = self._mimes.GetSelection() + + if selection != wx.NOT_FOUND: self._mimes.Delete( selection ) + + + def EventEdit( self, event ): + + indices = self._form_fields.GetAllSelected() + + for index in indices: + + ( name, type, default, editable ) = self._form_fields.GetClientData( index ) + + form_field = ( name, type, default, editable ) + + try: + + with DialogInputNewFormField( self, form_field ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + old_name = name + + ( name, type, default, editable ) = dlg.GetFormField() + + if old_name != name: + + if name in [ form_field[0] for form_field in self._form_fields.GetClientData() ]: raise Exception( 'You already have a form field called ' + name + '; delete or edit that one first' ) + + + self._form_fields.UpdateRow( index, ( name, CC.field_string_lookup[ type ], str( default ), str( editable ) ), ( name, type, default, editable ) ) + + + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + def GetImageboard( self ): + + ( name, post_url, flood_time, form_fields, restrictions ) = self._GetInfo() + + return CC.Imageboard( name, post_url, flood_time, form_fields, restrictions ) + + + def HasChanges( self ): + + ( my_name, my_post_url, my_flood_time, my_form_fields, my_restrictions ) = self._GetInfo() + + ( post_url, flood_time, form_fields, restrictions ) = self._imageboard.GetBoardInfo() + + if post_url != my_post_url: return True + + if flood_time != my_flood_time: return 
True + + if set( [ tuple( item ) for item in form_fields ] ) != set( [ tuple( item ) for item in my_form_fields ] ): return True + + if restrictions != my_restrictions: return True + + return False + + + def Update( self, imageboard ): + + ( post_url, flood_time, form_fields, restrictions ) = imageboard.GetBoardInfo() + + self._post_url.SetValue( post_url ) + self._flood_time.SetValue( flood_time ) + + self._form_fields.ClearAll() + + self._form_fields.InsertColumn( 0, 'name', width = 120 ) + self._form_fields.InsertColumn( 1, 'type', width = 120 ) + self._form_fields.InsertColumn( 2, 'default' ) + self._form_fields.InsertColumn( 3, 'editable', width = 120 ) + + self._form_fields.setResizeColumn( 3 ) # default + + for ( name, type, default, editable ) in form_fields: + + self._form_fields.Append( ( name, CC.field_string_lookup[ type ], str( default ), str( editable ) ), ( name, type, default, editable ) ) + + + if CC.RESTRICTION_MIN_RESOLUTION in restrictions: value = restrictions[ CC.RESTRICTION_MIN_RESOLUTION ] + else: value = None + + self._min_resolution.SetValue( value ) + + if CC.RESTRICTION_MAX_RESOLUTION in restrictions: value = restrictions[ CC.RESTRICTION_MAX_RESOLUTION ] + else: value = None + + self._max_resolution.SetValue( value ) + + if CC.RESTRICTION_MAX_FILE_SIZE in restrictions: value = restrictions[ CC.RESTRICTION_MAX_FILE_SIZE ] + else: value = None + + self._max_file_size.SetValue( value ) + + self._mimes.Clear() + + if CC.RESTRICTION_ALLOWED_MIMES in restrictions: mimes = restrictions[ CC.RESTRICTION_ALLOWED_MIMES ] + else: mimes = [] + + for mime in mimes: self._mimes.Append( HC.mime_string_lookup[ mime ], mime ) + + +class DialogManageOptionsFileRepository( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + self._max_monthly_data = ClientGUICommon.NoneableSpinCtrl( self, 'max monthly data (MB)', options[ 'max_monthly_data' ], multiplier = 1048576 ) + self._max_storage = ClientGUICommon.NoneableSpinCtrl( self, 'max storage (MB)', options[ 'max_storage' ], multiplier = 1048576 ) + + self._log_uploader_ips = wx.CheckBox( self, label='' ) + self._log_uploader_ips.SetValue( options[ 'log_uploader_ips' ] ) + + self._message = wx.TextCtrl( self, value = options[ 'message' ] ) + + self._save_button = wx.Button( self, label='Save' ) + self._save_button.Bind( wx.EVT_BUTTON, self.EventOK ) + self._save_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._close_button = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._close_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._close_button.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='Log uploader ips?' 
), FLAGS_MIXED ) + gridbox.AddF( self._log_uploader_ips, FLAGS_MIXED ) + gridbox.AddF( wx.StaticText( self, label='Message' ), FLAGS_MIXED ) + gridbox.AddF( self._message, FLAGS_MIXED ) + + file_vbox = wx.BoxSizer( wx.VERTICAL ) + + file_vbox.AddF( wx.StaticText( self, label = '- file repository -' ), FLAGS_SMALL_INDENT ) + file_vbox.AddF( self._max_monthly_data, FLAGS_EXPAND_PERPENDICULAR ) + file_vbox.AddF( self._max_storage, FLAGS_EXPAND_PERPENDICULAR ) + file_vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._save_button, FLAGS_SMALL_INDENT ) + buttons.AddF( self._close_button, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( file_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x + 80, y ) ) + + + Dialog.__init__( self, parent, service_identifier.GetName() + ' options' ) + + self._service_identifier = service_identifier + + self._service = wx.GetApp().Read( 'service', service_identifier ) + + connection = self._service.GetConnection() + + options = connection.Get( 'options' ) + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOK( self, event ): + + options = {} + + options[ 'max_monthly_data' ] = self._max_monthly_data.GetValue() + + options[ 'max_storage' ] = self._max_storage.GetValue() + + options[ 'log_uploader_ips' ] = self._log_uploader_ips.GetValue() + + options[ 'message' ] = self._message.GetValue() + + try: + + connection = self._service.GetConnection() + + connection.Post( 'options', options = options ) + + except Exception as e: wx.MessageBox( 'Something went wrong when trying to send the options to the file repository: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + +class DialogManageOptionsLocal( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._listbook = ClientGUICommon.ListBook( self ) + + # files and memory + + self._file_page = wx.Panel( self._listbook ) + self._file_page.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._export_location = wx.DirPickerCtrl( self._file_page, style = wx.DIRP_USE_TEXTCTRL ) + + if self._options[ 'export_path' ] is not None: self._export_location.SetPath( HC.ConvertPortablePathToAbsPath( self._options[ 'export_path' ] ) ) + + self._exclude_deleted_files = wx.CheckBox( self._file_page, label='' ) + self._exclude_deleted_files.SetValue( self._options[ 'exclude_deleted_files' ] ) + + self._thumbnail_cache_size = wx.SpinCtrl( self._file_page, initial = int( self._options[ 'thumbnail_cache_size' ] / 1048576 ), min = 10, max = 3000 ) + self._thumbnail_cache_size.Bind( wx.EVT_SPINCTRL, self.EventThumbnailsUpdate ) + + self._estimated_number_thumbnails = wx.StaticText( self._file_page, label = '', style = wx.ST_NO_AUTORESIZE ) + + self._preview_cache_size = wx.SpinCtrl( self._file_page, initial = int( self._options[ 'preview_cache_size' ] / 1048576 ), min = 20, max = 3000 ) + self._preview_cache_size.Bind( wx.EVT_SPINCTRL, self.EventPreviewsUpdate ) + + self._estimated_number_previews = wx.StaticText( self._file_page, label = '', style = wx.ST_NO_AUTORESIZE ) + + self._fullscreen_cache_size = wx.SpinCtrl( self._file_page, initial = int( self._options[ 'fullscreen_cache_size' ] / 1048576 ), min = 100, max = 3000 ) + self._fullscreen_cache_size.Bind( wx.EVT_SPINCTRL, 
self.EventFullscreensUpdate ) + + self._estimated_number_fullscreens = wx.StaticText( self._file_page, label = '', style = wx.ST_NO_AUTORESIZE ) + + ( thumbnail_width, thumbnail_height ) = self._options[ 'thumbnail_dimensions' ] + + self._thumbnail_width = wx.SpinCtrl( self._file_page, initial = thumbnail_width, min=20, max=200 ) + self._thumbnail_width.Bind( wx.EVT_SPINCTRL, self.EventThumbnailsUpdate ) + + self._thumbnail_height = wx.SpinCtrl( self._file_page, initial = thumbnail_height, min=20, max=200 ) + self._thumbnail_height.Bind( wx.EVT_SPINCTRL, self.EventThumbnailsUpdate ) + + self._num_autocomplete_chars = wx.SpinCtrl( self._file_page, initial = self._options[ 'num_autocomplete_chars' ], min = 1, max = 100 ) + self._num_autocomplete_chars.SetToolTipString( 'how many characters you enter before the gui fetches autocomplete results from the db' + os.linesep + 'increase this if you find autocomplete results are slow' ) + + self._listbook.AddPage( self._file_page, 'files and memory' ) + + # gui + + self._gui_page = wx.Panel( self._listbook ) + self._gui_page.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._gui_capitalisation = wx.CheckBox( self._gui_page ) + self._gui_capitalisation.SetValue( self._options[ 'gui_capitalisation' ] ) + + self._gui_show_all_tags_in_autocomplete = wx.CheckBox( self._gui_page ) + self._gui_show_all_tags_in_autocomplete.SetValue( self._options[ 'show_all_tags_in_autocomplete' ] ) + + self._default_tag_sort = wx.Choice( self._gui_page ) + + self._default_tag_sort.Append( 'lexicographic (a-z)', CC.SORT_BY_LEXICOGRAPHIC_ASC ) + self._default_tag_sort.Append( 'lexicographic (z-a)', CC.SORT_BY_LEXICOGRAPHIC_DESC ) + self._default_tag_sort.Append( 'incidence (desc)', CC.SORT_BY_INCIDENCE_DESC ) + self._default_tag_sort.Append( 'incidence (asc)', CC.SORT_BY_INCIDENCE_ASC ) + + if self._options[ 'default_tag_sort' ] == CC.SORT_BY_LEXICOGRAPHIC_ASC: self._default_tag_sort.Select( 0 ) + elif self._options[ 'default_tag_sort' ] == CC.SORT_BY_LEXICOGRAPHIC_DESC: self._default_tag_sort.Select( 1 ) + elif self._options[ 'default_tag_sort' ] == CC.SORT_BY_INCIDENCE_DESC: self._default_tag_sort.Select( 2 ) + elif self._options[ 'default_tag_sort' ] == CC.SORT_BY_INCIDENCE_ASC: self._default_tag_sort.Select( 3 ) + + service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ) ) + + self._default_tag_repository = wx.Choice( self._gui_page ) + for service_identifier in service_identifiers: self._default_tag_repository.Append( service_identifier.GetName(), service_identifier ) + + self._default_tag_repository.SetStringSelection( self._options[ 'default_tag_repository' ].GetName() ) + + self._listbook.AddPage( self._gui_page, 'gui' ) + + # default file system predicates + + system_predicates = self._options[ 'file_system_predicates' ] + + self._file_system_predicates_page = wx.Panel( self._listbook ) + self._file_system_predicates_page.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + ( sign, years, months, days ) = system_predicates[ 'age' ] + + self._file_system_predicate_age_sign = wx.Choice( self._file_system_predicates_page, choices=[ '<', u'\u2248', '>' ] ) + self._file_system_predicate_age_sign.SetSelection( sign ) + + self._file_system_predicate_age_years = wx.SpinCtrl( self._file_system_predicates_page, initial = years, max = 30 ) + self._file_system_predicate_age_months = wx.SpinCtrl( self._file_system_predicates_page, initial = months, max = 60 ) + 
self._file_system_predicate_age_days = wx.SpinCtrl( self._file_system_predicates_page, initial = days, max = 90 ) + + ( sign, s, ms ) = system_predicates[ 'duration' ] + + self._file_system_predicate_duration_sign = wx.Choice( self._file_system_predicates_page, choices=[ '<', u'\u2248', '=', '>' ] ) + self._file_system_predicate_duration_sign.SetSelection( sign ) + + self._file_system_predicate_duration_s = wx.SpinCtrl( self._file_system_predicates_page, initial = s, max = 3599 ) + self._file_system_predicate_duration_ms = wx.SpinCtrl( self._file_system_predicates_page, initial = ms, max = 999 ) + + ( sign, height ) = system_predicates[ 'height' ] + + self._file_system_predicate_height_sign = wx.Choice( self._file_system_predicates_page, choices=[ '<', u'\u2248', '=', '>' ] ) + self._file_system_predicate_height_sign.SetSelection( sign ) + + self._file_system_predicate_height = wx.SpinCtrl( self._file_system_predicates_page, initial = height, max = 200000 ) + + limit = system_predicates[ 'limit' ] + + self._file_system_predicate_limit = wx.SpinCtrl( self._file_system_predicates_page, initial = limit, max = 1000000 ) + + ( media, type ) = system_predicates[ 'mime' ] + + self._file_system_predicate_mime_media = wx.Choice( self._file_system_predicates_page, choices=[ 'image', 'application' ] ) + self._file_system_predicate_mime_media.SetSelection( media ) + self._file_system_predicate_mime_media.Bind( wx.EVT_CHOICE, self.EventFileSystemPredicateMime ) + + self._file_system_predicate_mime_type = wx.Choice( self._file_system_predicates_page, choices=[], size = ( 120, -1 ) ) + + self.EventFileSystemPredicateMime( None ) + + self._file_system_predicate_mime_type.SetSelection( type ) + + ( sign, num_tags ) = system_predicates[ 'num_tags' ] + + self._file_system_predicate_num_tags_sign = wx.Choice( self._file_system_predicates_page, choices=[ '<', '=', '>' ] ) + self._file_system_predicate_num_tags_sign.SetSelection( sign ) + + self._file_system_predicate_num_tags = wx.SpinCtrl( self._file_system_predicates_page, initial = num_tags, max = 2000 ) + + ( sign, value ) = system_predicates[ 'local_rating_numerical' ] + + self._file_system_predicate_local_rating_numerical_sign = wx.Choice( self._file_system_predicates_page, choices=[ '>', '<', '=', u'\u2248', '=rated', '=not rated', '=uncertain' ] ) + self._file_system_predicate_local_rating_numerical_sign.SetSelection( sign ) + + self._file_system_predicate_local_rating_numerical_value = wx.SpinCtrl( self._file_system_predicates_page, initial = value, min = 0, max = 50000 ) + + value = system_predicates[ 'local_rating_like' ] + + self._file_system_predicate_local_rating_like_value = wx.Choice( self._file_system_predicates_page, choices=[ 'like', 'dislike', 'rated', 'not rated' ] ) + self._file_system_predicate_local_rating_like_value.SetSelection( value ) + + ( sign, width, height ) = system_predicates[ 'ratio' ] + + self._file_system_predicate_ratio_sign = wx.Choice( self._file_system_predicates_page, choices=[ '=', u'\u2248' ] ) + self._file_system_predicate_ratio_sign.SetSelection( sign ) + + self._file_system_predicate_ratio_width = wx.SpinCtrl( self._file_system_predicates_page, initial = width, max = 50000 ) + + self._file_system_predicate_ratio_height = wx.SpinCtrl( self._file_system_predicates_page, initial = height, max = 50000 ) + + ( sign, size, unit ) = system_predicates[ 'size' ] + + self._file_system_predicate_size_sign = wx.Choice( self._file_system_predicates_page, choices=[ '<', u'\u2248', '=', '>' ] ) + 
self._file_system_predicate_size_sign.SetSelection( sign ) + + self._file_system_predicate_size = wx.SpinCtrl( self._file_system_predicates_page, initial = size, max = 1048576 ) + + self._file_system_predicate_size_unit = wx.Choice( self._file_system_predicates_page, choices=[ 'b', 'B', 'Kb', 'KB', 'Mb', 'MB', 'Gb', 'GB' ] ) + self._file_system_predicate_size_unit.SetSelection( unit ) + + ( sign, width ) = system_predicates[ 'width' ] + + self._file_system_predicate_width_sign = wx.Choice( self._file_system_predicates_page, choices=[ '<', u'\u2248', '=', '>' ] ) + self._file_system_predicate_width_sign.SetSelection( sign ) + + self._file_system_predicate_width = wx.SpinCtrl( self._file_system_predicates_page, initial = width, max = 200000 ) + + self._listbook.AddPage( self._file_system_predicates_page, 'default file system predicates' ) + + # colours + + self._colour_page = wx.Panel( self._listbook ) + self._colour_page.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._namespace_colours = ClientGUICommon.TagsBoxOptions( self._colour_page, self._options[ 'namespace_colours' ] ) + + self._edit_namespace_colour = wx.Button( self._colour_page, label = 'edit selected' ) + self._edit_namespace_colour.Bind( wx.EVT_BUTTON, self.EventEditNamespaceColour ) + + self._new_namespace_colour = wx.TextCtrl( self._colour_page, style = wx.TE_PROCESS_ENTER ) + self._new_namespace_colour.Bind( wx.EVT_KEY_DOWN, self.EventKeyDownNamespace ) + + self._listbook.AddPage( self._colour_page, 'colours' ) + + # sort/collect + + self._sort_by_page = wx.Panel( self._listbook ) + self._sort_by_page.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._default_sort = ClientGUICommon.ChoiceSort( self._sort_by_page, sort_by = self._options[ 'sort_by' ] ) + + self._default_collect = ClientGUICommon.ChoiceCollect( self._sort_by_page, sort_by = self._options[ 'sort_by' ] ) + + self._sort_by = wx.ListBox( self._sort_by_page ) + self._sort_by.Bind( wx.EVT_LEFT_DCLICK, self.EventRemoveSortBy ) + for ( sort_by_type, sort_by ) in self._options[ 'sort_by' ]: self._sort_by.Append( '-'.join( sort_by ), sort_by ) + + self._new_sort_by = wx.TextCtrl( self._sort_by_page, style = wx.TE_PROCESS_ENTER ) + self._new_sort_by.Bind( wx.EVT_KEY_DOWN, self.EventKeyDownSortBy ) + + self._listbook.AddPage( self._sort_by_page, 'sort/collect' ) + + # shortcuts + + self._shortcuts_page = wx.Panel( self._listbook ) + self._shortcuts_page.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._shortcuts = ClientGUICommon.SaneListCtrl( self._shortcuts_page, 480, [ ( 'modifier', 120 ), ( 'key', 120 ), ( 'action', -1 ) ] ) + + for ( modifier, key_dict ) in self._options[ 'shortcuts' ].items(): + + for ( key, action ) in key_dict.items(): + + ( pretty_modifier, pretty_key, pretty_action ) = HC.ConvertShortcutToPrettyShortcut( modifier, key, action ) + + self._shortcuts.Append( ( pretty_modifier, pretty_key, pretty_action ), ( modifier, key, action ) ) + + + + self._SortListCtrl() + + self._shortcuts_add = wx.Button( self._shortcuts_page, label = 'add' ) + self._shortcuts_add.Bind( wx.EVT_BUTTON, self.EventShortcutsAdd ) + + self._shortcuts_edit = wx.Button( self._shortcuts_page, label = 'edit' ) + self._shortcuts_edit.Bind( wx.EVT_BUTTON, self.EventShortcutsEdit ) + + self._shortcuts_delete = wx.Button( self._shortcuts_page, label = 'delete' ) + self._shortcuts_delete.Bind( wx.EVT_BUTTON, self.EventShortcutsDelete ) + + self._listbook.AddPage( 
self._shortcuts_page, 'shortcuts' ) + + # + + self._save_button = wx.Button( self, label='Save' ) + self._save_button.Bind( wx.EVT_BUTTON, self.EventOK ) + self._save_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._close_button = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._close_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._close_button.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + thumbnails_sizer = wx.BoxSizer( wx.HORIZONTAL ) + + thumbnails_sizer.AddF( self._thumbnail_cache_size, FLAGS_MIXED ) + thumbnails_sizer.AddF( self._estimated_number_thumbnails, FLAGS_MIXED ) + + previews_sizer = wx.BoxSizer( wx.HORIZONTAL ) + + previews_sizer.AddF( self._preview_cache_size, FLAGS_MIXED ) + previews_sizer.AddF( self._estimated_number_previews, FLAGS_MIXED ) + + fullscreens_sizer = wx.BoxSizer( wx.HORIZONTAL ) + + fullscreens_sizer.AddF( self._fullscreen_cache_size, FLAGS_MIXED ) + fullscreens_sizer.AddF( self._estimated_number_fullscreens, FLAGS_MIXED ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self._file_page, label='Default export directory: ' ), FLAGS_MIXED ) + gridbox.AddF( self._export_location, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self._file_page, label='Exclude deleted files from new imports and remote searches: ' ), FLAGS_MIXED ) + gridbox.AddF( self._exclude_deleted_files, FLAGS_MIXED ) + gridbox.AddF( wx.StaticText( self._file_page, label='MB memory reserved for thumbnail cache: ' ), FLAGS_MIXED ) + gridbox.AddF( thumbnails_sizer, FLAGS_NONE ) + gridbox.AddF( wx.StaticText( self._file_page, label='MB memory reserved for preview cache: ' ), FLAGS_MIXED ) + gridbox.AddF( previews_sizer, FLAGS_NONE ) + gridbox.AddF( wx.StaticText( self._file_page, label='MB memory reserved for fullscreen cache: ' ), FLAGS_MIXED ) + gridbox.AddF( fullscreens_sizer, FLAGS_NONE ) + gridbox.AddF( wx.StaticText( self._file_page, label='Thumbnail width: ' ), FLAGS_MIXED ) + gridbox.AddF( self._thumbnail_width, FLAGS_MIXED ) + gridbox.AddF( wx.StaticText( self._file_page, label='Thumbnail height: ' ), FLAGS_MIXED ) + gridbox.AddF( self._thumbnail_height, FLAGS_MIXED ) + gridbox.AddF( wx.StaticText( self._file_page, label='Autocomplete character threshold: ' ), FLAGS_MIXED ) + gridbox.AddF( self._num_autocomplete_chars, FLAGS_MIXED ) + + self._file_page.SetSizer( gridbox ) + + # + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self._gui_page, label = 'Default tag service in manage tag dialogs:' ), FLAGS_MIXED ) + gridbox.AddF( self._default_tag_repository, FLAGS_MIXED ) + + gridbox.AddF( wx.StaticText( self._gui_page, label = 'Default tag sort on management panel:' ), FLAGS_MIXED ) + gridbox.AddF( self._default_tag_sort, FLAGS_MIXED ) + + gridbox.AddF( wx.StaticText( self._gui_page, label='Capitalise gui: ' ), FLAGS_MIXED ) + gridbox.AddF( self._gui_capitalisation, FLAGS_MIXED ) + + gridbox.AddF( wx.StaticText( self._gui_page, label='By default, search non-local tags in write-autocomplete: ' ), FLAGS_MIXED ) + gridbox.AddF( self._gui_show_all_tags_in_autocomplete, FLAGS_MIXED ) + + self._gui_page.SetSizer( gridbox ) + + # + + vbox = wx.BoxSizer( wx.VERTICAL ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:age' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_age_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_age_years, 
FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='years' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_age_months, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='months' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_age_days, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='days' ), FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:duration' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_duration_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_duration_s, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='s' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_duration_ms, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='ms' ), FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:height' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_height_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_height, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:limit=' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_limit, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:mime' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_mime_media, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='/' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_mime_type, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:numtags' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_num_tags_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_num_tags, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:local_rating_like' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_local_rating_like_value, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:local_rating_numerical' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_local_rating_numerical_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_local_rating_numerical_value, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:ratio' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_ratio_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_ratio_width, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label=':' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_ratio_height, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = 
wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:size' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_size_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_size, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_size_unit, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self._file_system_predicates_page, label='system:width' ), FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_width_sign, FLAGS_MIXED ) + hbox.AddF( self._file_system_predicate_width, FLAGS_MIXED ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + self._file_system_predicates_page.SetSizer( vbox ) + + # + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._namespace_colours, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( self._new_namespace_colour, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._edit_namespace_colour, FLAGS_EXPAND_PERPENDICULAR ) + + self._colour_page.SetSizer( vbox ) + + # + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self._sort_by_page, label='Default sort: ' ), FLAGS_MIXED ) + gridbox.AddF( self._default_sort, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self._sort_by_page, label='Default collect: ' ), FLAGS_MIXED ) + gridbox.AddF( self._default_collect, FLAGS_EXPAND_BOTH_WAYS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._sort_by, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( self._new_sort_by, FLAGS_EXPAND_PERPENDICULAR ) + + self._sort_by_page.SetSizer( vbox ) + + # + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self._shortcuts_page, label = 'These shortcuts are global to the main gui! You probably want to stick to function keys or ctrl + something!' 
), FLAGS_MIXED ) + vbox.AddF( self._shortcuts, FLAGS_EXPAND_BOTH_WAYS ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._shortcuts_add, FLAGS_BUTTON_SIZERS ) + hbox.AddF( self._shortcuts_edit, FLAGS_BUTTON_SIZERS ) + hbox.AddF( self._shortcuts_delete, FLAGS_BUTTON_SIZERS ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + + self._shortcuts_page.SetSizer( vbox ) + + # + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._save_button, FLAGS_SMALL_INDENT ) + buttons.AddF( self._close_button, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._listbook, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if x < 800: x = 800 + if y < 600: y = 600 + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'hydrus client options' ) + + InitialiseControls() + + InitialisePanel() + + self.EventFullscreensUpdate( None ) + self.EventPreviewsUpdate( None ) + self.EventThumbnailsUpdate( None ) + + wx.CallAfter( self._file_page.Layout ) # draws the static texts correctly + + + def _SortListCtrl( self ): self._shortcuts.SortListItems( 2 ) + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventEditNamespaceColour( self, event ): + + result = self._namespace_colours.GetSelectedNamespaceColour() + + if result is not None: + + ( namespace, colour ) = result + + colour_data = wx.ColourData() + + colour_data.SetColour( colour ) + colour_data.SetChooseFull( True ) + + with wx.ColourDialog( self, data = colour_data ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + colour_data = dlg.GetColourData() + + colour = colour_data.GetColour() + + self._namespace_colours.SetNamespaceColour( namespace, colour ) + + + + + + def EventFileSystemPredicateMime( self, event ): + + media = self._file_system_predicate_mime_media.GetStringSelection() + + self._file_system_predicate_mime_type.Clear() + + if media == 'image': + + self._file_system_predicate_mime_type.Append( 'any', HC.IMAGES ) + self._file_system_predicate_mime_type.Append( 'jpeg', HC.IMAGE_JPEG ) + self._file_system_predicate_mime_type.Append( 'png', HC.IMAGE_PNG ) + self._file_system_predicate_mime_type.Append( 'gif', HC.IMAGE_GIF ) + + elif media == 'application': + + self._file_system_predicate_mime_type.Append( 'x-shockwave-flash', HC.APPLICATION_FLASH ) + + elif media == 'video': + + self._file_system_predicate_mime_type.Append( 'x-flv', HC.VIDEO_FLV ) + + + self._file_system_predicate_mime_type.SetSelection( 0 ) + + + def EventFullscreensUpdate( self, event ): + + ( width, height ) = wx.GetDisplaySize() + + estimated_bytes_per_fullscreen = 3 * width * height + + self._estimated_number_fullscreens.SetLabel( '(about ' + HC.ConvertIntToPrettyString( ( self._fullscreen_cache_size.GetValue() * 1048576 ) / estimated_bytes_per_fullscreen ) + '-' + HC.ConvertIntToPrettyString( ( self._fullscreen_cache_size.GetValue() * 1048576 ) / ( estimated_bytes_per_fullscreen / 4 ) ) + ' images)' ) + + + def EventKeyDownNamespace( self, event ): + + if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ): + + namespace = self._new_namespace_colour.GetValue() + + if namespace != '': + + self._namespace_colours.SetNamespaceColour( namespace, wx.Colour( random.randint( 0, 255 ), random.randint( 0, 255 ), random.randint( 0, 255 ) ) ) + + self._new_namespace_colour.SetValue( '' ) + + + else: event.Skip() + + + def EventKeyDownSortBy( self, event ): + + if event.KeyCode in ( wx.WXK_RETURN, 
wx.WXK_NUMPAD_ENTER ): + + sort_by_string = self._new_sort_by.GetValue() + + if sort_by_string != '': + + try: sort_by = sort_by_string.split( '-' ) + except: + + wx.MessageBox( 'Could not parse that sort by string!' ) + + return + + + self._sort_by.Append( sort_by_string, sort_by ) + + self._new_sort_by.SetValue( '' ) + + + else: event.Skip() + + + def EventOK( self, event ): + + self._options[ 'gui_capitalisation' ] = self._gui_capitalisation.GetValue() + self._options[ 'show_all_tags_in_autocomplete' ] = self._gui_show_all_tags_in_autocomplete.GetValue() + + self._options[ 'export_path' ] = HC.ConvertAbsPathToPortablePath( self._export_location.GetPath() ) + self._options[ 'default_sort' ] = self._default_sort.GetSelection() + self._options[ 'default_collect' ] = self._default_collect.GetSelection() + + self._options[ 'exclude_deleted_files' ] = self._exclude_deleted_files.GetValue() + + self._options[ 'thumbnail_cache_size' ] = self._thumbnail_cache_size.GetValue() * 1048576 + self._options[ 'preview_cache_size' ] = self._preview_cache_size.GetValue() * 1048576 + self._options[ 'fullscreen_cache_size' ] = self._fullscreen_cache_size.GetValue() * 1048576 + + self._options[ 'thumbnail_dimensions' ] = [ self._thumbnail_width.GetValue(), self._thumbnail_height.GetValue() ] + + self._options[ 'num_autocomplete_chars' ] = self._num_autocomplete_chars.GetValue() + + self._options[ 'namespace_colours' ] = self._namespace_colours.GetNamespaceColours() + + sort_by_choices = [] + + for sort_by in [ self._sort_by.GetClientData( i ) for i in range( self._sort_by.GetCount() ) ]: sort_by_choices.append( ( 'namespaces', sort_by ) ) + + self._options[ 'sort_by' ] = sort_by_choices + + system_predicates = {} + + system_predicates[ 'age' ] = ( self._file_system_predicate_age_sign.GetSelection(), self._file_system_predicate_age_years.GetValue(), self._file_system_predicate_age_months.GetValue(), self._file_system_predicate_age_days.GetValue() ) + system_predicates[ 'duration' ] = ( self._file_system_predicate_duration_sign.GetSelection(), self._file_system_predicate_duration_s.GetValue(), self._file_system_predicate_duration_ms.GetValue() ) + system_predicates[ 'height' ] = ( self._file_system_predicate_height_sign.GetSelection(), self._file_system_predicate_height.GetValue() ) + system_predicates[ 'limit' ] = self._file_system_predicate_limit.GetValue() + system_predicates[ 'mime' ] = ( self._file_system_predicate_mime_media.GetSelection(), self._file_system_predicate_mime_type.GetSelection() ) + system_predicates[ 'num_tags' ] = ( self._file_system_predicate_num_tags_sign.GetSelection(), self._file_system_predicate_num_tags.GetValue() ) + system_predicates[ 'local_rating_like' ] = self._file_system_predicate_local_rating_like_value.GetSelection() + system_predicates[ 'local_rating_numerical' ] = ( self._file_system_predicate_local_rating_numerical_sign.GetSelection(), self._file_system_predicate_local_rating_numerical_value.GetValue() ) + system_predicates[ 'ratio' ] = ( self._file_system_predicate_ratio_sign.GetSelection(), self._file_system_predicate_ratio_width.GetValue(), self._file_system_predicate_ratio_height.GetValue() ) + system_predicates[ 'size' ] = ( self._file_system_predicate_size_sign.GetSelection(), self._file_system_predicate_size.GetValue(), self._file_system_predicate_size_unit.GetSelection() ) + system_predicates[ 'width' ] = ( self._file_system_predicate_width_sign.GetSelection(), self._file_system_predicate_width.GetValue() ) + + self._options[ 'file_system_predicates' ] = 
system_predicates + + shortcuts = {} + + shortcuts[ wx.ACCEL_NORMAL ] = {} + shortcuts[ wx.ACCEL_CTRL ] = {} + shortcuts[ wx.ACCEL_ALT ] = {} + shortcuts[ wx.ACCEL_SHIFT ] = {} + + for ( modifier, key, action ) in self._shortcuts.GetClientData(): shortcuts[ modifier ][ key ] = action + + self._options[ 'shortcuts' ] = shortcuts + + self._options[ 'default_tag_repository' ] = self._default_tag_repository.GetClientData( self._default_tag_repository.GetSelection() ) + self._options[ 'default_tag_sort' ] = self._default_tag_sort.GetClientData( self._default_tag_sort.GetSelection() ) + + try: wx.GetApp().Write( 'save_options' ) + except: wx.MessageBox( traceback.format_exc() ) + + self.EndModal( wx.ID_OK ) + + + def EventRemoveSortBy( self, event ): + + selection = self._sort_by.GetSelection() + + if selection != wx.NOT_FOUND: self._sort_by.Delete( selection ) + + + def EventPreviewsUpdate( self, event ): + + estimated_bytes_per_preview = 3 * 400 * 400 + + self._estimated_number_previews.SetLabel( '(about ' + HC.ConvertIntToPrettyString( ( self._preview_cache_size.GetValue() * 1048576 ) / estimated_bytes_per_preview ) + ' previews)' ) + + + def EventShortcutsAdd( self, event ): + + with DialogInputShortcut( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + ( modifier, key, action ) = dlg.GetInfo() + + ( pretty_modifier, pretty_key, pretty_action ) = HC.ConvertShortcutToPrettyShortcut( modifier, key, action ) + + self._shortcuts.Append( ( pretty_modifier, pretty_key, pretty_action ), ( modifier, key, action ) ) + + self._SortListCtrl() + + + + + def EventShortcutsDelete( self, event ): self._shortcuts.RemoveAllSelected() + + def EventShortcutsEdit( self, event ): + + indices = self._shortcuts.GetAllSelected() + + for index in indices: + + ( modifier, key, action ) = self._shortcuts.GetClientData( index ) + + try: + + with DialogInputShortcut( self, modifier, key, action ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + ( modifier, key, action ) = dlg.GetInfo() + + ( pretty_modifier, pretty_key, pretty_action ) = HC.ConvertShortcutToPrettyShortcut( modifier, key, action ) + + self._shortcuts.UpdateRow( index, ( pretty_modifier, pretty_key, pretty_action ), ( modifier, key, action ) ) + + self._SortListCtrl() + + + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + def EventThumbnailsUpdate( self, event ): + + estimated_bytes_per_thumb = 3 * self._thumbnail_height.GetValue() * self._thumbnail_width.GetValue() + + self._estimated_number_thumbnails.SetLabel( '(about ' + HC.ConvertIntToPrettyString( ( self._thumbnail_cache_size.GetValue() * 1048576 ) / estimated_bytes_per_thumb ) + ' thumbnails)' ) + + +class DialogManageOptionsServerAdmin( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + self._max_monthly_data = ClientGUICommon.NoneableSpinCtrl( self, 'max monthly data (MB)', options[ 'max_monthly_data' ], multiplier = 1048576 ) + self._max_storage = ClientGUICommon.NoneableSpinCtrl( self, 'max storage (MB)', options[ 'max_storage' ], multiplier = 1048576 ) + + self._message = wx.TextCtrl( self, value = options[ 'message' ] ) + + self._save_button = wx.Button( self, label='Save' ) + self._save_button.Bind( wx.EVT_BUTTON, self.EventOK ) + self._save_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._close_button = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._close_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._close_button.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + gridbox = 
wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='Message' ), FLAGS_MIXED ) + gridbox.AddF( self._message, FLAGS_MIXED ) + + file_vbox = wx.BoxSizer( wx.VERTICAL ) + + file_vbox.AddF( wx.StaticText( self, label = '- server -' ), FLAGS_SMALL_INDENT ) + file_vbox.AddF( self._max_monthly_data, FLAGS_EXPAND_PERPENDICULAR ) + file_vbox.AddF( self._max_storage, FLAGS_EXPAND_PERPENDICULAR ) + file_vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._save_button, FLAGS_SMALL_INDENT ) + buttons.AddF( self._close_button, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( file_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x + 80, y ) ) + + + Dialog.__init__( self, parent, service_identifier.GetName() + ' options' ) + + self._service_identifier = service_identifier + + self._service = wx.GetApp().Read( 'service', service_identifier ) + + connection = self._service.GetConnection() + + options = connection.Get( 'options' ) + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOK( self, event ): + + options = {} + + options[ 'max_monthly_data' ] = self._max_monthly_data.GetValue() + + options[ 'max_storage' ] = self._max_storage.GetValue() + + options[ 'message' ] = self._message.GetValue() + + try: + + connection = self._service.GetConnection() + + connection.Post( 'options', options = options ) + + except Exception as e: wx.MessageBox( 'Something went wrong when trying to send the options to the server admin: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + +class DialogManageOptionsTagRepository( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + self._max_monthly_data = ClientGUICommon.NoneableSpinCtrl( self, 'max monthly data (MB)', options[ 'max_monthly_data' ], multiplier = 1048576 ) + + self._message = wx.TextCtrl( self, value = options[ 'message' ] ) + + self._save_button = wx.Button( self, label='Save' ) + self._save_button.Bind( wx.EVT_BUTTON, self.EventOK ) + self._save_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._close_button = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._close_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._close_button.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='Message' ), FLAGS_MIXED ) + gridbox.AddF( self._message, FLAGS_MIXED ) + + tag_vbox = wx.BoxSizer( wx.VERTICAL ) + + tag_vbox.AddF( wx.StaticText( self, label = '- tag repository -' ), FLAGS_SMALL_INDENT ) + tag_vbox.AddF( self._max_monthly_data, FLAGS_EXPAND_PERPENDICULAR ) + tag_vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._save_button, FLAGS_SMALL_INDENT ) + buttons.AddF( self._close_button, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( tag_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x + 80, y ) ) + + + Dialog.__init__( self, parent, service_identifier.GetName() + ' options' ) + + self._service_identifier = service_identifier + + 
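+ # read the service object from the client db, then fetch its current remote options over the service connection before the controls are built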
self._service = wx.GetApp().Read( 'service', service_identifier ) + + connection = self._service.GetConnection() + + options = connection.Get( 'options' ) + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOK( self, event ): + + options = {} + + options[ 'max_monthly_data' ] = self._max_monthly_data.GetValue() + + options[ 'message' ] = self._message.GetValue() + + try: + + connection = self._service.GetConnection() + + connection.Post( 'options', options = options ) + + except Exception as e: wx.MessageBox( 'Something went wrong when trying to send the options to the tag repository: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + +class DialogManageRatings( Dialog ): + + def __init__( self, parent, media ): + + def InitialiseControls(): + + service_identifiers = wx.GetApp().Read( 'service_identifiers', HC.RATINGS_SERVICES ) + + # sort according to local/remote, I guess + # and maybe sub-sort according to name? + # maybe just do two get s_i queries + + self._panels = [] + + for service_identifier in service_identifiers: self._panels.append( DialogManageRatingsPanel( self, service_identifier, media ) ) + + self._apply = wx.Button( self, label='Apply' ) + self._apply.Bind( wx.EVT_BUTTON, self.EventOk ) + self._apply.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + buttonbox = wx.BoxSizer( wx.HORIZONTAL ) + + buttonbox.AddF( self._apply, FLAGS_MIXED ) + buttonbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + for panel in self._panels: vbox.AddF( panel, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( buttonbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x + 200, y ) ) + + + self._hashes = HC.IntelligentMassUnion( ( m.GetHashes() for m in media ) ) + + Dialog.__init__( self, parent, 'manage ratings for ' + HC.ConvertIntToPrettyString( len( self._hashes ) ) + ' files' ) + + InitialiseControls() + + InitialisePanel() + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.RefreshAcceleratorTable() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + ( command, data ) = action + + if command == 'manage_ratings': self.EventCancel( event ) + elif command == 'ok': self.EventOk( event ) + else: event.Skip() + + + + def EventOk( self, event ): + + try: + + content_updates = [] + + for panel in self._panels: + + if panel.HasChanges(): + + service_identifier = panel.GetServiceIdentifier() + + rating = panel.GetRating() + + content_updates.append( CC.ContentUpdate( CC.CONTENT_UPDATE_RATING, service_identifier, self._hashes, info = rating ) ) + + + + if len( content_updates ) > 0: wx.GetApp().Write( 'content_updates', content_updates ) + + except Exception as e: wx.MessageBox( 'Saving pending mapping changes to DB raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def RefreshAcceleratorTable( self ): + + interested_actions = [ 'archive', 'close_page', 'filter', 'ratings_filter', 'manage_ratings', 'manage_tags', 'new_page', 'refresh', 'set_search_focus', 'show_hide_splitters', 'synchronised_wait_switch' ] + + entries = [] + + for ( modifier, key_dict ) in 
self._options[ 'shortcuts' ].items(): entries.extend( [ ( modifier, key, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) for ( key, action ) in key_dict.items() if action in interested_actions ] ) + + self.SetAcceleratorTable( wx.AcceleratorTable( entries ) ) + + +class DialogManageRatingsPanel( wx.Panel ): + + def __init__( self, parent, service_identifier, media ): + + wx.Panel.__init__( self, parent ) + + self._service_identifier = service_identifier + self._service = wx.GetApp().Read( 'service', service_identifier ) + + extra_info = self._service.GetExtraInfo() + + self._media = media + + service_type = service_identifier.GetType() + + def InitialiseControls(): + + self._current_score = wx.StaticText( self, style = wx.ALIGN_CENTER ) + + score_font = self._GetScoreFont() + + self._current_score.SetFont( score_font ) + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): all_rating_services = [ local_ratings for ( local_ratings, remote_ratings ) in [ media.GetRatings() for media in self._media ] ] + elif service_type in ( HC.RATING_LIKE_REPOSITORY, HC.RATING_NUMERICAL_REPOSITORY ): all_rating_services = [ remote_ratings for ( local_ratings, remote_ratings ) in [ media.GetRatings() for media in self._media ] ] + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.RATING_LIKE_REPOSITORY ): + + ( like, dislike ) = extra_info + + if service_type == HC.LOCAL_RATING_LIKE: + + ratings = [ rating_services.GetRating( self._service_identifier ) for rating_services in all_rating_services ] + + if all( ( i is None for i in ratings ) ): + + choices = [ like, dislike, 'make no changes' ] + + if len( self._media ) > 1: self._current_score.SetLabel( 'none rated' ) + else: self._current_score.SetLabel( 'not rated' ) + + elif None in ratings: + + choices = [ like, dislike, 'remove rating', 'make no changes' ] + + self._current_score.SetLabel( 'not all rated' ) + + else: + + if all( ( i == 1 for i in ratings ) ): + + choices = [ dislike, 'remove rating', 'make no changes' ] + + if len( self._media ) > 1: self._current_score.SetLabel( 'all ' + like ) + else: self._current_score.SetLabel( like ) + + elif all( ( i == 0 for i in ratings ) ): + + choices = [ like, 'remove rating', 'make no changes' ] + + if len( self._media ) > 1: self._current_score.SetLabel( 'all ' + dislike ) + else: self._current_score.SetLabel( dislike ) + + else: + + choices = [ like, dislike, 'remove rating', 'make no changes' ] + + + overall_rating = float( sum( ratings ) ) / float( len( ratings ) ) + + self._current_score.SetLabel( str( '%.2f' % overall_rating ) ) + + + if len( self._media ) > 1: + + ratings_counter = collections.Counter( ratings ) + + likes = ratings_counter[ 1 ] + dislikes = ratings_counter[ 0 ] + nones = ratings_counter[ None ] + + scores = [] + + if likes > 0: scores.append( str( likes ) + ' likes' ) + if dislikes > 0: scores.append( str( dislikes ) + ' dislikes' ) + if nones > 0: scores.append( str( nones ) + ' not rated' ) + + self._current_score.SetLabel( ', '.join( scores ) ) + + else: + + ( rating, ) = ratings + + if rating is None: self._current_score.SetLabel( 'not rated' ) + elif rating == 1: self._current_score.SetLabel( like ) + elif rating == 0: self._current_score.SetLabel( dislike ) + + + else: + + self._current_score.SetLabel( '23 ' + like + 's, 44 ' + dislike + 's' ) + + + elif service_type in ( HC.LOCAL_RATING_NUMERICAL, HC.RATING_NUMERICAL_REPOSITORY ): + + if service_type == HC.LOCAL_RATING_NUMERICAL: + + ( min, max ) = extra_info + + self._slider = wx.Slider( self, minValue = 
min, maxValue = max, style = wx.SL_AUTOTICKS | wx.SL_LABELS ) + self._slider.Bind( wx.EVT_SLIDER, self.EventSlider ) + + ratings = [ rating_services.GetRating( self._service_identifier ) for rating_services in all_rating_services ] + + if all( ( i is None for i in ratings ) ): + + choices = [ 'set rating', 'make no changes' ] + + if len( self._media ) > 1: self._current_score.SetLabel( 'none rated' ) + else: self._current_score.SetLabel( 'not rated' ) + + elif None in ratings: + + choices = [ 'set rating', 'remove rating', 'make no changes' ] + + if len( self._media ) > 1: self._current_score.SetLabel( 'not all rated' ) + else: self._current_score.SetLabel( 'not rated' ) + + else: + + # you know what? this should really be a bargraph or something! + # * + # * + # * + # * * + # * * * * + # None 0 1 2 3 4 5 + # but we can't rely on integers, so just think about it + # some kind of sense of distribution would be helpful though + + choices = [ 'set rating', 'remove rating', 'make no changes' ] + + overall_rating = float( sum( ratings ) ) / float( len( ratings ) ) + + overall_rating_converted = ( overall_rating * ( max - min ) ) + min + + self._slider.SetValue( int( overall_rating_converted + 0.5 ) ) + + str_overall_rating = str( '%.2f' % overall_rating_converted ) + + if min in ( 0, 1 ): str_overall_rating += '/' + str( '%.2f' % max ) + + self._current_score.SetLabel( str_overall_rating ) + + + else: + + self._current_score.SetLabel( '3.82/5' ) + + + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + self._choices = wx.Choice( self, choices = choices ) + + self._choices.SetSelection( self._choices.FindString( 'make no changes' ) ) + + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): label = 'local rating' + elif service_type in ( HC.RATING_LIKE_REPOSITORY, HC.RATING_NUMERICAL_REPOSITORY ): label = 'remote rating' + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self, label = '- ' + self._service_identifier.GetName() + ' -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( self._current_score, FLAGS_EXPAND_PERPENDICULAR ) + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): + + if service_type == HC.LOCAL_RATING_LIKE: + + vbox.AddF( self._choices, FLAGS_EXPAND_PERPENDICULAR ) + + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + vbox.AddF( self._slider, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._choices, FLAGS_EXPAND_PERPENDICULAR ) + + + + self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + + def _GetScoreFont( self ): + + normal_font = wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) + + normal_font_size = normal_font.GetPointSize() + normal_font_family = normal_font.GetFamily() + + return wx.Font( normal_font_size * 2, normal_font_family, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD ) + + + def EventSlider( self, event ): + + rating = self._slider.GetValue() + + self._choices.SetSelection( 0 ) + + self._choices.SetString( 0, 'set rating to ' + str( rating ) ) + + event.Skip() + + + def GetRating( self ): + + service_type = self._service_identifier.GetType() + + selection = self._choices.GetSelection() + + s = self._choices.GetString( selection ) + + if s == 'remove rating': return None + else: + + if service_type == HC.LOCAL_RATING_LIKE: + + ( like, dislike ) = self._service.GetExtraInfo() + + if s == like: rating = 1 + elif s == dislike: rating = 0 + + elif service_type 
== HC.LOCAL_RATING_NUMERICAL: rating = float( self._slider.GetValue() - self._slider.GetMin() ) / float( self._slider.GetMax() - self._slider.GetMin() ) + + + return rating + + + def HasChanges( self ): + + selection = self._choices.GetSelection() + + s = self._choices.GetString( selection ) + + if s == 'make no changes': return False + else: return True + + + def GetServiceIdentifier( self ): return self._service_identifier + +class DialogManageServer( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + self._edit_log = [] + + self._services_listbook = ClientGUICommon.ListBook( self ) + self._services_listbook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventServiceChanged ) + self._services_listbook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._service_types = wx.Choice( self ) + + for service_type in [ HC.TAG_REPOSITORY, HC.FILE_REPOSITORY, HC.MESSAGE_DEPOT ]: self._service_types.Append( HC.service_string_lookup[ service_type ], service_type ) + + self._service_types.SetSelection( 0 ) + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + self._add.SetForegroundColour( ( 0, 128, 0 ) ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._ok = wx.Button( self, label='ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + # goes after self._remove, because of events + + for service_identifier in self._service_identifiers: + + page = DialogManageServerServicePanel( self._services_listbook, service_identifier ) + + name = HC.service_string_lookup[ service_identifier.GetType() ] + + self._services_listbook.AddPage( page, name ) + + + + def InitialisePanel(): + + add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL ) + add_remove_hbox.AddF( self._service_types, FLAGS_MIXED ) + add_remove_hbox.AddF( self._add, FLAGS_MIXED ) + add_remove_hbox.AddF( self._remove, FLAGS_MIXED ) + + ok_hbox = wx.BoxSizer( wx.HORIZONTAL ) + ok_hbox.AddF( self._ok, FLAGS_MIXED ) + ok_hbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._services_listbook, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT ) + vbox.AddF( ok_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if y < 400: y = 400 # listbook's setsize ( -1, 400 ) is buggy + + self.SetInitialSize( ( 680, y ) ) + + + Dialog.__init__( self, parent, 'manage ' + service_identifier.GetName() + ' services' ) + + self._service = wx.GetApp().Read( 'service', service_identifier ) + + connection = self._service.GetConnection() + + self._service_identifiers = connection.Get( 'services' ) + + InitialiseControls() + + InitialisePanel() + + current_page = self._services_listbook.GetCurrentPage() + + if current_page.GetOriginalServiceIdentifier().GetType() == HC.SERVER_ADMIN: self._remove.Disable() + else: self._remove.Enable() + + + def _CheckCurrentServiceIsValid( self ): + + service_panel = self._services_listbook.GetCurrentPage() + + if service_panel is not None: + + port = service_panel.GetInfo() + + for existing_port in [ page.GetInfo() for page in self._services_listbook.GetNameToPageDict().values() if page 
!= service_panel ]: + + if port == existing_port: raise Exception( 'That port is already in use!' ) + + + + + def EventAdd( self, event ): + + try: + + self._CheckCurrentServiceIsValid() + + service_type = self._service_types.GetClientData( self._service_types.GetSelection() ) + + existing_ports = [ page.GetInfo() for page in self._services_listbook.GetNameToPageDict().values() ] + + port = HC.DEFAULT_SERVICE_PORT + + while port in existing_ports: port += 1 + + service_identifier = HC.ServerServiceIdentifier( service_type, port ) + + self._edit_log.append( ( HC.ADD, service_identifier ) ) + + page = DialogManageServerServicePanel( self._services_listbook, service_identifier ) + + name = HC.service_string_lookup[ service_type ] + + self._services_listbook.AddPage( page, name, select = True ) + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOk( self, event ): + + try: self._CheckCurrentServiceIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + return + + + for page in self._services_listbook.GetNameToPageDict().values(): + + if page.HasChanges(): self._edit_log.append( ( HC.EDIT, ( page.GetOriginalServiceIdentifier(), page.GetInfo() ) ) ) + + + try: + + if len( self._edit_log ) > 0: + + connection = self._service.GetConnection() + + connection.Post( 'servicesmodification', edit_log = self._edit_log ) + + wx.GetApp().Write( 'update_server_services', self._service.GetServiceIdentifier(), self._edit_log ) + + + except Exception as e: wx.MessageBox( 'Saving services to server raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def EventRemove( self, event ): + + service_panel = self._services_listbook.GetCurrentPage() + + if service_panel is not None: + + service_identifier = service_panel.GetOriginalServiceIdentifier() + + self._edit_log.append( ( HC.DELETE, service_identifier ) ) + + self._services_listbook.DeleteCurrentPage() + + + + def EventServiceChanged( self, event ): + + page = self._services_listbook.GetCurrentPage() + + if page.GetOriginalServiceIdentifier().GetType() == HC.SERVER_ADMIN: self._remove.Disable() + else: self._remove.Enable() + + + def EventServiceChanging( self, event ): + + try: self._CheckCurrentServiceIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + event.Veto() + + + +class DialogManageServerServicePanel( wx.Panel ): + + def __init__( self, parent, service_identifier ): + + wx.Panel.__init__( self, parent ) + + self._service_identifier = service_identifier + + service_type = service_identifier.GetType() + + def InitialiseControls(): + + self._service_port = wx.SpinCtrl( self, min = 1, max = 65535 ) + self._service_port.SetValue( service_identifier.GetPort() ) + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='port' ), FLAGS_MIXED ) + gridbox.AddF( self._service_port, FLAGS_EXPAND_BOTH_WAYS ) + + vbox.AddF( wx.StaticText( self, label = '- service -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( gridbox, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + + def GetInfo( self ): + + port = self._service_port.GetValue() + + return port + + + def HasChanges( self ): + + port = self.GetInfo() + + if port != self._service_identifier.GetPort(): return True + + 
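+ # the port is the only editable field on this panel, so no port change means there is nothing to save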
return False + + + def GetOriginalServiceIdentifier( self ): return self._service_identifier + +class DialogManageServices( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._edit_log = [] + + self._listbook = ClientGUICommon.ListBook( self ) + self._listbook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._local_ratings_like = ClientGUICommon.ListBook( self._listbook ) + self._local_ratings_like.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._local_ratings_numerical = ClientGUICommon.ListBook( self._listbook ) + self._local_ratings_numerical.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._tag_repositories = ClientGUICommon.ListBook( self._listbook ) + self._tag_repositories.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._file_repositories = ClientGUICommon.ListBook( self._listbook ) + self._file_repositories.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._message_depots = ClientGUICommon.ListBook( self._listbook ) + self._message_depots.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + self._servers_admin = ClientGUICommon.ListBook( self._listbook ) + self._servers_admin.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventServiceChanging ) + + services = wx.GetApp().Read( 'services', HC.RESTRICTED_SERVICES + [ HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ] ) + + for service in services: + + service_identifier = service.GetServiceIdentifier() + + service_type = service_identifier.GetType() + name = service_identifier.GetName() + + if service_type in HC.REMOTE_SERVICES: credentials = service.GetCredentials() + else: credentials = None + + extra_info = service.GetExtraInfo() + + if service_type == HC.LOCAL_RATING_LIKE: listbook = self._local_ratings_like + elif service_type == HC.LOCAL_RATING_NUMERICAL: listbook = self._local_ratings_numerical + elif service_type == HC.TAG_REPOSITORY: listbook = self._tag_repositories + elif service_type == HC.FILE_REPOSITORY: listbook = self._file_repositories + elif service_type == HC.MESSAGE_DEPOT: listbook = self._message_depots + elif service_type == HC.SERVER_ADMIN: listbook = self._servers_admin + else: continue + + page_info = ( DialogManageServicesServicePanel, ( listbook, service_identifier, credentials, extra_info ), {} ) + + listbook.AddPage( page_info, name ) + + + self._listbook.AddPage( self._local_ratings_like, 'local ratings like' ) + self._listbook.AddPage( self._local_ratings_numerical, 'local ratings numerical' ) + self._listbook.AddPage( self._tag_repositories, 'tags' ) + self._listbook.AddPage( self._file_repositories, 'files' ) + self._listbook.AddPage( self._message_depots, 'message depots' ) + self._listbook.AddPage( self._servers_admin, 'servers admin' ) + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + self._add.SetForegroundColour( ( 0, 128, 0 ) ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._export = wx.Button( self, label='export' ) + self._export.Bind( wx.EVT_BUTTON, self.EventExport ) + + self._ok = wx.Button( self, label='ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + 
self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + # these need to be below the addpages because they'd fire the events + self._listbook.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGING, self.EventPageChanging, source = self._listbook ) + + + def InitialisePanel(): + + add_remove_hbox = wx.BoxSizer( wx.HORIZONTAL ) + add_remove_hbox.AddF( self._add, FLAGS_MIXED ) + add_remove_hbox.AddF( self._remove, FLAGS_MIXED ) + add_remove_hbox.AddF( self._export, FLAGS_MIXED ) + + ok_hbox = wx.BoxSizer( wx.HORIZONTAL ) + ok_hbox.AddF( self._ok, FLAGS_MIXED ) + ok_hbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._listbook, FLAGS_EXPAND_BOTH_WAYS ) + + vbox.AddF( add_remove_hbox, FLAGS_SMALL_INDENT ) + vbox.AddF( ok_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( 880, y + 220 ) ) + + + Dialog.__init__( self, parent, 'manage services' ) + + InitialiseControls() + + InitialisePanel() + + self.SetDropTarget( ClientGUICommon.FileDropTarget( self.Import ) ) + + + def _CheckCurrentServiceIsValid( self ): + + services_listbook = self._listbook.GetCurrentPage() + + if services_listbook is not None: + + service_panel = services_listbook.GetCurrentPage() + + if service_panel is not None: + + ( service_identifier, credentials, extra_info ) = service_panel.GetInfo() + + old_name = services_listbook.GetCurrentName() + name = service_identifier.GetName() + + if old_name is not None and name != old_name: + + if services_listbook.NameExists( name ): raise Exception( 'That name is already in use!' ) + + services_listbook.RenamePage( old_name, name ) + + + + + + def EventAdd( self, event ): + + with wx.TextEntryDialog( self, 'Enter new service\'s name' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + try: + + name = dlg.GetValue() + + services_listbook = self._listbook.GetCurrentPage() + + if services_listbook.NameExists( name ): raise Exception( 'That name is already in use!' ) + + if name == '': raise Exception( 'Please enter a nickname for the service.' 
) + + if services_listbook == self._local_ratings_like: service_type = HC.LOCAL_RATING_LIKE + elif services_listbook == self._local_ratings_numerical: service_type = HC.LOCAL_RATING_NUMERICAL + elif services_listbook == self._tag_repositories: service_type = HC.TAG_REPOSITORY + elif services_listbook == self._file_repositories: service_type = HC.FILE_REPOSITORY + elif services_listbook == self._message_depots: service_type = HC.MESSAGE_DEPOT + elif services_listbook == self._servers_admin: service_type = HC.SERVER_ADMIN + + service_identifier = HC.ClientServiceIdentifier( os.urandom( 32 ), service_type, name ) + + if service_type in HC.REMOTE_SERVICES: + + if service_type == HC.SERVER_ADMIN: credentials = CC.Credentials( 'hostname', 45870, '' ) + elif service_type in HC.RESTRICTED_SERVICES: credentials = CC.Credentials( 'hostname', 45871, '' ) + else: credentials = CC.Credentials( 'hostname', 45871 ) + + else: credentials = None + + if service_type == HC.MESSAGE_DEPOT: + + identity_name = 'identity@' + name + check_period = 180 + private_key = HydrusMessageHandling.GenerateNewPrivateKey() + receive_anon = True + + extra_info = ( identity_name, check_period, private_key, receive_anon ) + + elif service_type == HC.LOCAL_RATING_LIKE: extra_info = ( 'like', 'dislike' ) + elif service_type == HC.LOCAL_RATING_NUMERICAL: extra_info = ( 0, 5 ) + else: extra_info = None + + self._edit_log.append( ( 'add', ( service_identifier, credentials, extra_info ) ) ) + + page = DialogManageServicesServicePanel( services_listbook, service_identifier, credentials, extra_info ) + + services_listbook.AddPage( page, name, select = True ) + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + self.EventAdd( event ) + + + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventExport( self, event ): + + services_listbook = self._listbook.GetCurrentPage() + + if services_listbook is not None: + + service_panel = services_listbook.GetCurrentPage() + + if service_panel is not None: + + ( service_identifier, credentials, extra_info ) = service_panel.GetInfo() + + old_name = services_listbook.GetCurrentName() + name = service_identifier.GetName() + + if old_name is not None and name != old_name: + + if services_listbook.NameExists( name ): raise Exception( 'That name is already in use!' 
) + + services_listbook.RenamePage( old_name, name ) + + + + + services_listbook = self._listbook.GetCurrentPage() + + if services_listbook is not None: + + service_panel = services_listbook.GetCurrentPage() + + ( service_identifier, credentials, extra_info ) = service_panel.GetInfo() + + name = service_identifier.GetName() + + try: + + with wx.FileDialog( self, 'select where to export service', defaultFile = name + '.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( ( service_identifier, credentials, extra_info ) ) ) + + + + except: + + with wx.FileDialog( self, 'select where to export service', defaultFile = 'service.yaml', style = wx.FD_SAVE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + with open( dlg.GetPath(), 'wb' ) as f: f.write( yaml.safe_dump( ( service_identifier, credentials, extra_info ) ) ) + + + + + + + def EventOk( self, event ): + + try: self._CheckCurrentServiceIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + return + + + all_pages = [] + + all_pages.extend( self._local_ratings_like.GetNameToPageDict().values() ) + all_pages.extend( self._local_ratings_numerical.GetNameToPageDict().values() ) + all_pages.extend( self._tag_repositories.GetNameToPageDict().values() ) + all_pages.extend( self._file_repositories.GetNameToPageDict().values() ) + all_pages.extend( self._message_depots.GetNameToPageDict().values() ) + all_pages.extend( self._servers_admin.GetNameToPageDict().values() ) + + for page in all_pages: + + if page.HasChanges(): self._edit_log.append( ( 'edit', ( page.GetOriginalServiceIdentifier(), page.GetInfo() ) ) ) + + + try: + + if len( self._edit_log ) > 0: wx.GetApp().Write( 'update_services', self._edit_log ) + + except Exception as e: wx.MessageBox( 'Saving services to DB raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def EventPageChanging( self, event ): + + try: self._CheckCurrentServiceIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + event.Veto() + + + + def EventRemove( self, event ): + + services_listbook = self._listbook.GetCurrentPage() + + service_panel = services_listbook.GetCurrentPage() + + if service_panel is not None: + + service_identifier = service_panel.GetOriginalServiceIdentifier() + + self._edit_log.append( ( 'delete', service_identifier ) ) + + services_listbook.DeleteCurrentPage() + + + + def EventServiceChanging( self, event ): + + try: self._CheckCurrentServiceIsValid() + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + event.Veto() + + + + def Import( self, paths ): + + self._CheckCurrentServiceIsValid() + + for path in paths: + + try: + + with open( path, 'rb' ) as f: file = f.read() + + ( service_identifier, credentials, extra_info ) = yaml.safe_load( file ) + + name = service_identifier.GetName() + + service_type = service_identifier.GetType() + + if service_type == HC.TAG_REPOSITORY: services_listbook = self._tag_repositories + elif service_type == HC.FILE_REPOSITORY: services_listbook = self._file_repositories + elif service_type == HC.MESSAGE_DEPOT: services_listbook = self._message_depots + elif service_type == HC.SERVER_ADMIN: services_listbook = self._servers_admin + + self._listbook.SelectPage( services_listbook ) + + if services_listbook.NameExists( name ): + + message = 'A service already exists with that name. Overwrite it?' 
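+ # on overwrite, the existing page's controls are just updated in place; no fresh 'add' entry goes into the edit log for it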
+ + with DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: + + page = services_listbook.GetNameToPageDict()[ name ] + + page.Update( service_identifier, credentials, extra_info ) + + + + else: + + self._edit_log.append( ( 'add', ( service_identifier, credentials, extra_info ) ) ) + + page = DialogManageServicesServicePanel( services_listbook, service_identifier, credentials, extra_info ) + + services_listbook.AddPage( page, name, select = True ) + + + except: + + wx.MessageBox( traceback.format_exc() ) + + + + +class DialogManageServicesServicePanel( wx.Panel ): + + def __init__( self, parent, service_identifier, credentials, extra_info ): + + wx.Panel.__init__( self, parent ) + + self._service_identifier = service_identifier + self._credentials = credentials + self._extra_info = extra_info + + service_type = service_identifier.GetType() + + def InitialiseControls(): + + self._service_name = wx.TextCtrl( self, value = self._service_identifier.GetName() ) + + if service_type in HC.REMOTE_SERVICES: self._service_credentials = wx.TextCtrl( self, value = self._credentials.GetConnectionString() ) + + if service_type == HC.MESSAGE_DEPOT: + + ( identity_name, check_period, private_key, receive_anon ) = self._extra_info + + self._identity_name = wx.TextCtrl( self, value = identity_name ) + + self._check_period = wx.SpinCtrl( self, min = 60, max = 86400 * 7, initial = check_period ) + + self._private_key = wx.TextCtrl( self, value = private_key, style = wx.TE_MULTILINE ) + + self._receive_anon = wx.CheckBox( self ) + self._receive_anon.SetValue( receive_anon ) + + elif service_identifier.GetType() == HC.LOCAL_RATING_LIKE: + + ( like, dislike ) = self._extra_info + + self._like = wx.TextCtrl( self, value = like ) + self._dislike = wx.TextCtrl( self, value = dislike ) + + elif service_identifier.GetType() == HC.LOCAL_RATING_NUMERICAL: + + ( lower, upper ) = self._extra_info + + self._lower = wx.SpinCtrl( self, min = -2000, max = 2000, initial = lower ) + self._upper = wx.SpinCtrl( self, min = -2000, max = 2000, initial = upper ) + + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='name' ), FLAGS_MIXED ) + gridbox.AddF( self._service_name, FLAGS_EXPAND_BOTH_WAYS ) + + if service_type in HC.REMOTE_SERVICES: + + gridbox.AddF( wx.StaticText( self, label='credentials' ), FLAGS_MIXED ) + gridbox.AddF( self._service_credentials, FLAGS_EXPAND_BOTH_WAYS ) + + + if service_type == HC.MESSAGE_DEPOT: + + gridbox.AddF( wx.StaticText( self, label='identity name' ), FLAGS_MIXED ) + gridbox.AddF( self._identity_name, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( self, label='update period' ), FLAGS_MIXED ) + gridbox.AddF( self._check_period, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( self, label='private key' ), FLAGS_MIXED ) + gridbox.AddF( self._private_key, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( self, label='receive messages from Anonymous?' 
), FLAGS_MIXED ) + gridbox.AddF( self._receive_anon, FLAGS_EXPAND_BOTH_WAYS ) + + elif service_identifier.GetType() == HC.LOCAL_RATING_LIKE: + + gridbox.AddF( wx.StaticText( self, label='like' ), FLAGS_MIXED ) + gridbox.AddF( self._like, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( self, label='dislike' ), FLAGS_MIXED ) + gridbox.AddF( self._dislike, FLAGS_EXPAND_BOTH_WAYS ) + + elif service_identifier.GetType() == HC.LOCAL_RATING_NUMERICAL: + + gridbox.AddF( wx.StaticText( self, label='lower limit' ), FLAGS_MIXED ) + gridbox.AddF( self._lower, FLAGS_EXPAND_BOTH_WAYS ) + + gridbox.AddF( wx.StaticText( self, label='upper limit' ), FLAGS_MIXED ) + gridbox.AddF( self._upper, FLAGS_EXPAND_BOTH_WAYS ) + + + vbox.AddF( wx.StaticText( self, label = '- service -' ), FLAGS_SMALL_INDENT ) + vbox.AddF( gridbox, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + + InitialiseControls() + + InitialisePanel() + + + def GetInfo( self ): + + service_key = self._service_identifier.GetServiceKey() + + service_type = self._service_identifier.GetType() + + name = self._service_name.GetValue() + + if name == '': raise Exception( 'Please enter a name' ) + + service_identifier = HC.ClientServiceIdentifier( service_key, service_type, name ) + + if service_type in HC.REMOTE_SERVICES: + + connection_string = self._service_credentials.GetValue() + + if connection_string == '': raise Exception( 'Please enter some credentials' ) + + if '@' in connection_string: + + try: ( access_key, address ) = connection_string.split( '@' ) + except: raise Exception( 'Could not parse those credentials - no \'@\' symbol!' ) + + try: access_key = access_key.decode( 'hex' ) + except: raise Exception( 'Could not parse those credentials - could not understand access key!' ) + + try: ( host, port ) = address.split( ':' ) + except: raise Exception( 'Could not parse those credentials - no \':\' symbol!' ) + + try: port = int( port ) + except: raise Exception( 'Could not parse those credentials - could not understand the port!' ) + + credentials = CC.Credentials( host, port, access_key ) + + else: + + try: ( host, port ) = connection_string.split( ':' ) + except: raise Exception( 'Could not parse those credentials - no \':\' symbol!' ) + + try: port = int( port ) + except: raise Exception( 'Could not parse those credentials - could not understand the port!' 
) + + credentials = CC.Credentials( host, port ) + + + else: credentials = None + + if service_type == HC.MESSAGE_DEPOT: extra_info = ( self._identity_name.GetValue(), self._check_period.GetValue(), self._private_key.GetValue(), self._receive_anon.GetValue() ) + elif service_type == HC.LOCAL_RATING_LIKE: extra_info = ( self._like.GetValue(), self._dislike.GetValue() ) + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + ( lower, upper ) = ( self._lower.GetValue(), self._upper.GetValue() ) + + if upper < lower: upper = lower + 1 + + extra_info = ( lower, upper ) + + else: extra_info = None + + return ( service_identifier, credentials, extra_info ) + + + def HasChanges( self ): + + ( service_identifier, credentials, extra_info ) = self.GetInfo() + + if service_identifier != self._service_identifier: return True + + if credentials != self._credentials: return True + + if extra_info != self._extra_info: return True + + return False + + + def GetOriginalServiceIdentifier( self ): return self._service_identifier + + def Update( self, service_identifier, credentials, extra_info ): + + service_type = service_identifier.GetType() + + self._service_name.SetValue( service_identifier.GetName() ) + + if service_type in HC.REMOTE_SERVICES: self._service_credentials.SetValue( credentials.GetConnectionString() ) + + if service_type == HC.MESSAGE_DEPOT: + + if len( extra_info ) == 3: + ( identity_name, check_period, private_key ) = extra_info + receive_anon = True + else: ( identity_name, check_period, private_key, receive_anon ) = extra_info + + self._identity_name.SetValue( identity_name ) + + self._check_period.SetValue( check_period ) + + self._private_key.SetValue( private_key ) + + self._receive_anon.SetValue( receive_anon ) + + elif service_type == HC.LOCAL_RATING_LIKE: + + ( like, dislike ) = extra_info + + self._like.SetValue( like ) + self._dislike.SetValue( dislike ) + + elif service_type == HC.LOCAL_RATING_NUMERICAL: + + ( lower, upper ) = extra_info + + self._lower.SetValue( lower ) + self._upper.SetValue( upper ) + + + +class DialogManageTagServicePrecedence( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + message = 'When services dispute over a file\'s tags,' + os.linesep + 'higher services will overrule those below.' 
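+ # the listbox mirrors the precedence order from the db (the top entry overrules those below, per the label); the up/down buttons reorder it and EventOK writes the new order back via 'set_tag_service_precedence'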
+ + self._explain = wx.StaticText( self, label = message ) + + self._tag_services = wx.ListBox( self ) + + tag_service_precedence = wx.GetApp().Read( 'tag_service_precedence' ) + + for service_identifier in tag_service_precedence: + + name = service_identifier.GetName() + + self._tag_services.Append( name, service_identifier ) + + + self._up = wx.Button( self, label = u'\u2191' ) + self._up.Bind( wx.EVT_BUTTON, self.EventUp ) + + self._down = wx.Button( self, label = u'\u2193' ) + self._down.Bind( wx.EVT_BUTTON, self.EventDown ) + + self._apply = wx.Button( self, label='apply' ) + self._apply.Bind( wx.EVT_BUTTON, self.EventOK ) + self._apply.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + updown_vbox = wx.BoxSizer( wx.VERTICAL ) + + updown_vbox.AddF( self._up, FLAGS_MIXED ) + updown_vbox.AddF( self._down, FLAGS_MIXED ) + + main_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + main_hbox.AddF( self._tag_services, FLAGS_EXPAND_BOTH_WAYS ) + main_hbox.AddF( updown_vbox, FLAGS_MIXED ) + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._apply, FLAGS_SMALL_INDENT ) + buttons.AddF( self._cancel, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._explain, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( main_hbox, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if y < 400: y = 400 + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'manage tag service precedence' ) + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOK( self, event ): + + try: + + service_identifiers = [ self._tag_services.GetClientData( i ) for i in range( self._tag_services.GetCount() ) ] + + wx.GetApp().Write( 'set_tag_service_precedence', service_identifiers ) + + except Exception as e: wx.MessageBox( 'Something went wrong when trying to save tag service precedence to the database: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def EventUp( self, event ): + + selection = self._tag_services.GetSelection() + + if selection != wx.NOT_FOUND: + + if selection > 0: + + service_identifier = self._tag_services.GetClientData( selection ) + + name = service_identifier.GetName() + + self._tag_services.Delete( selection ) + + self._tag_services.Insert( name, selection - 1, service_identifier ) + + self._tag_services.Select( selection - 1 ) + + + + + def EventDown( self, event ): + + selection = self._tag_services.GetSelection() + + if selection != wx.NOT_FOUND: + + if selection + 1 < self._tag_services.GetCount(): + + service_identifier = self._tag_services.GetClientData( selection ) + + name = service_identifier.GetName() + + self._tag_services.Delete( selection ) + + self._tag_services.Insert( name, selection + 1, service_identifier ) + + self._tag_services.Select( selection + 1 ) + + + + +class DialogManageTags( Dialog ): + + def __init__( self, parent, file_service_identifier, media ): + + def InitialiseControls(): + + self._tag_repositories = ClientGUICommon.ListBook( self ) + self._tag_repositories.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventServiceChanged ) + + service_identifiers = wx.GetApp().Read( 'service_identifiers', ( HC.TAG_REPOSITORY, ) ) + + for service_identifier in list( service_identifiers ) + [ 
CC.LOCAL_TAG_SERVICE_IDENTIFIER ]: + + service_type = service_identifier.GetType() + + page_info = ( DialogManageTagsPanel, ( self._tag_repositories, self._file_service_identifier, service_identifier, media ), {} ) + + name = service_identifier.GetName() + + self._tag_repositories.AddPage( page_info, name ) + + + default_tag_repository = self._options[ 'default_tag_repository' ] + + self._tag_repositories.Select( default_tag_repository.GetName() ) + + self._apply = wx.Button( self, label='Apply' ) + self._apply.Bind( wx.EVT_BUTTON, self.EventOk ) + self._apply.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + buttonbox = wx.BoxSizer( wx.HORIZONTAL ) + + buttonbox.AddF( self._apply, FLAGS_MIXED ) + buttonbox.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._tag_repositories, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( buttonbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x + 200, 500 ) ) + + + self._file_service_identifier = file_service_identifier + self._hashes = HC.IntelligentMassUnion( ( m.GetHashes() for m in media ) ) + + Dialog.__init__( self, parent, 'manage tags for ' + HC.ConvertIntToPrettyString( len( self._hashes ) ) + ' files' ) + + InitialiseControls() + + InitialisePanel() + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.RefreshAcceleratorTable() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + ( command, data ) = action + + if command == 'manage_tags': self.EventCancel( event ) + elif command == 'ok': self.EventOk( event ) + else: event.Skip() + + + + def EventOk( self, event ): + + try: + + content_updates = [] + + for page in self._tag_repositories.GetNameToPageDict().values(): + + if page.HasChanges(): + + service_identifier = page.GetServiceIdentifier() + + edit_log = page.GetEditLog() + + content_updates.append( CC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, self._hashes, info = edit_log ) ) + + + + if len( content_updates ) > 0: wx.GetApp().Write( 'content_updates', content_updates ) + + except Exception as e: wx.MessageBox( 'Saving pending mapping changes to DB raised this error: ' + unicode( e ) ) + + self.EndModal( wx.ID_OK ) + + + def EventServiceChanged( self, event ): + + page = self._tag_repositories.GetCurrentPage() + + wx.CallAfter( page.SetTagBoxFocus ) + + + def RefreshAcceleratorTable( self ): + + entries = [] + + for ( modifier, key_dict ) in self._options[ 'shortcuts' ].items(): entries.extend( [ ( modifier, key, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) for ( key, action ) in key_dict.items() ] ) + + self.SetAcceleratorTable( wx.AcceleratorTable( entries ) ) + + +class DialogManageTagsPanel( wx.Panel ): + + def __init__( self, parent, file_service_identifier, tag_service_identifier, media ): + + def InitialiseControls(): + + self._tags_box = ClientGUICommon.TagsBoxManage( self, self.AddTag, self._current_tags, self._deleted_tags, self._pending_tags, self._petitioned_tags ) + + self._add_tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self, self.AddTag, self._file_service_identifier, self._tag_service_identifier ) + + self._show_deleted_tags = wx.CheckBox( 
self, label='Show deleted tags' ) + self._show_deleted_tags.Bind( wx.EVT_CHECKBOX, self.EventShowDeletedTags ) + + self._modify_mappers = wx.Button( self, label='Modify mappers' ) + self._modify_mappers.Bind( wx.EVT_BUTTON, self.EventModify ) + + self._copy_tags = wx.Button( self, label = 'copy tags' ) + self._copy_tags.Bind( wx.EVT_BUTTON, self.EventCopyTags ) + + self._paste_tags = wx.Button( self, label = 'paste tags' ) + self._paste_tags.Bind( wx.EVT_BUTTON, self.EventPasteTags ) + + if self._i_am_local_tag_service: + + self._show_deleted_tags.Hide() + self._modify_mappers.Hide() + + else: + + if not self._account.HasPermission( HC.POST_DATA ): self._add_tag_box.Hide() + if not self._account.HasPermission( HC.MANAGE_USERS ): self._modify_mappers.Hide() + + + + def InitialisePanel(): + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + special_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + special_hbox.AddF( self._show_deleted_tags, FLAGS_MIXED ) + special_hbox.AddF( self._modify_mappers, FLAGS_MIXED ) + + copy_paste_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + copy_paste_hbox.AddF( self._copy_tags, FLAGS_MIXED ) + copy_paste_hbox.AddF( self._paste_tags, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._tags_box, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( self._add_tag_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( copy_paste_hbox, FLAGS_BUTTON_SIZERS ) + vbox.AddF( special_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + + wx.Panel.__init__( self, parent ) + + self._file_service_identifier = file_service_identifier + self._tag_service_identifier = tag_service_identifier + + self._i_am_local_tag_service = self._tag_service_identifier.GetType() == HC.LOCAL_TAG + + self._edit_log = [] + + if not self._i_am_local_tag_service: + + service = wx.GetApp().Read( 'service', tag_service_identifier ) + + self._account = service.GetAccount() + + + ( self._current_tags, self._deleted_tags, self._pending_tags, self._petitioned_tags ) = CC.MediaIntersectCDPPTagServiceIdentifiers( media, tag_service_identifier ) + + self._current_tags.sort() + self._pending_tags.sort() + + InitialiseControls() + + InitialisePanel() + + + def AddTag( self, tag ): + + if tag is None: wx.PostEvent( self, wx.CommandEvent( commandType = wx.wxEVT_COMMAND_MENU_SELECTED, winid = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'ok' ) ) ) + else: + + if self._i_am_local_tag_service: + + if tag in self._pending_tags: + + self._pending_tags.remove( tag ) + + self._tags_box.RescindPend( tag ) + + elif tag in self._petitioned_tags: + + self._petitioned_tags.remove( tag ) + + self._tags_box.RescindPetition( tag ) + + elif tag in self._current_tags: + + self._petitioned_tags.append( tag ) + + self._tags_box.PetitionTag( tag ) + + else: + + self._pending_tags.append( tag ) + + self._tags_box.PendTag( tag ) + + + self._edit_log = [] + + self._edit_log.extend( [ ( CC.CONTENT_UPDATE_ADD, tag ) for tag in self._pending_tags ] ) + self._edit_log.extend( [ ( CC.CONTENT_UPDATE_DELETE, tag ) for tag in self._petitioned_tags ] ) + + else: + + if tag in self._pending_tags: + + self._pending_tags.remove( tag ) + + self._tags_box.RescindPend( tag ) + + self._edit_log.append( ( CC.CONTENT_UPDATE_RESCIND_PENDING, tag ) ) + + elif tag in self._petitioned_tags: + + self._petitioned_tags.remove( tag ) + + self._tags_box.RescindPetition( tag ) + + self._edit_log.append( ( CC.CONTENT_UPDATE_RESCIND_PETITION, tag ) ) + + elif tag in self._current_tags: + + if self._account.HasPermission( HC.RESOLVE_PETITIONS ): + 
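+ # janitors with RESOLVE_PETITIONS petition immediately with an 'admin' reason; accounts with only POST_PETITIONS are asked for a reason in the dialog below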
+ self._edit_log.append( ( CC.CONTENT_UPDATE_PETITION, ( tag, 'admin' ) ) ) + + self._petitioned_tags.append( tag ) + + self._tags_box.PetitionTag( tag ) + + elif self._account.HasPermission( HC.POST_PETITIONS ): + + message = 'Enter a reason for this tag to be removed. A janitor will review your petition.' + + with wx.TextEntryDialog( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + self._edit_log.append( ( CC.CONTENT_UPDATE_PETITION, ( tag, dlg.GetValue() ) ) ) + + self._petitioned_tags.append( tag ) + + self._tags_box.PetitionTag( tag ) + + + + + elif tag in self._deleted_tags: + + if self._account.HasPermission( HC.RESOLVE_PETITIONS ): + + self._edit_log.append( ( CC.CONTENT_UPDATE_PENDING, tag ) ) + + self._pending_tags.append( tag ) + + self._tags_box.PendTag( tag ) + + + else: + + self._edit_log.append( ( CC.CONTENT_UPDATE_PENDING, tag ) ) + + self._pending_tags.append( tag ) + + self._tags_box.PendTag( tag ) + + + + + + def EventCopyTags( self, event ): + + if wx.TheClipboard.Open(): + + tags = self._current_tags + self._pending_tags + + text = yaml.safe_dump( tags ) + + data = wx.TextDataObject( text ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def EventModify( self, event ): + + tag = self._tags_box.GetSelectedTag() + + if tag is not None and tag in self._current_tags or tag in self._petitioned_tags: + + subject_identifiers = [ HC.AccountIdentifier( hash = hash, tag = tag ) for hash in self._hashes ] + + try: + + with DialogModifyAccounts( self, self._tag_service_identifier, subject_identifiers ) as dlg: dlg.ShowModal() + + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + def EventPasteTags( self, event ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject() + + wx.TheClipboard.GetData( data ) + + wx.TheClipboard.Close() + + text = data.GetText() + + try: + + tags = yaml.safe_load( text ) + + tags = [ tag for tag in tags if tag not in self._current_tags and tag not in self._pending_tags ] + + for tag in tags: self.AddTag( tag ) + + except: wx.MessageBox( 'I could not understand what was in the clipboard' ) + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' 
) + + + def EventShowDeletedTags( self, event ): self._tags_box.SetShowDeletedTags( self._show_deleted_tags.GetValue() ) + + def EventTagsBoxAction( self, event ): + + tag = self._tags_box.GetSelectedTag() + + if tag is not None: self.AddTag( tag ) + + + def GetEditLog( self ): return self._edit_log + + def GetServiceIdentifier( self ): return self._tag_service_identifier + + def HasChanges( self ): return len( self._edit_log ) > 0 + + def SetTagBoxFocus( self ): self._add_tag_box.SetFocus() + +class DialogMessage( Dialog ): + + def __init__( self, parent, message, ok_label = 'ok' ): + + def InitialiseControls(): + + self._ok = wx.Button( self, id = wx.ID_CANCEL, label = ok_label ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOk ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + + def InitialisePanel(): + + vbox = wx.BoxSizer( wx.VERTICAL ) + + text = wx.StaticText( self, label = str( message ) ) + + text.Wrap( 480 ) + + vbox.AddF( text, FLAGS_BIG_INDENT ) + vbox.AddF( self._ok, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'message', position = 'center' ) + + InitialiseControls() + + InitialisePanel() + + + def EventOk( self, event ): self.EndModal( wx.ID_OK ) + +class DialogModifyAccounts( Dialog ): + + def __init__( self, parent, service_identifier, subject_identifiers ): + + def InitialiseControls(): + + connection = self._service.GetConnection() + + if len( self._subject_identifiers ) == 1: + + ( subject_identifier, ) = self._subject_identifiers + + subject_string = connection.Get( 'accountinfo', subject_identifier = subject_identifier ) + + else: subject_string = 'modifying ' + HC.ConvertIntToPrettyString( len( self._subject_identifiers ) ) + ' accounts' + + self._subject_text = wx.StaticText( self, label = str( subject_string ) ) + + account_types = connection.Get( 'accounttypes' ) + + self._account_types = wx.Choice( self ) + + for account_type in account_types: self._account_types.Append( account_type.ConvertToString(), account_type ) + + self._account_types.SetSelection( 0 ) + + self._account_types_ok = wx.Button( self, label = 'Ok' ) + self._account_types_ok.Bind( wx.EVT_BUTTON, self.EventChangeAccountType ) + + self._add_to_expires = wx.Choice( self ) + + for ( string, value ) in HC.expirations: + + if value is not None: self._add_to_expires.Append( string, value ) # don't want 'add no limit' + + self._add_to_expires.SetSelection( 1 ) # three months + + self._add_to_expires_ok = wx.Button( self, label = 'Ok' ) + self._add_to_expires_ok.Bind( wx.EVT_BUTTON, self.EventAddToExpires ) + + self._set_expires = wx.Choice( self ) + for ( string, value ) in HC.expirations: self._set_expires.Append( string, value ) + self._set_expires.SetSelection( 1 ) # three months + + self._set_expires_ok = wx.Button( self, label = 'Ok' ) + self._set_expires_ok.Bind( wx.EVT_BUTTON, self.EventSetExpires ) + + self._ban = wx.Button( self, label = 'ban user' ) + self._ban.Bind( wx.EVT_BUTTON, self.EventBan ) + self._ban.SetBackgroundColour( ( 255, 0, 0 ) ) + self._ban.SetForegroundColour( ( 255, 255, 0 ) ) + + self._superban = wx.Button( self, label = 'ban user and delete every contribution they have ever made' ) + self._superban.Bind( wx.EVT_BUTTON, self.EventSuperban ) + self._superban.SetBackgroundColour( ( 255, 0, 0 ) ) + self._superban.SetForegroundColour( ( 255, 255, 0 ) ) + + self._exit = wx.Button( self, id = wx.ID_CANCEL, label='Exit' ) + self._exit.Bind( wx.EVT_BUTTON, lambda 
event: self.EndModal( wx.ID_OK ) ) + + if not self._service.GetAccount().HasPermission( HC.GENERAL_ADMIN ): + + self._account_types_ok.Disable() + self._add_to_expires_ok.Disable() + self._set_expires_ok.Disable() + + + + def InitialisePanel(): + + info_box = wx.BoxSizer( wx.VERTICAL ) + + info_box.AddF( wx.StaticText( self, label = '- account info -' ), FLAGS_SMALL_INDENT ) + info_box.AddF( self._subject_text, FLAGS_EXPAND_PERPENDICULAR ) + + account_types_box = wx.BoxSizer( wx.HORIZONTAL ) + + account_types_box.AddF( wx.StaticText( self, label = '- change account type -' ), FLAGS_SMALL_INDENT ) + account_types_box.AddF( self._account_types, FLAGS_EXPAND_BOTH_WAYS ) + account_types_box.AddF( self._account_types_ok, FLAGS_MIXED ) + + add_to_expires_box = wx.BoxSizer( wx.HORIZONTAL ) + + add_to_expires_box.AddF( wx.StaticText( self, label = 'Add to expires: ' ), FLAGS_MIXED ) + add_to_expires_box.AddF( self._add_to_expires, FLAGS_EXPAND_BOTH_WAYS ) + add_to_expires_box.AddF( self._add_to_expires_ok, FLAGS_MIXED ) + + set_expires_box = wx.BoxSizer( wx.HORIZONTAL ) + + set_expires_box.AddF( wx.StaticText( self, label = 'Set expires to: ' ), FLAGS_MIXED ) + set_expires_box.AddF( self._set_expires, FLAGS_EXPAND_BOTH_WAYS ) + set_expires_box.AddF( self._set_expires_ok, FLAGS_MIXED ) + + expires_box = wx.BoxSizer( wx.VERTICAL ) + + expires_box.AddF( wx.StaticText( self, label = '- change expiration -' ), FLAGS_SMALL_INDENT ) + expires_box.AddF( add_to_expires_box, FLAGS_EXPAND_PERPENDICULAR ) + expires_box.AddF( set_expires_box, FLAGS_EXPAND_PERPENDICULAR ) + + ban_box = wx.BoxSizer( wx.HORIZONTAL ) + + ban_box.AddF( wx.StaticText( self, label = '- bans -' ), FLAGS_SMALL_INDENT ) + ban_box.AddF( self._ban, FLAGS_BUTTON_SIZERS ) + ban_box.AddF( self._superban, FLAGS_BUTTON_SIZERS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( info_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( account_types_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( expires_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( ban_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._exit, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'modify account' ) + + self._service = wx.GetApp().Read( 'service', service_identifier ) + self._subject_identifiers = set( subject_identifiers ) + + InitialiseControls() + + InitialisePanel() + + + def _DoModification( self, action, **kwargs ): + + try: + + connection = self._service.GetConnection() + + kwargs[ 'subject_identifiers' ] = list( self._subject_identifiers ) + kwargs[ 'action' ] = action + + connection.Post( 'accountmodification', **kwargs ) + + if len( self._subject_identifiers ) == 1: + + ( subject_identifier, ) = self._subject_identifiers + + self._subject_text.SetLabel( str( connection.Get( 'accountinfo', subject_identifier = subject_identifier ) ) ) + + + except Exception as e: wx.MessageBox( unicode( e ) ) + + if len( self._subject_identifiers ) > 1: wx.MessageBox( 'Done!' 
) + + + def EventAddToExpires( self, event ): self._DoModification( HC.ADD_TO_EXPIRES, expiration = self._add_to_expires.GetClientData( self._add_to_expires.GetSelection() ) ) + + def EventBan( self, event ): + + with wx.TextEntryDialog( self, 'Enter reason for the ban' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: self._DoModification( HC.BAN, reason = dlg.GetValue() ) + + + + def EventChangeAccountType( self, event ): self._DoModification( HC.CHANGE_ACCOUNT_TYPE, title = self._account_types.GetClientData( self._account_types.GetSelection() ).GetTitle() ) + + def EventSetExpires( self, event ): self._DoModification( HC.SET_EXPIRES, expiry = int( time.time() ) + self._set_expires.GetClientData( self._set_expires.GetSelection() ) ) + + def EventSuperban( self, event ): + + with wx.TextEntryDialog( self, 'Enter reason for the superban' ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: self._DoModification( HC.SUPERBAN, reason = dlg.GetValue() ) + + + +class DialogNews( Dialog ): + + def __init__( self, parent, service_identifier ): + + def InitialiseControls(): + + self._news = wx.TextCtrl( self, style=wx.TE_READONLY | wx.TE_MULTILINE ) + + self._previous = wx.Button( self, label='<' ) + self._previous.Bind( wx.EVT_BUTTON, self.EventPrevious ) + + self._news_position = wx.TextCtrl( self ) + + self._next = wx.Button( self, label='>' ) + self._next.Bind( wx.EVT_BUTTON, self.EventNext ) + + self._done = wx.Button( self, id = wx.ID_CANCEL, label='Done' ) + self._done.Bind( wx.EVT_BUTTON, self.EventOk ) + + + def InitialisePanel(): + + self._newslist = wx.GetApp().Read( 'news', service_identifier ) + + self._current_news_position = len( self._newslist ) + + self._ShowNews() + + buttonbox = wx.BoxSizer( wx.HORIZONTAL ) + + buttonbox.AddF( self._previous, FLAGS_MIXED ) + buttonbox.AddF( self._news_position, FLAGS_MIXED ) + buttonbox.AddF( self._next, FLAGS_MIXED ) + + donebox = wx.BoxSizer( wx.HORIZONTAL ) + + donebox.AddF( self._done, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._news, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( buttonbox, FLAGS_BUTTON_SIZERS ) + vbox.AddF( donebox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x + 200, 580 ) ) + + + Dialog.__init__( self, parent, 'news' ) + + InitialiseControls() + + InitialisePanel() + + + def _ShowNews( self ): + + if self._current_news_position == 0: + + self._news.SetValue( '' ) + + self._news_position.SetValue( 'No News' ) + + else: + + ( news, timestamp ) = self._newslist[ self._current_news_position - 1 ] + + self._news.SetValue( time.ctime( timestamp ) + ':' + os.linesep + os.linesep + news ) + + self._news_position.SetValue( HC.ConvertIntToPrettyString( self._current_news_position ) + ' / ' + HC.ConvertIntToPrettyString( len( self._newslist ) ) ) + + + + def EventNext( self, event ): + + if self._current_news_position < len( self._newslist ): self._current_news_position += 1 + + self._ShowNews() + + + def EventOk( self, event ): self.EndModal( wx.ID_OK ) + + def EventPrevious( self, event ): + + if self._current_news_position > 1: self._current_news_position -= 1 + + self._ShowNews() + + +class DialogPathsToTagsRegex( Dialog ): + + def __init__( self, parent, paths ): + + def InitialiseControls(): + + self._tag_repositories = ClientGUICommon.ListBook( self ) + self._tag_repositories.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventServiceChanged ) + + services = wx.GetApp().Read( 'services', ( HC.TAG_REPOSITORY, ) ) + + for service in services: + + 
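+ # only tag repositories this account can post to get their own tagging page; the local tag service page is appended unconditionally after this loop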
account = service.GetAccount() + + if account.HasPermission( HC.POST_DATA ): + + service_identifier = service.GetServiceIdentifier() + + page_info = ( DialogPathsToTagsRegexPanel, ( self._tag_repositories, service_identifier, paths ), {} ) + + name = service_identifier.GetName() + + self._tag_repositories.AddPage( page_info, name ) + + + + page = DialogPathsToTagsRegexPanel( self._tag_repositories, CC.LOCAL_TAG_SERVICE_IDENTIFIER, paths ) + + name = CC.LOCAL_TAG_SERVICE_IDENTIFIER.GetName() + + self._tag_repositories.AddPage( page, name ) + + default_tag_repository = self._options[ 'default_tag_repository' ] + + self._tag_repositories.Select( default_tag_repository.GetName() ) + + self._add_button = wx.Button( self, label='Import Files' ) + self._add_button.Bind( wx.EVT_BUTTON, self.EventOK ) + self._add_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._close_button = wx.Button( self, id = wx.ID_CANCEL, label='Back to File Selection' ) + self._close_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._close_button.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._add_button, FLAGS_SMALL_INDENT ) + buttons.AddF( self._close_button, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._tag_repositories, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + self.SetInitialSize( ( 980, 680 ) ) + + + Dialog.__init__( self, parent, 'path tagging' ) + + self._paths = paths + + InitialiseControls() + + InitialisePanel() + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOK( self, event ): self.EndModal( wx.ID_OK ) + + def EventServiceChanged( self, event ): + + page = self._tag_repositories.GetCurrentPage() + + wx.CallAfter( page.SetTagBoxFocus ) + + + def GetInfo( self ): + + paths_to_tags = {} + + try: + + for path in self._paths: + + all_tags = {} + + for page in self._tag_repositories.GetNameToPageDict().values(): + + tags = page.GetTags( path ) + + if len( tags ) > 0: + + service_identifier = page.GetServiceIdentifier() + + all_tags[ service_identifier ] = tags + + + + if len( all_tags ) > 0: paths_to_tags[ path ] = all_tags + + + except Exception as e: wx.MessageBox( 'Saving pending mapping changes to DB raised this error: ' + unicode( e ) ) + + return paths_to_tags + + +class DialogPathsToTagsRegexPanel( wx.Panel ): + + ID_REGEX_WHITESPACE = 0 + ID_REGEX_NUMBER = 1 + ID_REGEX_ALPHANUMERIC = 2 + ID_REGEX_ANY = 3 + ID_REGEX_BEGINNING = 4 + ID_REGEX_END = 5 + ID_REGEX_0_OR_MORE_GREEDY = 6 + ID_REGEX_1_OR_MORE_GREEDY = 7 + ID_REGEX_0_OR_1_GREEDY = 8 + ID_REGEX_0_OR_MORE_MINIMAL = 9 + ID_REGEX_1_OR_MORE_MINIMAL = 10 + ID_REGEX_0_OR_1_MINIMAL = 11 + ID_REGEX_EXACTLY_M = 12 + ID_REGEX_M_TO_N_GREEDY = 13 + ID_REGEX_M_TO_N_MINIMAL = 14 + ID_REGEX_LOOKAHEAD = 15 + ID_REGEX_NEGATIVE_LOOKAHEAD = 16 + ID_REGEX_LOOKBEHIND = 17 + ID_REGEX_NEGATIVE_LOOKBEHIND = 18 + ID_REGEX_NUMBER_WITHOUT_ZEROES = 19 + ID_REGEX_NUMBER_EXT = 20 + ID_REGEX_AUTHOR = 21 + ID_REGEX_BACKSPACE = 22 + ID_REGEX_SET = 23 + ID_REGEX_NOT_SET = 24 + + def __init__( self, parent, service_identifier, paths ): + + def InitialiseControls(): + + self._paths_list = ClientGUICommon.SaneListCtrl( self, 300, [ ( 'path', 400 ), ( 'tags', -1 ) ] ) + + self._paths_list.Bind( wx.EVT_LIST_ITEM_SELECTED, self.EventItemSelected ) + + self._page_regex = wx.TextCtrl( self ) + self._chapter_regex = wx.TextCtrl( self ) + self._volume_regex = wx.TextCtrl( self ) + 
self._title_regex = wx.TextCtrl( self ) + self._series_regex = wx.TextCtrl( self ) + self._creator_regex = wx.TextCtrl( self ) + + self._update_button = wx.Button( self, label='update' ) + self._update_button.Bind( wx.EVT_BUTTON, self.EventUpdate ) + + self._regex_shortcuts = wx.Button( self, label = 'regex shortcuts' ) + self._regex_shortcuts.Bind( wx.EVT_BUTTON, self.EventRegexShortcuts ) + + self._regex_link = wx.HyperlinkCtrl( self, id = -1, label = 'a good regex introduction', url = 'http://www.aivosto.com/vbtips/regex.html' ) + + self._regexes = wx.ListBox( self ) + self._regexes.Bind( wx.EVT_LISTBOX_DCLICK, self.EventRemoveRegex ) + + self._regex_box = wx.TextCtrl( self, style=wx.TE_PROCESS_ENTER ) + self._regex_box.Bind( wx.EVT_TEXT_ENTER, self.EventAddRegex ) + + self._tags = ClientGUICommon.TagsBoxFlat( self, self.TagRemoved ) + + self._tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self, self.AddTag, CC.LOCAL_FILE_SERVICE_IDENTIFIER, service_identifier ) + + self._paths_to_single_tags = collections.defaultdict( list ) + + self._single_tags = ClientGUICommon.TagsBoxFlat( self, self.SingleTagRemoved ) + + self._single_tag_box = ClientGUICommon.AutoCompleteDropdownTagsWrite( self, self.AddTagSingle, CC.LOCAL_FILE_SERVICE_IDENTIFIER, service_identifier ) + self._single_tag_box.Disable() + + for path in self._paths: + + tags = self._GetTags( path ) + + tags_string = ', '.join( tags ) + + self._paths_list.Append( ( path, tags_string ), ( path, tags ) ) + + + + def InitialisePanel(): + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + gridbox.AddF( wx.StaticText( self, label='Page regex ' ), FLAGS_MIXED ) + gridbox.AddF( self._page_regex, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='Chapter regex ' ), FLAGS_MIXED ) + gridbox.AddF( self._chapter_regex, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='Volume regex ' ), FLAGS_MIXED ) + gridbox.AddF( self._volume_regex, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='Title regex ' ), FLAGS_MIXED ) + gridbox.AddF( self._title_regex, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='Series regex ' ), FLAGS_MIXED ) + gridbox.AddF( self._series_regex, FLAGS_EXPAND_BOTH_WAYS ) + gridbox.AddF( wx.StaticText( self, label='Creator regex ' ), FLAGS_MIXED ) + gridbox.AddF( self._creator_regex, FLAGS_EXPAND_BOTH_WAYS ) + + ns_vbox = wx.BoxSizer( wx.VERTICAL ) + + ns_vbox.AddF( wx.StaticText( self, label = '- quick namespaces -' ), FLAGS_SMALL_INDENT ) + + ns_vbox.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + ns_vbox.AddF( self._update_button, FLAGS_LONE_BUTTON ) + ns_vbox.AddF( self._regex_shortcuts, FLAGS_LONE_BUTTON ) + ns_vbox.AddF( self._regex_link, FLAGS_LONE_BUTTON ) + + regex_vbox = wx.BoxSizer( wx.VERTICAL ) + + regex_vbox.AddF( wx.StaticText( self, label = '- regexes -' ), FLAGS_SMALL_INDENT ) + + regex_vbox.AddF( self._regexes, FLAGS_EXPAND_BOTH_WAYS ) + regex_vbox.AddF( self._regex_box, FLAGS_EXPAND_PERPENDICULAR ) + + tag_vbox = wx.BoxSizer( wx.VERTICAL ) + + tag_vbox.AddF( wx.StaticText( self, label = '- tags for all -' ), FLAGS_SMALL_INDENT ) + + tag_vbox.AddF( self._tags, FLAGS_EXPAND_BOTH_WAYS ) + tag_vbox.AddF( self._tag_box, FLAGS_EXPAND_PERPENDICULAR ) + + tag_single_vbox = wx.BoxSizer( wx.VERTICAL ) + + tag_single_vbox.AddF( wx.StaticText( self, label = '- tags just for this file -' ), FLAGS_SMALL_INDENT ) + + tag_single_vbox.AddF( self._single_tags, FLAGS_EXPAND_BOTH_WAYS ) + tag_single_vbox.AddF( 
self._single_tag_box, FLAGS_EXPAND_PERPENDICULAR ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( ns_vbox, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( regex_vbox, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( tag_vbox, FLAGS_EXPAND_BOTH_WAYS ) + hbox.AddF( tag_single_vbox, FLAGS_EXPAND_BOTH_WAYS ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._paths_list, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( hbox, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + + wx.Panel.__init__( self, parent ) + + self._service_identifier = service_identifier + self._paths = paths + + InitialiseControls() + + InitialisePanel() + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + + + def _GetTags( self, path ): + + tags = [] + + tags.extend( self._tags.GetTags() ) + + for regex in self._regexes.GetStrings(): + + try: + + m = re.search( regex, path ) + + if m is not None: + + match = m.group() + + if len( match ) > 0: tags.append( match ) + + + except: pass + + + namespaced_regexes = [] + + namespaced_regexes.append( ( self._page_regex, 'page:' ) ) + namespaced_regexes.append( ( self._chapter_regex, 'chapter:' ) ) + namespaced_regexes.append( ( self._volume_regex, 'volume:' ) ) + namespaced_regexes.append( ( self._title_regex, 'title:' ) ) + namespaced_regexes.append( ( self._series_regex, 'series:' ) ) + namespaced_regexes.append( ( self._creator_regex, 'creator:' ) ) + + for ( control, prefix ) in namespaced_regexes: + + try: + + m = re.search( control.GetValue(), path ) + + if m is not None: + + match = m.group() + + if len( match ) > 0: tags.append( prefix + match ) + + + except: pass + + + if path in self._paths_to_single_tags: tags.extend( self._paths_to_single_tags[ path ] ) + + return tags + + + def _RefreshFileList( self ): + + for ( index, ( path, old_tags ) ) in enumerate( self._paths_list.GetClientData() ): + + # when doing regexes, make sure not to include '' results, same for system: and - started tags. + + tags = self._GetTags( path ) + + if tags != old_tags: + + tags_string = ', '.join( tags ) + + self._paths_list.UpdateRow( index, ( path, tags_string ), ( path, tags ) ) + + + + + def AddTag( self, tag ): + + if tag is not None: + + self._tags.AddTag( tag ) + + self._tag_box.Clear() + + self._RefreshFileList() + + + + def AddTagSingle( self, tag ): + + if tag is not None: + + self._single_tags.AddTag( tag ) + + self._single_tag_box.Clear() + + indices = self._paths_list.GetAllSelected() + + for index in indices: + + ( path, old_tags ) = self._paths_list.GetClientData( index ) + + self._paths_to_single_tags[ path ].append( tag ) + + + self._RefreshFileList() # make this more clever + + + + def EventAddRegex( self, event ): + + regex = self._regex_box.GetValue() + + if regex != '': + + self._regexes.Append( regex ) + + self._regex_box.Clear() + + self._RefreshFileList() + + + + def EventItemSelected( self, event ): + + single_tags = [] + + indices = self._paths_list.GetAllSelected() + + if len( indices ) > 0: + + path = self._paths_list.GetClientData( indices[0] )[0] + + if path in self._paths_to_single_tags: single_tags = self._paths_to_single_tags[ path ] + + self._single_tag_box.Enable() + + else: self._single_tag_box.Disable() + + self._single_tags.SetTags( single_tags ) + + + def EventMenu( self, event ): + + id = event.GetId() + + phrase = None + + if id == self.ID_REGEX_WHITESPACE: phrase = r'\s' + elif id == self.ID_REGEX_NUMBER: phrase = r'\d' + elif id == self.ID_REGEX_ALPHANUMERIC: phrase = r'\w' + elif id == self.ID_REGEX_ANY: phrase = r'.' 
+ elif id == self.ID_REGEX_BACKSPACE: phrase = r'\\' + elif id == self.ID_REGEX_BEGINNING: phrase = r'^' + elif id == self.ID_REGEX_END: phrase = r'$' + elif id == self.ID_REGEX_SET: phrase = r'[...]' + elif id == self.ID_REGEX_NOT_SET: phrase = r'[^...]' + elif id == self.ID_REGEX_0_OR_MORE_GREEDY: phrase = r'*' + elif id == self.ID_REGEX_1_OR_MORE_GREEDY: phrase = r'+' + elif id == self.ID_REGEX_0_OR_1_GREEDY: phrase = r'?' + elif id == self.ID_REGEX_0_OR_MORE_MINIMAL: phrase = r'*?' + elif id == self.ID_REGEX_1_OR_MORE_MINIMAL: phrase = r'+?' + elif id == self.ID_REGEX_0_OR_1_MINIMAL: phrase = r'??' + elif id == self.ID_REGEX_EXACTLY_M: phrase = r'{m}' + elif id == self.ID_REGEX_M_TO_N_GREEDY: phrase = r'{m,n}' + elif id == self.ID_REGEX_M_TO_N_MINIMAL: phrase = r'{m,n}?' + elif id == self.ID_REGEX_LOOKAHEAD: phrase = r'(?=...)' + elif id == self.ID_REGEX_NEGATIVE_LOOKAHEAD: phrase = r'(?!...)' + elif id == self.ID_REGEX_LOOKBEHIND: phrase = r'(?<=...)' + elif id == self.ID_REGEX_NEGATIVE_LOOKBEHIND: phrase = r'(?<!...)' + elif id == self.ID_REGEX_NUMBER_WITHOUT_ZEROES: phrase = r'[1-9]+\d*' + elif id == self.ID_REGEX_NUMBER_EXT: phrase = r'[1-9]+\d*(?=.{4}$)' + elif id == self.ID_REGEX_AUTHOR: phrase = r'[^\\][\w\s]*(?=\s-)' + + if phrase is not None: + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( phrase ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + + def EventRegexShortcuts( self, event ): + + menu = wx.Menu() + + menu.Append( self.ID_REGEX_NUMBER_WITHOUT_ZEROES, r'0074 -> 74 - [1-9]+\d*' ) + menu.Append( self.ID_REGEX_NUMBER_EXT, r'...0074.jpg -> 74 - [1-9]+\d*(?=.{4}$)' ) + menu.Append( self.ID_REGEX_AUTHOR, r'E:\my collection\author name - v4c1p0074.jpg -> author name - [^\\][\w\s]*(?=\s-)' ) + + self.PopupMenu( menu ) + + + def EventRemoveRegex( self, event ): + + selection = self._regexes.GetSelection() + + if selection != wx.NOT_FOUND: + + if len( self._regex_box.GetValue() ) == 0: self._regex_box.SetValue( self._regexes.GetString( selection ) ) + + self._regexes.Delete( selection ) + + self._RefreshFileList() + + + + def EventUpdate( self, event ): self._RefreshFileList() + + def GetServiceIdentifier( self ): return self._service_identifier + + # this prob needs to be made cleverer if I do the extra column + def GetTags( self, path ): return self._GetTags( path ) + + def SetTagBoxFocus( self ): self._tag_box.SetFocus() + + def SingleTagRemoved( self ): + + indices = self._paths_list.GetAllSelected() + + if len( indices ) > 0: + + path = self._paths_list.GetClientData( indices[0] )[0] + + self._paths_to_single_tags[ path ] = self._single_tags.GetTags() + + + self._RefreshFileList() + + + def TagRemoved( self ): self._RefreshFileList() + +class DialogProgress( Dialog ): + + def __init__( self, parent, job_key, cancel_event = None ): + + def InitialiseControls(): + + self._status = wx.StaticText( self, label = 'initialising', style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + self._gauge = ClientGUICommon.Gauge( self, range = 100 ) + self._time_taken_so_far = wx.StaticText( self, label = 'initialising', style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + self._time_left = wx.StaticText( self, label = 'initialising', style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + if cancel_event is not None: + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label = 'cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + + + self._time_started = None + + + def InitialisePanel(): + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._status, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._gauge, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._time_taken_so_far, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._time_left, FLAGS_EXPAND_PERPENDICULAR ) + + if cancel_event is not None: vbox.AddF( self._cancel, FLAGS_LONE_BUTTON ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if x < 640: x = 640 + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'progress', style = wx.SYSTEM_MENU | wx.CAPTION | 
wx.RESIZE_BORDER, position = 'center' ) + + self._job_key = job_key + + self._cancel_event = cancel_event + + InitialiseControls() + + InitialisePanel() + + self.Bind( wx.EVT_TIMER, self.EventTimer, id = ID_TIMER_UPDATE ) + + self._timer = wx.Timer( self, id = ID_TIMER_UPDATE ) + + self._timer.Start( 1000, wx.TIMER_CONTINUOUS ) + + HC.pubsub.sub( self, 'Update', 'progress_update' ) + + + def _DisplayTimes( self ): + + value = self._gauge.GetValue() + range = self._gauge.GetRange() + + if self._time_started is not None: + + time_taken_so_far = time.clock() - self._time_started + + if value > 1: time_left = HC.ConvertTimeToPrettyTime( time_taken_so_far * ( float( range - value ) / float( value ) ) ) + else: time_left = 'unknown' + + self._time_taken_so_far.SetLabel( 'elapsed: ' + HC.ConvertTimeToPrettyTime( time_taken_so_far ) ) + + self._time_left.SetLabel( 'remaining: ' + time_left ) + + + + def EventCancel( self, event ): + + self._cancel.Disable() + self._cancel_event.set() + + + def EventTimer( self, event ): + + value = self._gauge.GetValue() + range = self._gauge.GetRange() + + if value == range: self.EndModal( wx.OK ) + else: self._DisplayTimes() + + + def Update( self, job_key, index, range, status ): + + if job_key == self._job_key: + + if self._time_started is None: self._time_started = time.clock() + + if range != self._gauge.GetRange(): self._gauge.SetRange( range ) + + self._gauge.SetValue( index ) + + self._status.SetLabel( status ) + + self._DisplayTimes() + + + +class DialogSelectBooru( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + boorus = wx.GetApp().Read( 'boorus' ) + + self._boorus = wx.ListBox( self, style = wx.LB_SORT ) + self._boorus.Bind( wx.EVT_LISTBOX_DCLICK, self.EventSelect ) + self._boorus.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + for booru in boorus: self._boorus.Append( booru.GetName(), booru ) + + + def InitialisePanel(): + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._boorus, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if x < 320: x = 320 + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'select booru' ) + + InitialiseControls() + + InitialisePanel() + + + def EventKeyDown( self, event ): + + if event.KeyCode == wx.WXK_SPACE: + + selection = self._boorus.GetSelection() + + if selection != wx.NOT_FOUND: self.EndModal( wx.ID_OK ) + + elif event.KeyCode == wx.WXK_ESCAPE: self.EndModal( wx.ID_CANCEL ) + else: event.Skip() + + + def EventSelect( self, event ): self.EndModal( wx.ID_OK ) + + def GetBooru( self ): return self._boorus.GetClientData( self._boorus.GetSelection() ) + +class DialogSelectImageboard( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._tree = wx.TreeCtrl( self ) + self._tree.Bind( wx.EVT_TREE_ITEM_ACTIVATED, self.EventSelect ) + + all_imageboards = wx.GetApp().Read( 'imageboards' ) + + root_item = self._tree.AddRoot( 'all sites' ) + + for ( site, imageboards ) in all_imageboards: + + site_item = self._tree.AppendItem( root_item, site ) + + for imageboard in imageboards: + + name = imageboard.GetName() + + self._tree.AppendItem( site_item, name, data = wx.TreeItemData( imageboard ) ) + + + + self._tree.Expand( root_item ) + + + def InitialisePanel(): + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._tree, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if x < 320: x = 320 + if y < 320: y = 320 + + self.SetInitialSize( ( x, y ) 
) + + + Dialog.__init__( self, parent, 'select imageboard' ) + + InitialiseControls() + + InitialisePanel() + + + def EventSelect( self, event ): + + item = self._tree.GetSelection() + + if self._tree.GetItemData( item ).GetData() is None: self._tree.Toggle( item ) + else: self.EndModal( wx.ID_OK ) + + + def GetImageboard( self ): return self._tree.GetItemData( self._tree.GetSelection() ).GetData() + +class DialogSelectFromListOfStrings( Dialog ): + + def __init__( self, parent, title, list_of_strings ): + + def InitialiseControls(): + + self._strings = wx.ListBox( self, choices = list_of_strings ) + self._strings.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + self._strings.Bind( wx.EVT_LISTBOX_DCLICK, self.EventSelect ) + + + def InitialisePanel(): + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._strings, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + if x < 320: x = 320 + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, title ) + + InitialiseControls() + + InitialisePanel() + + + def EventKeyDown( self, event ): + + if event.KeyCode == wx.WXK_SPACE: + + selection = self._strings.GetSelection() + + if selection != wx.NOT_FOUND: self.EndModal( wx.ID_OK ) + + elif event.KeyCode == wx.WXK_ESCAPE: self.EndModal( wx.ID_CANCEL ) + else: event.Skip() + + + def EventSelect( self, event ): self.EndModal( wx.ID_OK ) + + def GetString( self ): return self._strings.GetStringSelection() + +class DialogSelectLocalFiles( Dialog ): + + def __init__( self, parent, paths = [] ): + + def InitialiseControls(): + + self._paths_list = ClientGUICommon.SaneListCtrl( self, 480, [ ( 'path', -1 ), ( 'guessed mime', 110 ), ( 'size', 60 ) ] ) + + self._paths_list.SetMinSize( ( 780, 360 ) ) + + self._add_button = wx.Button( self, label='Import now' ) + self._add_button.Bind( wx.EVT_BUTTON, self.EventOK ) + self._add_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._tag_button = wx.Button( self, label = 'Add tags before importing' ) + self._tag_button.Bind( wx.EVT_BUTTON, self.EventTags ) + self._tag_button.SetForegroundColour( ( 0, 128, 0 ) ) + + self._close_button = wx.Button( self, id = wx.ID_CANCEL, label='Cancel' ) + self._close_button.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._close_button.SetForegroundColour( ( 128, 0, 0 ) ) + + self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self ) + + self._add_files_button = wx.Button( self, label='Add Files' ) + self._add_files_button.Bind( wx.EVT_BUTTON, self.EventAddPaths ) + + self._add_folder_button = wx.Button( self, label='Add Folder' ) + self._add_folder_button.Bind( wx.EVT_BUTTON, self.EventAddFolder ) + + self._remove_files_button = wx.Button( self, label='Remove Files' ) + self._remove_files_button.Bind( wx.EVT_BUTTON, self.EventRemovePaths ) + + + def InitialisePanel(): + + file_buttons = wx.BoxSizer( wx.HORIZONTAL ) + + file_buttons.AddF( ( 20, 0 ), FLAGS_NONE ) + file_buttons.AddF( self._add_files_button, FLAGS_MIXED ) + file_buttons.AddF( self._add_folder_button, FLAGS_MIXED ) + file_buttons.AddF( self._remove_files_button, FLAGS_MIXED ) + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._add_button, FLAGS_MIXED ) + buttons.AddF( self._tag_button, FLAGS_MIXED ) + buttons.AddF( self._close_button, FLAGS_MIXED ) + + advanced_import_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_import_options.AddF( wx.StaticText( self, label = '- advanced import options -' ), FLAGS_SMALL_INDENT ) + advanced_import_options.AddF( 
self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._paths_list, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( file_buttons, FLAGS_BUTTON_SIZERS ) + vbox.AddF( advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( ( 0, 5 ), FLAGS_NONE ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'importing files' ) + + self.SetDropTarget( ClientGUICommon.FileDropTarget( self._AddPathsToList ) ) + + InitialiseControls() + + InitialisePanel() + + self._AddPathsToList( paths ) + + + + def _AddPathsToList( self, paths ): + + good_paths = CC.ParseImportablePaths( paths ) + + odd_paths = False + + for path in good_paths: + + mime = HC.GetMimeFromPath( path ) + + if mime in HC.ALLOWED_MIMES: + + info = os.lstat( path ) + + size = info[6] + + if size > 0: + + pretty_size = HC.ConvertIntToBytes( size ) + + self._paths_list.Append( ( path, HC.mime_string_lookup[ mime ], pretty_size ), ( path, HC.mime_string_lookup[ mime ], size ) ) + + + else: odd_paths = True + + + if odd_paths: wx.MessageBox( 'At present hydrus can handle only jpegs, pngs, bmps, gifs, swfs and flvs. The other files have not been added.' ) + + + def _GetPaths( self ): return [ row[0] for row in self._paths_list.GetClientData() ] + + def EventAddPaths( self, event ): + + with wx.FileDialog( self, 'Select the files to add.', style=wx.FD_MULTIPLE ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + paths = dlg.GetPaths() + + self._AddPathsToList( paths ) + + + + + def EventAddFolder( self, event ): + + with wx.DirDialog( self, 'Select a folder to add.', style=wx.DD_DIR_MUST_EXIST ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + path = dlg.GetPath() + + self._AddPathsToList( ( path, ) ) + + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventOK( self, event ): + + paths = self._GetPaths() + + if len( paths ) > 0: + + advanced_import_options = self._advanced_import_options.GetInfo() + + HC.pubsub.pub( 'new_hdd_import', paths, advanced_import_options = advanced_import_options ) + + self.EndModal( wx.ID_OK ) + + + + def EventRemovePaths( self, event ): self._paths_list.RemoveAllSelected() + + def EventTags( self, event ): + + try: + + paths = self._GetPaths() + + if len( paths ) > 0: + + advanced_import_options = self._advanced_import_options.GetInfo() + + with DialogPathsToTagsRegex( self, paths ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + paths_to_tags = dlg.GetInfo() + + HC.pubsub.pub( 'new_hdd_import', paths, advanced_import_options = advanced_import_options, paths_to_tags = paths_to_tags ) + + self.EndModal( wx.ID_OK ) + + + + + except: wx.MessageBox( traceback.format_exc() ) + + +class DialogSetupCustomFilterActions( Dialog ): + + def __init__( self, parent ): + + def InitialiseControls(): + + self._actions = ClientGUICommon.SaneListCtrl( self, 480, [ ( 'modifier', 150 ), ( 'key', 150 ), ( 'service', -1 ), ( 'action', 250 ) ] ) + + self._actions.SetMinSize( ( 780, 360 ) ) + + for ( modifier, key_dict ) in self._options[ 'shortcuts' ].items(): + + for ( key, action ) in key_dict.items(): + + if action in ( 'manage_tags', 'manage_ratings', 'archive', 'frame_back', 'frame_next', 'previous', 'next', 'first', 'last' ): + + service_identifier = None + pretty_service_identifier = '' + + ( pretty_modifier, pretty_key, pretty_action ) = HC.ConvertShortcutToPrettyShortcut( modifier, key, action ) + + 
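+ # each row stores both the pretty display strings and the raw ( modifier, key, service_identifier, action ) tuple, which GetActions later reads back via GetClientData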
self._actions.Append( ( pretty_modifier, pretty_key, pretty_service_identifier, pretty_action ), ( modifier, key, service_identifier, action ) ) + + + + + ( modifier, key, service_identifier, action ) = ( wx.ACCEL_NORMAL, wx.WXK_DELETE, None, 'delete' ) + + pretty_service_identifier = '' + + ( pretty_modifier, pretty_key, pretty_action ) = HC.ConvertShortcutToPrettyShortcut( modifier, key, action ) + + self._actions.Append( ( pretty_modifier, pretty_key, pretty_service_identifier, pretty_action ), ( modifier, key, service_identifier, action ) ) + + self._SortListCtrl() + + self._add = wx.Button( self, label='add' ) + self._add.Bind( wx.EVT_BUTTON, self.EventAdd ) + self._add.SetForegroundColour( ( 0, 128, 0 ) ) + + self._edit = wx.Button( self, label='edit' ) + self._edit.Bind( wx.EVT_BUTTON, self.EventEdit ) + + self._remove = wx.Button( self, label='remove' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + self._remove.SetForegroundColour( ( 128, 0, 0 ) ) + + self._ok = wx.Button( self, label = 'ok' ) + self._ok.Bind( wx.EVT_BUTTON, self.EventOK ) + self._ok.SetForegroundColour( ( 0, 128, 0 ) ) + + self._cancel = wx.Button( self, id = wx.ID_CANCEL, label='cancel' ) + self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel ) + self._cancel.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + action_buttons = wx.BoxSizer( wx.HORIZONTAL ) + + action_buttons.AddF( self._add, FLAGS_MIXED ) + action_buttons.AddF( self._edit, FLAGS_MIXED ) + action_buttons.AddF( self._remove, FLAGS_MIXED ) + + buttons = wx.BoxSizer( wx.HORIZONTAL ) + + buttons.AddF( self._ok, FLAGS_MIXED ) + buttons.AddF( self._cancel, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._actions, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( action_buttons, FLAGS_BUTTON_SIZERS ) + vbox.AddF( buttons, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'setup custom filter' ) + + InitialiseControls() + + InitialisePanel() + + + def _SortListCtrl( self ): self._actions.SortListItems( 3 ) + + def EventAdd( self, event ): + + with DialogInputCustomFilterAction( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + ( pretty_tuple, data_tuple ) = dlg.GetInfo() + + self._actions.Append( pretty_tuple, data_tuple ) + + self._SortListCtrl() + + + + + def EventCancel( self, event ): self.EndModal( wx.ID_CANCEL ) + + def EventEdit( self, event ): + + for index in self._actions.GetAllSelected(): + + ( modifier, key, service_identifier, action ) = self._actions.GetClientData( index ) + + with DialogInputCustomFilterAction( self, modifier = modifier, key = key, service_identifier = service_identifier, action = action ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + ( pretty_tuple, data_tuple ) = dlg.GetInfo() + + self._actions.UpdateRow( index, pretty_tuple, data_tuple ) + + self._SortListCtrl() + + + + + + def EventOK( self, event ): self.EndModal( wx.ID_OK ) + + def EventRemove( self, event ): self._actions.RemoveAllSelected() + + def GetActions( self ): + + raw_data = self._actions.GetClientData() + + actions = collections.defaultdict( dict ) + + for ( modifier, key, service_identifier, action ) in raw_data: actions[ modifier ][ key ] = ( service_identifier, action ) + + return actions + + +class DialogYesNo( Dialog ): + + def __init__( self, parent, message, yes_label = 'yes', no_label = 'no' ): + + def InitialiseControls(): + + self._yes = wx.Button( self, label = yes_label ) + 
self._yes.Bind( wx.EVT_BUTTON, self.EventYes ) + self._yes.SetForegroundColour( ( 0, 128, 0 ) ) + + self._no = wx.Button( self, id = wx.ID_CANCEL, label = no_label ) + self._no.Bind( wx.EVT_BUTTON, self.EventNo ) + self._no.SetForegroundColour( ( 128, 0, 0 ) ) + + + def InitialisePanel(): + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._yes, FLAGS_SMALL_INDENT ) + hbox.AddF( self._no, FLAGS_SMALL_INDENT ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + text = wx.StaticText( self, label = message ) + + text.Wrap( 480 ) + + vbox.AddF( text, FLAGS_BIG_INDENT ) + vbox.AddF( hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + ( x, y ) = self.GetEffectiveMinSize() + + self.SetInitialSize( ( x, y ) ) + + + Dialog.__init__( self, parent, 'are you sure?', position = 'center' ) + + InitialiseControls() + + InitialisePanel() + + self.Bind( wx.EVT_CHAR_HOOK, self.EventCharHook ) + + + def EventCharHook( self, event ): + + if event.KeyCode == wx.WXK_ESCAPE: self.EndModal( wx.ID_NO ) + else: event.Skip() + + + def EventNo( self, event ): self.EndModal( wx.ID_NO ) + + def EventYes( self, event ): self.EndModal( wx.ID_YES ) + \ No newline at end of file diff --git a/include/ClientGUIManagement.py b/include/ClientGUIManagement.py new file mode 100755 index 00000000..47aba14b --- /dev/null +++ b/include/ClientGUIManagement.py @@ -0,0 +1,3278 @@ +import HydrusConstants as HC +import HydrusImageHandling +import ClientConstants as CC +import ClientConstantsMessages +import ClientGUICommon +import ClientGUIDialogs +import ClientGUIMedia +import ClientGUIMixins +import ClientParsers +import json +import os +import threading +import time +import traceback +import urllib +import urlparse +import wx +import wx.lib.scrolledpanel + +CAPTCHA_FETCH_EVENT_TYPE = wx.NewEventType() +CAPTCHA_FETCH_EVENT = wx.PyEventBinder( CAPTCHA_FETCH_EVENT_TYPE ) + +ID_TIMER_CAPTCHA = wx.NewId() +ID_TIMER_DUMP = wx.NewId() +ID_TIMER_PROCESS_IMPORT_QUEUE = wx.NewId() +ID_TIMER_PROCESS_OUTER_QUEUE = wx.NewId() + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +class CaptchaControl( wx.Panel ): + + def __init__( self, parent, type, default ): + + wx.Panel.__init__( self, parent ) + + self._captcha_key = default + + self._captcha_challenge = None + self._captcha_runs_out = 0 + self._bitmap = wx.EmptyBitmap( 0, 0, 24 ) + + self._timer = wx.Timer( self, ID_TIMER_CAPTCHA ) + self.Bind( wx.EVT_TIMER, self.EventTimer, id = ID_TIMER_CAPTCHA ) + + self._captcha_panel = ClientGUICommon.BufferedWindow( self, size = ( 300, 57 ) ) + + self._refresh_button = wx.Button( self, label = '' ) + self._refresh_button.Bind( wx.EVT_BUTTON, self.EventRefreshCaptcha ) + self._refresh_button.Disable() + + self._captcha_time_left = wx.StaticText( self ) + + self._captcha_entry = wx.TextCtrl( self, style = wx.TE_PROCESS_ENTER ) + self._captcha_entry.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self._ready_button = wx.Button( self, label = '' ) + self._ready_button.Bind( wx.EVT_BUTTON, self.EventReady ) + + sub_vbox = wx.BoxSizer( wx.VERTICAL ) + + sub_vbox.AddF( self._refresh_button, 
FLAGS_EXPAND_BOTH_WAYS ) + sub_vbox.AddF( self._captcha_time_left, FLAGS_SMALL_INDENT ) + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( self._captcha_panel, FLAGS_NONE ) + hbox.AddF( sub_vbox, FLAGS_EXPAND_BOTH_WAYS ) + + hbox2 = wx.BoxSizer( wx.HORIZONTAL ) + + hbox2.AddF( self._captcha_entry, FLAGS_EXPAND_BOTH_WAYS ) + hbox2.AddF( self._ready_button, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self, label = '- recaptcha -' ), FLAGS_SMALL_INDENT ) + + vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( hbox2, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + self.Disable() + + + def _DrawEntry( self, entry = None ): + + if entry is None: + + self._captcha_entry.SetValue( '' ) + self._captcha_entry.Disable() + + else: self._captcha_entry.SetValue( entry ) + + + def _DrawMain( self ): + + dc = self._captcha_panel.GetDC() + + if self._captcha_challenge is None: + + dc.SetBackground( wx.Brush( wx.WHITE ) ) + + dc.Clear() + + self._refresh_button.SetLabel( '' ) + self._refresh_button.Disable() + + self._captcha_time_left.SetLabel( '' ) + + elif self._captcha_challenge == '': + + dc.SetBackground( wx.Brush( wx.WHITE ) ) + + dc.Clear() + + event = wx.NotifyEvent( CAPTCHA_FETCH_EVENT_TYPE ) + + self.ProcessEvent( event ) + + if event.IsAllowed(): + + self._refresh_button.SetLabel( 'get captcha' ) + self._refresh_button.Enable() + + else: + + self._refresh_button.SetLabel( 'not yet' ) + self._refresh_button.Disable() + + + self._captcha_time_left.SetLabel( '' ) + + else: + + hydrus_bmp = self._bitmap.CreateWxBmp() + + dc.DrawBitmap( hydrus_bmp, 0, 0 ) + + hydrus_bmp.Destroy() + + self._refresh_button.SetLabel( 'get new captcha' ) + self._refresh_button.Enable() + + self._captcha_time_left.SetLabel( HC.ConvertTimestampToPrettyExpires( self._captcha_runs_out ) ) + + + del dc + + + def _DrawReady( self, ready = None ): + + if ready is None: + + self._ready_button.SetLabel( '' ) + self._ready_button.Disable() + + else: + + if ready: + + self._captcha_entry.Disable() + self._ready_button.SetLabel( 'edit' ) + + else: + + self._captcha_entry.Enable() + self._ready_button.SetLabel( 'ready' ) + + + self._ready_button.Enable() + + + + def Disable( self ): + + self._captcha_challenge = None + self._captcha_runs_out = 0 + self._bitmap = wx.EmptyBitmap( 0, 0, 24 ) + + self._DrawMain() + self._DrawEntry() + self._DrawReady() + + self._timer.Stop() + + + def Enable( self ): + + self._captcha_challenge = '' + self._captcha_runs_out = 0 + self._bitmap = wx.EmptyBitmap( 0, 0, 24 ) + + self._DrawMain() + self._DrawEntry() + self._DrawReady() + + self._timer.Start( 1000, wx.TIMER_CONTINUOUS ) + + + def EnableWithValues( self, challenge, bitmap, captcha_runs_out, entry, ready ): + + if int( time.time() ) > captcha_runs_out: self.Enable() + else: + + self._captcha_challenge = challenge + self._captcha_runs_out = captcha_runs_out + self._bitmap = bitmap + + self._DrawMain() + self._DrawEntry( entry ) + self._DrawReady( ready ) + + self._timer.Start( 1000, wx.TIMER_CONTINUOUS ) + + + + def EventKeyDown( self, event ): + + if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ): self.EventReady( None ) + else: event.Skip() + + + def EventReady( self, event ): self._DrawReady( not self._ready_button.GetLabel() == 'edit' ) + + def EventRefreshCaptcha( self, event ): + + try: + + connection = CC.AdvancedHTTPConnection( scheme = 'http', host = 'www.google.com', port = 80 ) + + javascript_string = connection.request( 'GET', '/recaptcha/api/challenge?k=' + 
self._captcha_key ) + + ( trash, rest ) = javascript_string.split( 'challenge : \'', 1 ) + + ( self._captcha_challenge, trash ) = rest.split( '\'', 1 ) + + jpeg = connection.request( 'GET', '/recaptcha/api/image?c=' + self._captcha_challenge ) + + self._bitmap = HydrusImageHandling.GenerateHydrusBitmapFromFile( jpeg ) + + self._captcha_runs_out = int( time.time() ) + 5 * 60 - 15 + + self._DrawMain() + self._DrawEntry( '' ) + self._DrawReady( False ) + + self._timer.Start( 1000, wx.TIMER_CONTINUOUS ) + + except: + + wx.MessageBox( traceback.format_exc() ) + + + + def EventTimer( self, event ): + + if int( time.time() ) > self._captcha_runs_out: self.Enable() + else: self._DrawMain() + + + # change this to hold (current challenge, bmp, timestamp it runs out, value, whethere ready to post) + def GetValues( self ): return ( self._captcha_challenge, self._bitmap, self._captcha_runs_out, self._captcha_entry.GetValue(), self._ready_button.GetLabel() == 'edit' ) + +class Comment( wx.Panel ): + + def __init__( self, parent ): + + wx.Panel.__init__( self, parent ) + + self._initial_comment = '' + + self._comment = wx.TextCtrl( self, value = '', style = wx.TE_MULTILINE | wx.TE_READONLY, size = ( -1, 120 ) ) + self._comment.Disable() + + self._comment_append = wx.TextCtrl( self, value = '', style = wx.TE_MULTILINE | wx.TE_PROCESS_ENTER, size = ( -1, 120 ) ) + self._comment_append.Bind( wx.EVT_KEY_UP, self.EventKeyDown ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( wx.StaticText( self, label = '- comment -' ), FLAGS_SMALL_INDENT ) + + vbox.AddF( self._comment, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._comment_append, FLAGS_EXPAND_PERPENDICULAR ) + + self.SetSizer( vbox ) + + + def _SetComment( self ): + + append = self._comment_append.GetValue() + + if self._initial_comment != '' and append != '': comment = self._initial_comment + os.linesep + os.linesep + append + else: comment = self._initial_comment + append + + self._comment.SetValue( comment ) + + + def Disable( self ): + + self._initial_comment = '' + + self._comment_append.SetValue( '' ) + self._comment_append.Disable() + + self._SetComment() + + + def EnableWithValues( self, initial, append ): + + self._initial_comment = initial + + self._comment_append.SetValue( append ) + self._comment_append.Enable() + + self._SetComment() + + + def GetValues( self ): return ( self._initial_comment, self._comment_append.GetValue() ) + + def EventKeyDown( self, event ): + + self._SetComment() + + event.Skip() + + +class ManagementPanel( wx.lib.scrolledpanel.ScrolledPanel ): + + def __init__( self, parent, page, page_key, file_service_identifier = CC.LOCAL_FILE_SERVICE_IDENTIFIER ): + + wx.lib.scrolledpanel.ScrolledPanel.__init__( self, parent, style = wx.BORDER_NONE | wx.VSCROLL ) + + self.SetupScrolling() + + self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) ) + + self._page = page + self._page_key = page_key + self._file_service_identifier = file_service_identifier + self._tag_service_identifier = CC.NULL_SERVICE_IDENTIFIER + + HC.pubsub.sub( self, 'SetSearchFocus', 'set_search_focus' ) + + + def _MakeCollect( self, sizer ): + + self._collect_by = ClientGUICommon.ChoiceCollect( self, self._page_key ) + + sizer.AddF( self._collect_by, FLAGS_EXPAND_PERPENDICULAR ) + + + def _MakeCurrentSelectionTagsBox( self, sizer ): + + tags_box = ClientGUICommon.TagsBoxCPPWithSorter( self, self._page_key ) + + sizer.AddF( tags_box, FLAGS_EXPAND_BOTH_WAYS ) + + + def _MakeSort( self, sizer ): + + self._sort_by = 
ClientGUICommon.ChoiceSort( self, self._page_key ) + + sizer.AddF( self._sort_by, FLAGS_EXPAND_PERPENDICULAR ) + + + def SetSearchFocus( self, page_key ): pass + + def TryToClose( self ): pass + +class ManagementPanelDumper( ManagementPanel ): + + def __init__( self, parent, page, page_key, imageboard, media_results ): + + ManagementPanel.__init__( self, parent, page, page_key ) + + ( self._4chan_token, pin, timeout ) = wx.GetApp().Read( '4chan_pass' ) + + self._have_4chan_pass = timeout > int( time.time() ) + + self._imageboard = imageboard + + self._media_list = ClientGUIMixins.ListeningMediaList( CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results ) + + self._current_media = None + + self._dumping = False + self._actually_dumping = False + self._num_dumped = 0 + self._next_dump_index = 0 + self._next_dump_time = 0 + + self._file_post_name = 'upfile' + + self._timer = wx.Timer( self, ID_TIMER_DUMP ) + self.Bind( wx.EVT_TIMER, self.EventTimer, id = ID_TIMER_DUMP ) + + ( post_url, self._flood_time, self._form_fields, self._restrictions ) = self._imageboard.GetBoardInfo() + + o = urlparse.urlparse( post_url ) + + self._post_scheme = o.scheme + self._post_host = o.hostname + self._post_port = o.port + self._post_request = o.path + + self._progress_info = wx.StaticText( self ) + + self._progress_gauge = ClientGUICommon.Gauge( self ) + self._progress_gauge.SetRange( len( media_results ) ) + + self._start_button = wx.Button( self, label = 'start' ) + self._start_button.Bind( wx.EVT_BUTTON, self.EventStartButton ) + + prog_vbox = wx.BoxSizer( wx.VERTICAL ) + + prog_vbox.AddF( wx.StaticText( self, label = '- progress -' ), FLAGS_SMALL_INDENT ) + + prog_vbox.AddF( self._progress_info, FLAGS_EXPAND_PERPENDICULAR ) + prog_vbox.AddF( self._progress_gauge, FLAGS_EXPAND_PERPENDICULAR ) + prog_vbox.AddF( self._start_button, FLAGS_EXPAND_PERPENDICULAR ) + + # thread options + + self._thread_fields = {} + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + for ( name, type, default, editable ) in self._form_fields: + + if type in ( CC.FIELD_TEXT, CC.FIELD_THREAD_ID ): field = wx.TextCtrl( self, value = default ) + elif type == CC.FIELD_PASSWORD: field = wx.TextCtrl( self, value = default, style = wx.TE_PASSWORD ) + else: continue + + self._thread_fields[ name ] = ( type, field ) + + if editable: + + gridbox.AddF( wx.StaticText( self, label = name + ':' ), FLAGS_MIXED ) + gridbox.AddF( field, FLAGS_EXPAND_BOTH_WAYS ) + + else: field.Hide() + + + thread_options = wx.BoxSizer( wx.VERTICAL ) + + thread_options.AddF( wx.StaticText( self, label = '- thread options -' ), FLAGS_SMALL_INDENT ) + + thread_options.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + + # post options + + self._post_fields = {} + + postbox = wx.BoxSizer( wx.VERTICAL ) + + self._post_info = wx.StaticText( self, label = 'no file selected', style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + + gridbox = wx.FlexGridSizer( 0, 2 ) + + gridbox.AddGrowableCol( 1, 1 ) + + for ( name, type, default, editable ) in self._form_fields: + + if type == CC.FIELD_VERIFICATION_RECAPTCHA: + + if self._have_4chan_pass: continue + + field = CaptchaControl( self, type, default ) + field.Bind( CAPTCHA_FETCH_EVENT, self.EventCaptchaRefresh ) + + elif type == CC.FIELD_COMMENT: field = Comment( self ) + else: continue + + self._post_fields[ name ] = ( type, field, default ) + + postbox.AddF( field, FLAGS_EXPAND_PERPENDICULAR ) + + + for ( name, type, default, editable ) in self._form_fields: + + if type == CC.FIELD_CHECKBOX: + + field = 
wx.CheckBox( self ) + + field.SetValue( default == 'True' ) + + else: continue + + self._post_fields[ name ] = ( type, field, default ) + + gridbox.AddF( wx.StaticText( self, label = name + ':' ), FLAGS_MIXED ) + gridbox.AddF( field, FLAGS_EXPAND_BOTH_WAYS ) + + + for ( name, type, default, editable ) in self._form_fields: + + if type == CC.FIELD_FILE: self._file_post_name = name + + + post_options = wx.BoxSizer( wx.VERTICAL ) + + post_options.AddF( wx.StaticText( self, label = '- post options -' ), FLAGS_SMALL_INDENT ) + + post_options.AddF( self._post_info, FLAGS_EXPAND_PERPENDICULAR ) + post_options.AddF( postbox, FLAGS_EXPAND_PERPENDICULAR ) + post_options.AddF( gridbox, FLAGS_EXPAND_PERPENDICULAR ) + + advanced_tag_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_tag_options.AddF( wx.StaticText( self, label = '- advanced tag options -' ), FLAGS_SMALL_INDENT ) + + self._advanced_tag_options = ClientGUICommon.AdvancedTagOptions( self, 'include tags from', namespaces = [ 'creator', 'series', 'title', 'volume', 'chapter', 'page', 'character', 'person', 'all others' ] ) + + advanced_tag_options.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR ) + + # arrange stuff + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._MakeSort( vbox ) + + vbox.AddF( prog_vbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( thread_options, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( post_options, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + HC.pubsub.sub( self, 'FocusChanged', 'focus_changed' ) + HC.pubsub.sub( self, 'SortedMediaPulse', 'sorted_media_pulse' ) + + self._media_to_dump_info = {} + + for media in self._media_list.GetSortedMedia(): + + dump_status_enum = CC.DUMPER_NOT_DUMPED + + dump_status_string = 'not yet dumped' + + post_field_info = [] + + for ( name, ( type, field, default ) ) in self._post_fields.items(): + + if type == CC.FIELD_COMMENT: + + post_field_info.append( ( name, type, ( self._GetInitialComment( media ), '' ) ) ) + + elif type == CC.FIELD_CHECKBOX: post_field_info.append( ( name, type, default == 'True' ) ) + elif type == CC.FIELD_VERIFICATION_RECAPTCHA: post_field_info.append( ( name, type, None ) ) + + + self._media_to_dump_info[ media ] = ( dump_status_enum, dump_status_string, post_field_info ) + + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self._timer.Start( 1000, wx.TIMER_CONTINUOUS ) + + + def _THREADDoDump( self, media_to_dump, post_field_info, headers, body ): + + try: + + connection = CC.AdvancedHTTPConnection( scheme = self._post_scheme, host = self._post_host, port = self._post_port ) + + data = connection.request( 'POST', self._post_request, headers = headers, body = body ) + + ( status, phrase ) = ClientParsers.Parse4chanPostScreen( data ) + + except Exception as e: ( status, phrase ) = ( 'big error', unicode( e ) ) + + wx.CallAfter( self.CALLBACKDoneDump, media_to_dump, post_field_info, status, phrase ) + + + def _FreezeCurrentMediaPostInfo( self ): + + ( dump_status_enum, dump_status_string, post_field_info ) = self._media_to_dump_info[ self._current_media ] + + post_field_info = [] + + for ( name, ( type, field, default ) ) in self._post_fields.items(): + + if type == CC.FIELD_COMMENT: post_field_info.append( ( name, type, field.GetValues() ) ) + elif type == CC.FIELD_CHECKBOX: post_field_info.append( ( name, type, field.GetValue() ) ) + elif type == CC.FIELD_VERIFICATION_RECAPTCHA: post_field_info.append( ( name, type, field.GetValues() 
) ) + + + self._media_to_dump_info[ self._current_media ] = ( dump_status_enum, dump_status_string, post_field_info ) + + + def _GetInitialComment( self, media ): + + try: index = self._media_list.GetMediaIndex( media ) + except: return 'media removed' + + num_files = len( self._media_list.GetSortedMedia() ) + + if index == 0: + + total_size = sum( [ m.GetSize() for m in self._media_list.GetSortedMedia() ] ) + + initial = 'Hydrus Network Client is starting a dump of ' + str( num_files ) + ' files, totalling ' + HC.ConvertIntToBytes( total_size ) + ':' + os.linesep + os.linesep + + else: initial = '' + + initial += str( index + 1 ) + '/' + str( num_files ) + + info = self._advanced_tag_options.GetInfo() + + for ( service_identifier, namespaces ) in info: + + ( current, deleted, pending, petitioned ) = media.GetTags().GetCDPP( service_identifier ) + + tags = current.union( pending ) + + tags_to_include = [] + + for namespace in namespaces: + + if namespace == 'all others': tags_to_include.extend( [ tag for tag in tags if not True in ( tag.startswith( n ) for n in namespaces if n != 'all others' ) ] ) + else: tags_to_include.extend( [ tag for tag in tags if tag.startswith( namespace + ':' ) ] ) + + + initial += os.linesep + os.linesep + ', '.join( tags_to_include ) + + + return initial + + + def _ShowCurrentMedia( self ): + + if self._current_media is None: + + self._post_info.SetLabel( 'no file selected' ) + + for ( name, ( type, field, default ) ) in self._post_fields.items(): + + if type == CC.FIELD_CHECKBOX: field.SetValue( False ) + + field.Disable() + + + else: + + num_files = len( self._media_list.GetSortedMedia() ) + + ( dump_status_enum, dump_status_string, post_field_info ) = self._media_to_dump_info[ self._current_media ] + + index = self._media_list.GetMediaIndex( self._current_media ) + + self._post_info.SetLabel( str( index + 1 ) + '/' + str( num_files ) + ': ' + dump_status_string ) + + for ( name, type, value ) in post_field_info: + + ( type, field, default ) = self._post_fields[ name ] + + if type == CC.FIELD_COMMENT: + + ( initial, append ) = value + + field.EnableWithValues( initial, append ) + + elif type == CC.FIELD_CHECKBOX: + + field.SetValue( value ) + field.Enable() + + elif type == CC.FIELD_VERIFICATION_RECAPTCHA: + + if value is None: field.Enable() + else: + + ( challenge, bitmap, captcha_runs_out, entry, ready ) = value + + field.EnableWithValues( challenge, bitmap, captcha_runs_out, entry, ready ) + + + + + if dump_status_enum in ( CC.DUMPER_DUMPED_OK, CC.DUMPER_UNRECOVERABLE_ERROR ): + + for ( name, ( type, field, default ) ) in self._post_fields.items(): + + if type == CC.FIELD_CHECKBOX: field.SetValue( False ) + + field.Disable() + + + + + + def _UpdatePendingInitialComments( self ): + + all_media_to_dump = self._media_list.GetSortedMedia()[ self._next_dump_index : ] + + for media_to_dump in all_media_to_dump: + + if self._current_media == media_to_dump: self._FreezeCurrentMediaPostInfo() + + ( dump_status_enum, dump_status_string, post_field_info ) = self._media_to_dump_info[ media_to_dump ] + + new_post_field_info = [] + + for ( name, type, value ) in post_field_info: + + if type == CC.FIELD_COMMENT: + + ( initial, append ) = value + + initial = self._GetInitialComment( media_to_dump ) + + new_post_field_info.append( ( name, type, ( initial, append ) ) ) + + else: new_post_field_info.append( ( name, type, value ) ) + + + self._media_to_dump_info[ media_to_dump ] = ( dump_status_enum, dump_status_string, new_post_field_info ) + + if self._current_media == 
media_to_dump: self._ShowCurrentMedia() + + + + def CALLBACKDoneDump( self, media_to_dump, post_field_info, status, phrase ): + + self._actually_dumping = False + + if status == 'success': + + dump_status_enum = CC.DUMPER_DUMPED_OK + dump_status_string = 'dumped ok' + + if self._current_media == media_to_dump: HC.pubsub.pub( 'set_focus', self._page_key, None ) + + self._next_dump_time = int( time.time() ) + self._flood_time + + self._num_dumped += 1 + + self._progress_gauge.SetValue( self._num_dumped ) + + self._next_dump_index += 1 + + elif status == 'captcha': + + dump_status_enum = CC.DUMPER_RECOVERABLE_ERROR + dump_status_string = 'captcha was incorrect' + + self._next_dump_time = int( time.time() ) + 10 + + new_post_field_info = [] + + for ( name, type, value ) in post_field_info: + + if type == CC.FIELD_VERIFICATION_RECAPTCHA: new_post_field_info.append( ( name, type, None ) ) + else: new_post_field_info.append( ( name, type, value ) ) + + if media_to_dump == self._current_media: + + ( type, field, default ) = self._post_fields[ name ] + + field.Enable() + + + + post_field_info = new_post_field_info + + elif status == 'too quick': + + dump_status_enum = CC.DUMPER_RECOVERABLE_ERROR + dump_status_string = '' + + self._progress_info.SetLabel( 'Flood limit hit, retrying.' ) + + self._next_dump_time = int( time.time() ) + self._flood_time + + elif status == 'big error': + + dump_status_enum = CC.DUMPER_UNRECOVERABLE_ERROR + dump_status_string = '' + + self._progress_info.SetLabel( 'error: ' + phrase ) + + self._start_button.Disable() + + self._timer.Stop() + + elif 'Thread specified does not exist' in phrase: + + dump_status_enum = CC.DUMPER_UNRECOVERABLE_ERROR + dump_status_string = '' + + self._progress_info.SetLabel( 'thread specified does not exist!' 
) + + self._start_button.Disable() + + self._timer.Stop() + + else: + + dump_status_enum = CC.DUMPER_UNRECOVERABLE_ERROR + dump_status_string = phrase + + if self._current_media == media_to_dump: HC.pubsub.pub( 'set_focus', self._page_key, None ) + + self._next_dump_time = int( time.time() ) + self._flood_time + + self._next_dump_index += 1 + + + self._media_to_dump_info[ media_to_dump ] = ( dump_status_enum, dump_status_string, post_field_info ) + + ( hash, ) = media_to_dump.GetDisplayMedia().GetHashes() + + HC.pubsub.pub( 'file_dumped', self._page_key, hash, dump_status_enum ) + + if self._next_dump_index == len( self._media_list.GetSortedMedia() ): + + self._progress_info.SetLabel( 'done - ' + str( self._num_dumped ) + ' dumped' ) + + self._start_button.Disable() + + self._timer.Stop() + + + + def EventCaptchaRefresh( self, event ): + + try: + + index = self._media_list.GetMediaIndex( self._current_media ) + + if ( ( index + 1 ) - self._next_dump_index ) * ( self._flood_time + 10 ) > 5 * 60: event.Veto() + + except: event.Veto() + + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'advanced_tag_options_changed': self._UpdatePendingInitialComments() + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + + + + def EventStartButton( self, event ): + + if self._start_button.GetLabel() in ( 'start', 'continue' ): + + for ( name, ( type, field ) ) in self._thread_fields.items(): + + if type == CC.FIELD_THREAD_ID: + + try: int( field.GetValue() ) + except: + + self._progress_info.SetLabel( 'set thread_id field first' ) + + return + + + + + for ( type, field ) in self._thread_fields.values(): field.Disable() + + self._dumping = True + self._start_button.SetLabel( 'pause' ) + + if self._next_dump_time == 0: self._next_dump_time = int( time.time() ) + 5 + + # disable thread fields here + + else: + + for ( type, field ) in self._thread_fields.values(): field.Enable() + + self._dumping = False + + if self._num_dumped == 0: self._start_button.SetLabel( 'start' ) + else: self._start_button.SetLabel( 'continue' ) + + + + def EventTimer( self, event ): + + if self._actually_dumping: return + + if self._dumping: + + time_left = self._next_dump_time - int( time.time() ) + + if time_left < 1: + + media_to_dump = self._media_list.GetSortedMedia()[ self._next_dump_index ] + + wait = False + + if self._current_media == media_to_dump: self._FreezeCurrentMediaPostInfo() + + ( dump_status_enum, dump_status_string, post_field_info ) = self._media_to_dump_info[ media_to_dump ] + + for ( name, type, value ) in post_field_info: + + if type == CC.FIELD_VERIFICATION_RECAPTCHA: + + if value is None: + + wait = True + + break + + else: + + ( challenge, bitmap, captcha_runs_out, entry, ready ) = value + + if int( time.time() ) > captcha_runs_out or not ready: + + wait = True + + break + + + + + + if wait: self._progress_info.SetLabel( 'waiting for captcha' ) + else: + + self._progress_info.SetLabel( 'dumping' ) # 100% cpu time here - may or may not be desirable + + post_fields = [] + + for ( name, ( type, field ) ) in self._thread_fields.items(): + + post_fields.append( ( name, type, field.GetValue() ) ) + + + for ( name, type, value ) in post_field_info: + + if type == CC.FIELD_VERIFICATION_RECAPTCHA: + + ( challenge, bitmap, captcha_runs_out, entry, ready ) = value + + post_fields.append( ( 'recaptcha_challenge_field', type, challenge ) ) + 
post_fields.append( ( 'recaptcha_response_field', type, entry ) ) + + elif type == CC.FIELD_COMMENT: + + ( initial, append ) = value + + comment = initial + + if len( append ) > 0: comment += os.linesep + os.linesep + append + + post_fields.append( ( name, type, comment ) ) + + else: post_fields.append( ( name, type, value ) ) + + + ( hash, ) = media_to_dump.GetDisplayMedia().GetHashes() + + file = wx.GetApp().Read( 'file', hash ) + + post_fields.append( ( self._file_post_name, CC.FIELD_FILE, ( hash, HC.GetMimeFromString( file ), file ) ) ) + + ( ct, body ) = CC.GenerateDumpMultipartFormDataCTAndBody( post_fields ) + + headers = {} + headers[ 'Content-Type' ] = ct + if self._have_4chan_pass: headers[ 'Cookie' ] = 'pass_enabled=1; pass_id=' + self._4chan_token + + self._actually_dumping = True + + threading.Thread( target = self._THREADDoDump, args = ( media_to_dump, post_field_info, headers, body ) ).start() + + + else: self._progress_info.SetLabel( 'dumping next file in ' + str( time_left ) + ' seconds' ) + + else: + + if self._num_dumped == 0: self._progress_info.SetLabel( 'will dump to ' + self._imageboard.GetName() ) + else: self._progress_info.SetLabel( 'paused after ' + str( self._num_dumped ) + ' files dumped' ) + + + + def FocusChanged( self, page_key, media ): + + if page_key == self._page_key and media != self._current_media: + + old_media = self._current_media + + if old_media is not None: self._FreezeCurrentMediaPostInfo() + + self._current_media = media + + self._ShowCurrentMedia() + + + + def SortedMediaPulse( self, page_key, media_results ): + + if page_key == self._page_key: + + self._media_list = ClientGUIMixins.ListeningMediaList( CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], media_results ) + + new_media_to_dump_info = {} + + for ( media, ( dump_status_enum, dump_status_string, post_field_info ) ) in self._media_to_dump_info.items(): + + new_post_field_info = [] + + for ( name, type, value ) in post_field_info: + + if type == CC.FIELD_COMMENT: + + ( initial, append ) = value + + initial = self._GetInitialComment( media ) + + value = ( initial, append ) + + + new_post_field_info.append( ( name, type, value ) ) + + + new_media_to_dump_info[ media ] = ( dump_status_enum, dump_status_string, new_post_field_info ) + + + self._media_to_dump_info = new_media_to_dump_info + + self._ShowCurrentMedia() + + if self._current_media is None and len( self._media_list.GetSortedMedia() ) > 0: HC.pubsub.pub( 'set_focus', self._page_key, self._media_list.GetSortedMedia()[0] ) + + + +class ManagementPanelImport( ManagementPanel ): + + def __init__( self, parent, page, page_key ): + + ManagementPanel.__init__( self, parent, page, page_key ) + + self._successful = 0 + self._failed = 0 + self._deleted = 0 + self._redundant = 0 + + self._import_queue = [] + self._import_queue_position = 0 + + self._pause_import = False + self._cancel_import_queue = threading.Event() + self._pause_outer_queue = False + self._cancel_outer_queue = threading.Event() + + self._currently_importing = False + self._currently_processing_import_queue = False + self._currently_processing_outer_queue = False + + self._import_overall_info = wx.StaticText( self ) + self._import_current_info = wx.StaticText( self ) + self._import_gauge = ClientGUICommon.Gauge( self ) + + self._import_pause_button = wx.Button( self, label = 'pause' ) + self._import_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseImport ) + self._import_pause_button.Disable() + + self._timer_process_import_queue = wx.Timer( self, id = ID_TIMER_PROCESS_IMPORT_QUEUE ) + 
+ self.Bind( wx.EVT_TIMER, self.EventProcessImportQueue, id = ID_TIMER_PROCESS_IMPORT_QUEUE ) + + self._timer_process_import_queue.Start( 1000, wx.TIMER_ONE_SHOT ) + + HC.pubsub.sub( self, 'ImportDone', 'import_done' ) + HC.pubsub.sub( self, 'SetImportInfo', 'set_import_info' ) + HC.pubsub.sub( self, 'DoneAddingToImportQueue', 'done_adding_to_import_queue' ) + + + def _GetPreimportStatus( self ): + + status = 'importing ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) + + return status + + + def _GetPreprocessStatus( self ): pass + + def _GetStatusStrings( self ): + + strs = [] + + if self._successful > 0: strs.append( str( self._successful ) + ' successful' ) + if self._failed > 0: strs.append( str( self._failed ) + ' failed' ) + if self._deleted > 0: strs.append( str( self._deleted ) + ' already deleted' ) + if self._redundant > 0: strs.append( str( self._redundant ) + ' already in db' ) + + return strs + + + def _SetButtons( self ): + + if self._currently_processing_import_queue: self._import_pause_button.Enable() + else: self._import_pause_button.Disable() + + + def CALLBACKAddToImportQueue( self, items ): + + if self._currently_processing_import_queue: self._import_queue.extend( items ) + else: + + self._import_queue = items + self._import_queue_position = 0 + + self._timer_process_import_queue.Start( 10, wx.TIMER_ONE_SHOT ) + + self._currently_processing_import_queue = True + + self._SetButtons() + + + self._import_gauge.SetRange( len( self._import_queue ) ) + + + def CALLBACKImportArgs( self, file, advanced_import_options, service_identifiers_to_tags, url = None, exception = None ): + + if exception is None: + + self._import_current_info.SetLabel( self._GetPreimportStatus() ) + + wx.GetApp().WriteLowPriority( 'import_file_from_page', self._page_key, file, advanced_import_options = advanced_import_options, service_identifiers_to_tags = service_identifiers_to_tags, url = url ) + + else: + + self._currently_importing = False + self._import_current_info.SetLabel( unicode( exception ) ) + self._import_gauge.SetValue( self._import_queue_position + 1 ) + self._import_queue_position += 1 + + self._timer_process_import_queue.Start( 2000, wx.TIMER_ONE_SHOT ) + + + + def DoneAddingToImportQueue( self, page_key ): + + if self._page_key == page_key: + + self._currently_processing_outer_queue = False + + self._SetButtons() + + + + def EventPauseImport( self, event ): + + if self._pause_import: + + self._pause_import = False + + self._import_pause_button.SetLabel( 'pause' ) + self._import_pause_button.SetForegroundColour( ( 0, 0, 0 ) ) + + else: + + self._pause_import = True + + self._import_pause_button.SetLabel( 'resume' ) + self._import_pause_button.SetForegroundColour( ( 0, 128, 0 ) ) + + + + def EventProcessImportQueue( self, event ): + + status_strings = self._GetStatusStrings() + + self._import_overall_info.SetLabel( ', '.join( status_strings ) ) + + if self._pause_import: self._import_current_info.SetLabel( 'paused' ) + else: + + if self._cancel_import_queue.is_set(): self._import_queue = self._import_queue[ : self._import_queue_position ] # cut excess queue + + if len( self._import_queue ) == 0: self._import_current_info.SetLabel( '' ) + else: + + if not self._currently_importing: + + if self._import_queue_position < len( self._import_queue ): + + self._currently_importing = True + + self._import_current_info.SetLabel( self._GetPreprocessStatus() ) + + item = self._import_queue[ self._import_queue_position ] + + threading.Thread( target = 
self._THREADGetImportArgs, args = ( item, ), name = 'Generate Import Args' ).start() + + else: + + if self._currently_processing_outer_queue: self._import_current_info.SetLabel( 'waiting for more items' ) + else: + + if len( status_strings ) > 0: status = 'import done' + else: status = 'import abandoned' + + self._import_current_info.SetLabel( status ) + + self._currently_processing_import_queue = False + + self._cancel_import_queue = threading.Event() + self._cancel_outer_queue = threading.Event() + + self._SetButtons() + + + + + + + self._timer_process_import_queue.Start( 1000, wx.TIMER_ONE_SHOT ) + + + def ImportDone( self, page_key, result, exception = None ): + + if page_key == self._page_key: + + if result == 'successful': self._successful += 1 + elif result == 'failed': self._failed += 1 + elif result == 'deleted': self._deleted += 1 + elif result == 'redundant': self._redundant += 1 + + self._currently_importing = False + self._import_gauge.SetValue( self._import_queue_position + 1 ) + self._import_queue_position += 1 + + if exception is None: self._timer_process_import_queue.Start( 10, wx.TIMER_ONE_SHOT ) + else: + + print( os.linesep + 'Had trouble importing ' + str( self._import_queue[ self._import_queue_position - 1 ] ) + ':' + os.linesep + unicode( exception ) ) + + self._import_current_info.SetLabel( unicode( exception ) ) + + self._timer_process_import_queue.Start( 2000, wx.TIMER_ONE_SHOT ) + + + + + def SetImportInfo( self, page_key, info ): + + if self._page_key == page_key: self._import_current_info.SetLabel( info ) + + + def TryToClose( self ): + + if self._currently_processing_import_queue: + + with ClientGUIDialogs.DialogYesNo( self, 'This page is still importing. Are you sure you want to close it?' ) as dlg: + + if dlg.ShowModal() == wx.ID_NO: raise Exception() + + + + +class ManagementPanelImportHDD( ManagementPanelImport ): + + def __init__( self, parent, page, page_key, paths, advanced_import_options = {}, paths_to_tags = {} ): + + self._advanced_import_options = advanced_import_options + self._paths_to_tags = paths_to_tags + + ManagementPanelImport.__init__( self, parent, page, page_key ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._MakeSort( vbox ) + + vbox.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._import_pause_button, FLAGS_EXPAND_PERPENDICULAR ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + self.CALLBACKAddToImportQueue( paths ) + + + def _THREADGetImportArgs( self, queue_object ): + + try: + + path = queue_object + + with open( path, 'rb' ) as f: file = f.read() + + if path in self._paths_to_tags: service_identifiers_to_tags = self._paths_to_tags[ path ] + else: service_identifiers_to_tags = {} + + wx.CallAfter( self.CALLBACKImportArgs, file, self._advanced_import_options, service_identifiers_to_tags ) + + except Exception as e: + print( traceback.format_exc() ) + wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e ) + + + def _GetPreprocessStatus( self ): + + status = 'reading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) + + return status + + +class ManagementPanelImportWithQueue( ManagementPanelImport ): + + def __init__( self, parent, page, page_key ): + + ManagementPanelImport.__init__( self, parent, page, page_key ) + + self._connections = {} + + self._import_cancel_button = wx.Button( self, label = 
'that\'s enough' ) + self._import_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelImport ) + self._import_cancel_button.SetForegroundColour( ( 128, 0, 0 ) ) + self._import_cancel_button.Disable() + + self._outer_queue_info = wx.StaticText( self ) + self._outer_queue = wx.ListBox( self, size = ( -1, 200 ) ) + + self._new_queue_input = wx.TextCtrl( self, style=wx.TE_PROCESS_ENTER ) + self._new_queue_input.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self._up = wx.Button( self, label = u'\u2191' ) + self._up.Bind( wx.EVT_BUTTON, self.EventUp ) + + self._remove = wx.Button( self, label = 'X' ) + self._remove.Bind( wx.EVT_BUTTON, self.EventRemove ) + + self._down = wx.Button( self, label = u'\u2193' ) + self._down.Bind( wx.EVT_BUTTON, self.EventDown ) + + self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self ) + + self._outer_queue_timer = wx.Timer( self, id = ID_TIMER_PROCESS_OUTER_QUEUE ) + + self.Bind( wx.EVT_TIMER, self.EventProcessOuterQueue, id = ID_TIMER_PROCESS_OUTER_QUEUE ) + + self._outer_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT ) + + HC.pubsub.sub( self, 'SetOuterQueueInfo', 'set_outer_queue_info' ) + + + def _GetPreprocessStatus( self ): + + status = 'checking url status ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) + + return status + + + def _SetButtons( self ): + + if self._currently_processing_import_queue: + + self._import_pause_button.Enable() + self._import_cancel_button.Enable() + + else: + + self._import_pause_button.Disable() + self._import_cancel_button.Disable() + + + + def EventCancelImport( self, event ): + + self._cancel_import_queue.set() + self._cancel_outer_queue.set() + + if self._pause_import: self.EventPauseImport( event ) + if self._pause_outer_queue: self.EventPauseOuterQueue( event ) + + + def EventPauseOuterQueue( self, event ): pass + + def EventKeyDown( self, event ): + + if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ): + + url = self._new_queue_input.GetValue() + + if url != '': + + self._outer_queue.Append( url, url ) + + self._outer_queue_timer.Start( 10, wx.TIMER_ONE_SHOT ) + + self._new_queue_input.SetValue( '' ) + + + else: event.Skip() + + + def EventUp( self, event ): + + selection = self._outer_queue.GetSelection() + + if selection != wx.NOT_FOUND: + + if selection > 0: + + url = self._outer_queue.GetClientData( selection ) + + self._outer_queue.Delete( selection ) + + self._outer_queue.Insert( url, selection - 1, url ) + + self._outer_queue.Select( selection - 1 ) + + + + + def EventProcessOuterQueue( self, event ): + + if self._pause_outer_queue: self._outer_queue_info.SetLabel( 'paused' ) + else: + + if self._outer_queue.GetCount() > 0 and not self._currently_processing_import_queue and not self._currently_processing_outer_queue: + + self._currently_processing_outer_queue = True + + item = self._outer_queue.GetClientData( 0 ) + + self._outer_queue.Delete( 0 ) + + threading.Thread( target = self._THREADDownloadImportItems, args = ( item, ), name = 'Generate Import Items' ).start() + + + + self._outer_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT ) + + + def EventRemove( self, event ): + + selection = self._outer_queue.GetSelection() + + if selection != wx.NOT_FOUND: self._outer_queue.Delete( selection ) + + + def EventDown( self, event ): + + selection = self._outer_queue.GetSelection() + + if selection != wx.NOT_FOUND: + + if selection + 1 < self._outer_queue.GetCount(): + + url = self._outer_queue.GetClientData( selection ) + + self._outer_queue.Delete( selection ) + + 
self._outer_queue.Insert( url, selection + 1, url ) + + self._outer_queue.Select( selection + 1 ) + + + + + def SetOuterQueueInfo( self, page_key, info ): + + if self._page_key == page_key: self._outer_queue_info.SetLabel( info ) + + + def SetImportInfo( self, page_key, info ): + + if self._page_key == page_key: self._import_current_info.SetLabel( info ) + + + def SetSearchFocus( self, page_key ): + + if page_key == self._page_key: self._new_queue_input.SetFocus() + + +class ManagementPanelImportWithQueueAdvanced( ManagementPanelImportWithQueue ): + + def __init__( self, parent, page, page_key, name, namespaces ): + + ManagementPanelImportWithQueue.__init__( self, parent, page, page_key ) + + self._advanced_tag_options = ClientGUICommon.AdvancedTagOptions( self, 'send ' + name + ' tags to ', namespaces ) + + self._outer_queue_pause_button = wx.Button( self, label = 'pause' ) + self._outer_queue_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseOuterQueue ) + self._outer_queue_pause_button.Disable() + + self._outer_queue_cancel_button = wx.Button( self, label = 'that\'s enough' ) + self._outer_queue_cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelOuterQueue ) + self._outer_queue_cancel_button.SetForegroundColour( ( 128, 0, 0 ) ) + self._outer_queue_cancel_button.Disable() + + c_p_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + c_p_hbox.AddF( self._import_pause_button, FLAGS_EXPAND_BOTH_WAYS ) + c_p_hbox.AddF( self._import_cancel_button, FLAGS_EXPAND_BOTH_WAYS ) + + processing = wx.BoxSizer( wx.VERTICAL ) + + processing.AddF( wx.StaticText( self, label = '- processing -' ), FLAGS_SMALL_INDENT ) + + processing.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR ) + processing.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR ) + processing.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR ) + processing.AddF( c_p_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + queue_buttons_vbox = wx.BoxSizer( wx.VERTICAL ) + + queue_buttons_vbox.AddF( self._up, FLAGS_MIXED ) + queue_buttons_vbox.AddF( self._remove, FLAGS_MIXED ) + queue_buttons_vbox.AddF( self._down, FLAGS_MIXED ) + + queue_pause_buttons_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + queue_pause_buttons_hbox.AddF( self._outer_queue_pause_button, FLAGS_EXPAND_BOTH_WAYS ) + queue_pause_buttons_hbox.AddF( self._outer_queue_cancel_button, FLAGS_EXPAND_BOTH_WAYS ) + + queue_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + queue_hbox.AddF( self._outer_queue, FLAGS_EXPAND_BOTH_WAYS ) + queue_hbox.AddF( queue_buttons_vbox, FLAGS_MIXED ) + + queue_vbox = wx.BoxSizer( wx.VERTICAL ) + + queue_vbox.AddF( wx.StaticText( self, label = '- queue -' ), FLAGS_SMALL_INDENT ) + + queue_vbox.AddF( queue_pause_buttons_hbox, FLAGS_EXPAND_PERPENDICULAR ) + queue_vbox.AddF( self._outer_queue_info, FLAGS_EXPAND_PERPENDICULAR ) + queue_vbox.AddF( queue_hbox, FLAGS_EXPAND_BOTH_WAYS ) + queue_vbox.AddF( self._new_queue_input, FLAGS_EXPAND_PERPENDICULAR ) + + advanced_import_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_import_options.AddF( wx.StaticText( self, label = '- advanced import options -' ), FLAGS_SMALL_INDENT ) + + advanced_import_options.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + + advanced_tag_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_tag_options.AddF( wx.StaticText( self, label = '- advanced tag options -' ), FLAGS_SMALL_INDENT ) + + advanced_tag_options.AddF( self._advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._MakeSort( vbox ) + + vbox.AddF( processing, 
FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( queue_vbox, FLAGS_EXPAND_BOTH_WAYS ) + self._InitExtraVboxElements( vbox ) + vbox.AddF( advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( advanced_tag_options, FLAGS_EXPAND_PERPENDICULAR ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + + # this could be in the advanced_tag_options class + def _DoRedundantTagContentUpdates( self, hash, tags ): + + tag_import_info = self._advanced_tag_options.GetInfo() + + if len( tag_import_info ) > 0: + + content_updates = [] + + for ( service_identifier, namespaces ) in tag_import_info: + + if len( namespaces ) > 0: + + tags_to_add_here = [] + + for namespace in namespaces: + + if namespace == '': tags_to_add_here.extend( [ HC.CleanTag( tag ) for tag in tags if not ':' in tag ] ) + else: tags_to_add_here.extend( [ HC.CleanTag( tag ) for tag in tags if tag.startswith( namespace + ':' ) ] ) + + + if len( tags_to_add_here ) > 0: + + if service_identifier == CC.LOCAL_TAG_SERVICE_IDENTIFIER: action = CC.CONTENT_UPDATE_ADD + else: action = CC.CONTENT_UPDATE_PENDING + + edit_log = [ ( action, tag ) for tag in tags_to_add_here ] + + content_updates.append( CC.ContentUpdate( CC.CONTENT_UPDATE_EDIT_LOG, service_identifier, ( hash, ), info = edit_log ) ) + + + + + if len( content_updates ) > 0: wx.GetApp().Write( 'content_updates', content_updates ) + + + + # this should probably be in the advanced_tag_options class + def _GetServiceIdentifiersToTags( self, tags ): + + tags = [ tag for tag in tags if tag is not None ] + + service_identifiers_to_tags = {} + + for ( service_identifier, namespaces ) in self._advanced_tag_options.GetInfo(): + + if len( namespaces ) > 0: + + tags_to_add_here = [] + + for namespace in namespaces: + + if namespace == '': tags_to_add_here.extend( [ HC.CleanTag( tag ) for tag in tags if not ':' in tag ] ) + else: tags_to_add_here.extend( [ HC.CleanTag( tag ) for tag in tags if tag.startswith( namespace + ':' ) ] ) + + + if len( tags_to_add_here ) > 0: service_identifiers_to_tags[ service_identifier ] = tags_to_add_here + + + + return service_identifiers_to_tags + + + def _InitExtraVboxElements( self, vbox ): pass + + def _SetButtons( self ): + + if self._currently_processing_import_queue: + + self._import_pause_button.Enable() + self._import_cancel_button.Enable() + + else: + + self._import_pause_button.Disable() + self._import_cancel_button.Disable() + + + if self._currently_processing_outer_queue: + + self._outer_queue_pause_button.Enable() + self._outer_queue_cancel_button.Enable() + + else: + + self._outer_queue_pause_button.Disable() + self._outer_queue_cancel_button.Disable() + + + + def EventCancelOuterQueue( self, event ): + + self._cancel_outer_queue.set() + + if self._pause_outer_queue: self.EventPauseOuterQueue( event ) + + + def EventPauseOuterQueue( self, event ): + + if self._pause_outer_queue: + + self._pause_outer_queue = False + + self._outer_queue_pause_button.SetLabel( 'pause' ) + self._outer_queue_pause_button.SetForegroundColour( ( 0, 0, 0 ) ) + + else: + + self._pause_outer_queue = True + + self._outer_queue_pause_button.SetLabel( 'resume' ) + self._outer_queue_pause_button.SetForegroundColour( ( 0, 128, 0 ) ) + + + +class ManagementPanelImportWithQueueAdvancedBooru( ManagementPanelImportWithQueueAdvanced ): + + def __init__( self, parent, page, page_key, booru ): + + self._booru = booru + + name = self._booru.GetName() + namespaces = booru.GetNamespaces() + + ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, 
page_key, name, namespaces ) + + + def _GetImageUrlAndTags( self, html, url ): + + ( search_url, search_separator, gallery_advance_num, thumb_classname, image_id, image_data, tag_classnames_to_namespaces ) = self._booru.GetData() + + ( image_url, tags ) = ClientParsers.ParseBooruPage( html, url, tag_classnames_to_namespaces, image_id = image_id, image_data = image_data ) + + return ( image_url, tags ) + + + def _THREADGetImportArgs( self, queue_object ): + + try: + + url = queue_object + + ( status, hash ) = wx.GetApp().Read( 'url_status', url ) + + if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new' + + if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' ) + elif status == 'redundant': + + ( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) ) + + HC.pubsub.pub( 'add_media_result', self._page_key, media_result ) + + tag_import_info = self._advanced_tag_options.GetInfo() + + if len( tag_import_info ) > 0: + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + html = connection.geturl( url ) + + ( image_url, tags ) = self._GetImageUrlAndTags( html, url ) + + self._DoRedundantTagContentUpdates( hash, tags ) + + + HC.pubsub.pub( 'import_done', self._page_key, 'redundant' ) + + else: + + HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) ) + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + html = connection.geturl( url ) + + ( image_url, tags ) = self._GetImageUrlAndTags( html, url ) + + parse_result = urlparse.urlparse( image_url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + file = connection.geturl( image_url ) + + service_identifiers_to_tags = self._GetServiceIdentifiersToTags( tags ) + + advanced_import_options = self._advanced_import_options.GetInfo() + + wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options, service_identifiers_to_tags, url = url ) + + + except Exception as e: + print( traceback.format_exc() ) + wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e ) + + + def _THREADDownloadImportItems( self, tags_string ): + + # this is important, because we'll instantiate new objects in the eventcancel + + cancel_import = self._cancel_import_queue + cancel_download = self._cancel_outer_queue + + try: + + tags = tags_string.split( ' ' ) + + ( search_url, gallery_advance_num, search_separator, thumb_classname ) = self._booru.GetGalleryParsingInfo() + + urls = [] + + example_url = search_url.replace( '%tags%',
search_separator.join( tags ) ).replace( '%index%', '0' ) + + connection = CC.AdvancedHTTPConnection( url = example_url ) + + if gallery_advance_num == 1: i = 1 # page 1, 2, 3 + else: i = 0 # index 0, 25, 50 + + total_urls_found = 0 + + while True: + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'found ' + str( total_urls_found ) + ' urls' ) + + while self._pause_outer_queue: time.sleep( 1 ) + + if cancel_import.is_set(): break + if cancel_download.is_set(): break + + current_url = search_url.replace( '%tags%', search_separator.join( tags ) ).replace( '%index%', str( i * gallery_advance_num ) ) + + html = connection.geturl( current_url ) + + urls = ClientParsers.ParseBooruGallery( html, current_url, thumb_classname ) + + total_urls_found += len( urls ) + + if len( urls ) == 0: break + else: wx.CallAfter( self.CALLBACKAddToImportQueue, urls ) + + i += 1 + + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, '' ) + + except HC.NotFoundException: pass + except Exception as e: + print( traceback.format_exc() ) + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) ) + + HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key ) + + +class ManagementPanelImportWithQueueAdvancedDeviantArt( ManagementPanelImportWithQueueAdvanced ): + + def __init__( self, parent, page, page_key ): + + name = 'deviant art' + namespaces = [ 'creator', 'title', '' ] + + ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces ) + + self._new_queue_input.SetValue( 'artist username' ) + + + def _THREADGetImportArgs( self, queue_object ): + + try: + + ( url, tags ) = queue_object + + ( status, hash ) = wx.GetApp().Read( 'url_status', url ) + + if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new' + + if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' ) + elif status == 'redundant': + + ( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) ) + + HC.pubsub.pub( 'add_media_result', self._page_key, media_result ) + + self._DoRedundantTagContentUpdates( hash, tags ) + + HC.pubsub.pub( 'import_done', self._page_key, 'redundant' ) + + else: + + HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) ) + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + file = connection.geturl( url ) + + service_identifiers_to_tags = self._GetServiceIdentifiersToTags( tags ) + + advanced_import_options = self._advanced_import_options.GetInfo() + + wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options, service_identifiers_to_tags, url = url ) + + + except HC.NotFoundException: wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = Exception( 'Cannot download full image.' 
) ) + except Exception as e: + print( traceback.format_exc() ) + wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e ) + + + def _THREADDownloadImportItems( self, artist ): + + # this is important, because we'll instantiate new objects in the eventcancel + + cancel_import = self._cancel_import_queue + cancel_download = self._cancel_outer_queue + + try: + + gallery_url = 'http://' + artist + '.deviantart.com/gallery/?catpath=/&offset=' + + example_url = gallery_url + '0' + + connection = CC.AdvancedHTTPConnection( url = example_url ) + + i = 0 + + total_results_found = 0 + + while True: + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'found ' + str( total_results_found ) + ' urls' ) + + while self._pause_outer_queue: time.sleep( 1 ) + + if cancel_import.is_set(): break + if cancel_download.is_set(): break + + current_url = gallery_url + str( i ) + + html = connection.geturl( current_url ) + + results = ClientParsers.ParseDeviantArtGallery( html ) + + total_results_found += len( results ) + + if len( results ) == 0: break + else: wx.CallAfter( self.CALLBACKAddToImportQueue, results ) + + i += 24 + + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, '' ) + + except Exception as e: + print( traceback.format_exc() ) + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) ) + + HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key ) + + +class ManagementPanelImportWithQueueAdvancedHentaiFoundry( ManagementPanelImportWithQueueAdvanced ): + + def __init__( self, parent, page, page_key ): + + name = 'hentai foundry' + namespaces = [ 'creator', 'title', '' ] + + ManagementPanelImportWithQueueAdvanced.__init__( self, parent, page, page_key, name, namespaces ) + + self._session_established = False + + self._new_queue_input.Disable() + + HC.pubsub.sub( self, 'SessionEstablished', 'import_session_established' ) + + threading.Thread( target = self._THREADEstablishSession, name = 'HF Session Thread' ).start() + + + def _InitExtraVboxElements( self, vbox ): + + self._advanced_hentai_foundry_options = ClientGUICommon.AdvancedHentaiFoundryOptions( self ) + + advanced_hentai_foundry_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_hentai_foundry_options.AddF( wx.StaticText( self, label = '- advanced hentai foundry options -' ), FLAGS_SMALL_INDENT ) + + advanced_hentai_foundry_options.AddF( self._advanced_hentai_foundry_options, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( advanced_hentai_foundry_options, FLAGS_EXPAND_PERPENDICULAR ) + + + def _SetFilter( self ): + + filter = self._advanced_hentai_foundry_options.GetInfo() + + cookies = self._search_connection.GetCookies() + + raw_csrf = cookies[ 'YII_CSRF_TOKEN' ] # YII_CSRF_TOKEN=19b05b536885ec60b8b37650a32f8deb11c08cd1s%3A40%3A%222917dcfbfbf2eda2c1fbe43f4d4c4ec4b6902b32%22%3B + + processed_csrf = urllib.unquote( raw_csrf ) # 19b05b536885ec60b8b37650a32f8deb11c08cd1s:40:"2917dcfbfbf2eda2c1fbe43f4d4c4ec4b6902b32"; + + csrf_token = processed_csrf.split( '"' )[1] # the 2917... 
bit + + filter[ 'YII_CSRF_TOKEN' ] = csrf_token + + body = urllib.urlencode( filter ) + + headers = {} + headers[ 'Content-Type' ] = 'application/x-www-form-urlencoded' + + self._search_connection.request( 'POST', '/site/filters', headers = headers, body = body ) + + + def _THREADGetImportArgs( self, queue_object ): + + try: + + url = queue_object + + ( status, hash ) = wx.GetApp().Read( 'url_status', url ) + + if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new' + + if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' ) + elif status == 'redundant': + + ( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) ) + + HC.pubsub.pub( 'add_media_result', self._page_key, media_result ) + + tag_import_info = self._advanced_tag_options.GetInfo() + + if len( tag_import_info ) > 0: + + html = self._page_connection.geturl( url ) + + ( image_url, tags ) = ClientParsers.ParseHentaiFoundryPage( html ) + + self._DoRedundantTagContentUpdates( hash, tags ) + + + HC.pubsub.pub( 'import_done', self._page_key, 'redundant' ) + + else: + + HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) ) + + html = self._page_connection.geturl( url ) + + ( image_url, tags ) = ClientParsers.ParseHentaiFoundryPage( html ) + + parse_result = urlparse.urlparse( image_url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + file = connection.geturl( image_url ) + + service_identifiers_to_tags = self._GetServiceIdentifiersToTags( tags ) + + advanced_import_options = self._advanced_import_options.GetInfo() + + wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options, service_identifiers_to_tags, url = url ) + + + except HC.NotFoundException: wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = Exception( 'Cannot download full image.' 
) ) + except Exception as e: + print( traceback.format_exc() ) + wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e ) + + + def _THREADEstablishSession( self ): + + try: + + self._search_connection = CC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True ) + self._page_connection = CC.AdvancedHTTPConnection( url = 'http://www.hentai-foundry.com', accept_cookies = True ) + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'establishing session with hentai foundry' ) + + # this establishes the php session cookie, the csrf cookie, and tells hf that we are 18 years of age + self._search_connection.request( 'GET', '/?enterAgree=1' ) + self._page_connection.request( 'GET', '/?enterAgree=1' ) + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'session established' ) + + time.sleep( 1 ) + + HC.pubsub.pub( 'import_session_established', self._page_key ) + + except Exception as e: + print( traceback.format_exc() ) + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) ) + + + def SessionEstablished( self, page_key ): + + self._new_queue_input.Enable() + + self._session_established = True + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'session established - ready to download' ) + + +class ManagementPanelImportWithQueueAdvancedHentaiFoundryArtist( ManagementPanelImportWithQueueAdvancedHentaiFoundry ): + + def __init__( self, parent, page, page_key ): + + ManagementPanelImportWithQueueAdvancedHentaiFoundry.__init__( self, parent, page, page_key ) + + self._new_queue_input.SetValue( 'artist username' ) + + + def _THREADDownloadImportItems( self, artist ): + + # this is important, because we'll instantiate new objects in the eventcancel + + cancel_import = self._cancel_import_queue + cancel_download = self._cancel_outer_queue + + try: + + self._SetFilter() + + pictures_done = False + scraps_done = False + + currently_doing = 'pictures' + + total_results_found = 0 + + i = 1 + + while True: + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'found ' + str( total_results_found ) + ' urls' ) + + while self._pause_outer_queue: time.sleep( 1 ) + + if cancel_import.is_set(): break + if cancel_download.is_set(): break + + if currently_doing == 'pictures': gallery_url = 'http://www.hentai-foundry.com/pictures/user/' + artist + else: gallery_url = 'http://www.hentai-foundry.com/pictures/user/' + artist + '/scraps' + + current_url = gallery_url + '/page/' + str( i ) + + html = self._search_connection.geturl( current_url ) + + urls = ClientParsers.ParseHentaiFoundryGallery( html ) + + total_results_found += len( urls ) + + wx.CallAfter( self.CALLBACKAddToImportQueue, urls ) + + if 'class="next"' not in html: + + if currently_doing == 'pictures': pictures_done = True + else: scraps_done = True + + + if pictures_done and scraps_done: break + + if currently_doing == 'pictures': + + if scraps_done: i += 1 + else: currently_doing = 'scraps' + + else: + + if not pictures_done: currently_doing = 'pictures' + + i += 1 + + + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, '' ) + + except Exception as e: + print( traceback.format_exc() ) + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) ) + + HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key ) + + +class ManagementPanelImportWithQueueAdvancedHentaiFoundryTags( ManagementPanelImportWithQueueAdvancedHentaiFoundry ): + + def __init__( self, parent, page, page_key ): + + ManagementPanelImportWithQueueAdvancedHentaiFoundry.__init__( self, parent, 
page, page_key ) + + self._new_queue_input.SetValue( 'search tags' ) + + + def _THREADDownloadImportItems( self, tags_string ): + + # this is important, because we'll instantiate new objects in the eventcancel + + cancel_import = self._cancel_import_queue + cancel_download = self._cancel_outer_queue + + try: + + self._SetFilter() + + tags = tags_string.split( ' ' ) + + gallery_url = 'http://www.hentai-foundry.com/search/pictures?query=' + '+'.join( tags ) + '&search_in=all&scraps=-1&page=' + # scraps = 0 hide + # -1 means show both + # 1 means scraps only. wetf + + total_results_found = 0 + + i = 1 + + while True: + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'found ' + str( total_results_found ) + ' urls' ) + + while self._pause_outer_queue: time.sleep( 1 ) + + if cancel_import.is_set(): break + if cancel_download.is_set(): break + + current_url = gallery_url + str( i ) + + html = self._search_connection.geturl( current_url ) + + urls = ClientParsers.ParseHentaiFoundryGallery( html ) + + total_results_found += len( urls ) + + if 'class="next"' not in html: break + else: wx.CallAfter( self.CALLBACKAddToImportQueue, urls ) + + i += 1 + + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, '' ) + + except Exception as e: + print( traceback.format_exc() ) + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) ) + + HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key ) + + +class ManagementPanelImportWithQueueURL( ManagementPanelImportWithQueue ): + + def __init__( self, parent, page, page_key ): + + ManagementPanelImportWithQueue.__init__( self, parent, page, page_key ) + + c_p_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + c_p_hbox.AddF( self._import_pause_button, FLAGS_EXPAND_BOTH_WAYS ) + c_p_hbox.AddF( self._import_cancel_button, FLAGS_EXPAND_BOTH_WAYS ) + + processing = wx.BoxSizer( wx.VERTICAL ) + + processing.AddF( wx.StaticText( self, label = '- processing -' ), FLAGS_SMALL_INDENT ) + + processing.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR ) + processing.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR ) + processing.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR ) + processing.AddF( c_p_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + queue_buttons_vbox = wx.BoxSizer( wx.VERTICAL ) + + queue_buttons_vbox.AddF( self._up, FLAGS_MIXED ) + queue_buttons_vbox.AddF( self._remove, FLAGS_MIXED ) + queue_buttons_vbox.AddF( self._down, FLAGS_MIXED ) + + queue_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + queue_hbox.AddF( self._outer_queue, FLAGS_EXPAND_BOTH_WAYS ) + queue_hbox.AddF( queue_buttons_vbox, FLAGS_MIXED ) + + queue_vbox = wx.BoxSizer( wx.VERTICAL ) + + queue_vbox.AddF( wx.StaticText( self, label = '- queue -' ), FLAGS_SMALL_INDENT ) + + queue_vbox.AddF( self._outer_queue_info, FLAGS_EXPAND_PERPENDICULAR ) + queue_vbox.AddF( queue_hbox, FLAGS_EXPAND_BOTH_WAYS ) + queue_vbox.AddF( self._new_queue_input, FLAGS_EXPAND_PERPENDICULAR ) + + advanced_import_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_import_options.AddF( wx.StaticText( self, label = '- advanced import options -' ), FLAGS_SMALL_INDENT ) + + advanced_import_options.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._MakeSort( vbox ) + + vbox.AddF( processing, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( queue_vbox, FLAGS_EXPAND_BOTH_WAYS ) + vbox.AddF( advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + + def _THREADGetImportArgs( 
self, queue_object ): + + try: + + url = queue_object + + ( status, hash ) = wx.GetApp().Read( 'url_status', url ) + + if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new' + + if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' ) + elif status == 'redundant': + + ( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) ) + + HC.pubsub.pub( 'add_media_result', self._page_key, media_result ) + HC.pubsub.pub( 'import_done', self._page_key, 'redundant' ) + + else: + + HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) ) + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + file = connection.geturl( url ) + + advanced_import_options = self._advanced_import_options.GetInfo() + + service_identifiers_to_tags = {} + + wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options, service_identifiers_to_tags, url = url ) + + + except Exception as e: + print( traceback.format_exc() ) + wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e ) + + + def _THREADDownloadImportItems( self, url ): + + try: + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'parsing url' ) + + try: + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + except: raise Exception( 'Could not parse that URL' ) + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'Connecting to address' ) + + try: connection = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + except: raise Exception( 'Could not connect to server' ) + + try: html = connection.geturl( url ) + except: raise Exception( 'Could not download that url' ) + + HC.pubsub.pub( 'set_outer_queue_info', self._page_key, 'parsing html' ) + + try: urls = ClientParsers.ParsePage( html, url ) + except: raise Exception( 'Could not parse that URL\'s html' ) + + wx.CallAfter( self.CALLBACKAddToImportQueue, urls ) + + except Exception as e: HC.pubsub.pub( 'set_outer_queue_info', self._page_key, unicode( e ) ) + + HC.pubsub.pub( 'done_adding_to_import_queue', self._page_key ) + + +class ManagementPanelImportThreadWatcher( ManagementPanelImport ): + + def __init__( self, parent, page, page_key ): + + ManagementPanelImport.__init__( self, parent, page, page_key ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._connections = {} + + self._MakeSort( vbox ) + + vbox.AddF( self._import_overall_info, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._import_current_info, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._import_gauge, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._import_pause_button, FLAGS_EXPAND_PERPENDICULAR ) + + thread_vbox = wx.BoxSizer( wx.VERTICAL ) + + thread_vbox.AddF( wx.StaticText( self, label = '- thread checker -' ), FLAGS_SMALL_INDENT ) + + self._thread_info = wx.StaticText( self, label = '' ) + + self._thread_time = wx.SpinCtrl( self, initial = 180, min = 30, max = 1800 ) + + self._thread_input = wx.TextCtrl( self, style = wx.TE_PROCESS_ENTER ) + self._thread_input.Bind( wx.EVT_KEY_DOWN, 
self.EventKeyDown ) + + self._thread_pause_button = wx.Button( self, label = 'pause' ) + self._thread_pause_button.Bind( wx.EVT_BUTTON, self.EventPauseChecker ) + self._thread_pause_button.SetForegroundColour( ( 128, 0, 0 ) ) + self._thread_pause_button.Disable() + + hbox = wx.BoxSizer( wx.HORIZONTAL ) + + hbox.AddF( wx.StaticText( self, label = 'check every ' ), FLAGS_MIXED ) + hbox.AddF( self._thread_time, FLAGS_MIXED ) + hbox.AddF( wx.StaticText( self, label = ' seconds' ), FLAGS_MIXED ) + + thread_vbox.AddF( self._thread_info, FLAGS_EXPAND_PERPENDICULAR ) + thread_vbox.AddF( self._thread_input, FLAGS_EXPAND_PERPENDICULAR ) + thread_vbox.AddF( hbox, FLAGS_EXPAND_PERPENDICULAR ) + thread_vbox.AddF( self._thread_pause_button, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( thread_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + self._advanced_import_options = ClientGUICommon.AdvancedImportOptions( self ) + + advanced_import_options = wx.BoxSizer( wx.VERTICAL ) + + advanced_import_options.AddF( wx.StaticText( self, label = '- advanced import options -' ), FLAGS_SMALL_INDENT ) + + advanced_import_options.AddF( self._advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( advanced_import_options, FLAGS_EXPAND_PERPENDICULAR ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + self._last_thread_check = None + self._4chan_board = None + self._thread_id = None + self._currently_checking_thread = False + self._currently_paused = False + self._image_infos_already_added = set() + + self._outer_queue_timer = wx.Timer( self, id = ID_TIMER_PROCESS_OUTER_QUEUE ) + + self.Bind( wx.EVT_TIMER, self.EventProcessOuterQueue, id = ID_TIMER_PROCESS_OUTER_QUEUE ) + + self._outer_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT ) + + HC.pubsub.sub( self, 'SetThreadInfo', 'set_thread_info' ) + + + def _THREADFetchThread( self ): + + HC.pubsub.pub( 'set_thread_info', self._page_key, 'checking thread' ) + + url = 'http://api.4chan.org/' + self._4chan_board + '/res/' + self._thread_id + '.json' + + try: + + connection = CC.AdvancedHTTPConnection( url = url ) + + raw_json = connection.geturl( url ) + + json_dict = json.loads( raw_json ) + + posts_list = json_dict[ 'posts' ] + + image_infos = [ ( post[ 'md5' ].decode( 'base64' ), str( post[ 'tim' ] ), post[ 'ext' ] ) for post in posts_list if 'md5' in post ] + + image_infos_i_can_add = [ image_info for image_info in image_infos if image_info not in self._image_infos_already_added ] + + self._image_infos_already_added.update( image_infos_i_can_add ) + + if len( image_infos_i_can_add ) > 0: wx.CallAfter( self.CALLBACKAddToImportQueue, image_infos_i_can_add ) + + except HC.NotFoundException: + + HC.pubsub.pub( 'set_thread_info', self._page_key, 'Thread 404' ) + + wx.CallAfter( self._thread_pause_button.Disable ) + + return + + except Exception as e: + + HC.pubsub.pub( 'set_thread_info', self._page_key, unicode( e ) ) + + wx.CallAfter( self._thread_pause_button.Disable ) + + return + + + self._last_thread_check = int( time.time() ) + + self._currently_checking_thread = False + + + def _THREADGetImportArgs( self, queue_object ): + + try: + + ( md5, image_name, ext ) = queue_object + + ( status, hash ) = wx.GetApp().Read( 'md5_status', md5 ) + + if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new' + + if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' ) + elif status == 'redundant': + + ( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( 
hash, ) ) + + HC.pubsub.pub( 'add_media_result', self._page_key, media_result ) + HC.pubsub.pub( 'import_done', self._page_key, 'redundant' ) + + else: + + url = 'http://images.4chan.org/' + self._4chan_board + '/src/' + image_name + ext + + ( status, hash ) = wx.GetApp().Read( 'url_status', url ) + + if status == 'deleted' and 'exclude_deleted_files' not in self._advanced_import_options.GetInfo(): status = 'new' + + if status == 'deleted': HC.pubsub.pub( 'import_done', self._page_key, 'deleted' ) + elif status == 'redundant': + + ( media_result, ) = wx.GetApp().Read( 'media_results', CC.FileSearchContext(), ( hash, ) ) + + HC.pubsub.pub( 'add_media_result', self._page_key, media_result ) + HC.pubsub.pub( 'import_done', self._page_key, 'redundant' ) + + else: + + HC.pubsub.pub( 'set_import_info', self._page_key, 'downloading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) ) + + parse_result = urlparse.urlparse( url ) + + ( scheme, host, port ) = ( parse_result.scheme, parse_result.hostname, parse_result.port ) + + if ( scheme, host, port ) not in self._connections: self._connections[ ( scheme, host, port ) ] = CC.AdvancedHTTPConnection( scheme = scheme, host = host, port = port ) + + connection = self._connections[ ( scheme, host, port ) ] + + file = connection.geturl( url ) + + advanced_import_options = self._advanced_import_options.GetInfo() + + service_identifiers_to_tags = {} + + wx.CallAfter( self.CALLBACKImportArgs, file, advanced_import_options, service_identifiers_to_tags, url = url ) + + + + except Exception as e: + print( traceback.format_exc() ) + wx.CallAfter( self.CALLBACKImportArgs, '', {}, {}, exception = e ) + + + def _GetPreprocessStatus( self ): + + status = 'reading ' + str( self._import_queue_position + 1 ) + '/' + str( len( self._import_queue ) ) + + return status + + + def EventKeyDown( self, event ): + + if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ): + + url = self._thread_input.GetValue() + + if url == '': return + + try: + + try: + + parse_result = urlparse.urlparse( url ) + + host = parse_result.hostname + + request = parse_result.path + + if host is None or request is None: raise Exception() + + except: raise Exception ( 'Could not understand that url!' ) + + if host is None or '4chan.org' not in host: raise Exception( 'This only works for 4chan right now!' ) + + try: ( nothing, board, res, thread_id ) = request.split( '/' ) + except: raise Exception( 'Could not understand the board or thread id!' 
) + + except Exception as e: + + self._thread_info.SetLabel( unicode( e ) ) + + return + + + self._4chan_board = board + self._thread_id = thread_id + + self._last_thread_check = 0 + + self._thread_input.Disable() + self._thread_pause_button.Enable() + + else: event.Skip() + + + def EventProcessOuterQueue( self, event ): + + if self._4chan_board is None: self._thread_info.SetLabel( 'enter a 4chan thread url' ) + elif self._currently_paused: self._thread_info.SetLabel( 'paused' ) + elif not self._currently_checking_thread: + + thread_time = self._thread_time.GetValue() + + if thread_time < 30: thread_time = 30 + + next_thread_check = self._last_thread_check + thread_time + + if next_thread_check < int( time.time() ): + + self._currently_checking_thread = True + + threading.Thread( target = self._THREADFetchThread, name = 'Fetch Thread' ).start() + + else: self._thread_info.SetLabel( 'rechecking thread ' + HC.ConvertTimestampToPrettyPending( next_thread_check ) ) + + + self._outer_queue_timer.Start( 1000, wx.TIMER_ONE_SHOT ) + + + def EventPauseChecker( self, event ): + + if self._currently_paused: + + self._currently_paused = False + + self._thread_pause_button.SetLabel( 'pause' ) + self._thread_pause_button.SetForegroundColour( ( 0, 0, 0 ) ) + + else: + + self._currently_paused = True + + self._thread_pause_button.SetLabel( 'resume' ) + self._thread_pause_button.SetForegroundColour( ( 0, 128, 0 ) ) + + + + def SetSearchFocus( self, page_key ): + + if page_key == self._page_key: self._thread_input.SetFocus() + + + def SetThreadInfo( self, page_key, info ): + + if self._page_key == page_key: self._thread_info.SetLabel( info ) + + +class ManagementPanelPetitions( ManagementPanel ): + + def __init__( self, parent, page, page_key, file_service_identifier ): + + ManagementPanel.__init__( self, parent, page, page_key, file_service_identifier ) + + self._service = wx.GetApp().Read( 'service', file_service_identifier ) + self._can_ban = self._service.GetAccount().HasPermission( HC.MANAGE_USERS ) + + self._num_petitions = None + self._current_petition = None + + self._num_petitions_text = wx.StaticText( self ) + + refresh_num_petitions = wx.Button( self, label = 'refresh' ) + refresh_num_petitions.Bind( wx.EVT_BUTTON, self.EventRefreshNumPetitions ) + + self._get_petition = wx.Button( self, label = 'get petition' ) + self._get_petition.Bind( wx.EVT_BUTTON, self.EventGetPetition ) + self._get_petition.Disable() + + self._petition_info_text_ctrl = wx.TextCtrl( self, style = wx.TE_READONLY | wx.TE_MULTILINE ) + + self._approve = wx.Button( self, label = 'approve' ) + self._approve.Bind( wx.EVT_BUTTON, self.EventApprove ) + self._approve.SetForegroundColour( ( 0, 128, 0 ) ) + self._approve.Disable() + + self._deny = wx.Button( self, label = 'deny' ) + self._deny.Bind( wx.EVT_BUTTON, self.EventDeny ) + self._deny.SetForegroundColour( ( 128, 0, 0 ) ) + self._deny.Disable() + + self._modify_petitioner = wx.Button( self, label = 'modify petitioner' ) + self._modify_petitioner.Bind( wx.EVT_BUTTON, self.EventModifyPetitioner ) + self._modify_petitioner.Disable() + if not self._can_ban: self._modify_petitioner.Hide() + + p_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + p_hbox.AddF( self._approve, FLAGS_EXPAND_BOTH_WAYS ) + p_hbox.AddF( self._deny, FLAGS_EXPAND_BOTH_WAYS ) + + petition_sizer = wx.BoxSizer( wx.VERTICAL ) + + petition_sizer.AddF( wx.StaticText( self, label = '- petition -' ), FLAGS_SMALL_INDENT ) + + petition_sizer.AddF( self._petition_info_text_ctrl, FLAGS_EXPAND_BOTH_WAYS ) + petition_sizer.AddF( 
p_hbox, FLAGS_EXPAND_PERPENDICULAR ) + petition_sizer.AddF( self._modify_petitioner, FLAGS_EXPAND_PERPENDICULAR ) + + num_petitions_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + num_petitions_hbox.AddF( self._num_petitions_text, FLAGS_EXPAND_BOTH_WAYS ) + num_petitions_hbox.AddF( refresh_num_petitions, FLAGS_MIXED ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._MakeSort( vbox ) + self._MakeCollect( vbox ) + + vbox.AddF( num_petitions_hbox, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._get_petition, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( petition_sizer, FLAGS_EXPAND_BOTH_WAYS ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + wx.CallAfter( self.EventRefreshNumPetitions, None ) + + HC.pubsub.sub( self, 'RefreshQuery', 'refresh_query' ) + + + def _DrawCurrentPetition( self ): + + if self._current_petition is None: + + self._petition_info_text_ctrl.SetValue( '' ) + self._approve.Disable() + self._deny.Disable() + + if self._can_ban: self._modify_petitioner.Disable() + + panel = ClientGUIMedia.MediaPanelNoQuery( self._page, self._page_key, self._file_service_identifier ) + + else: + + self._petition_info_text_ctrl.SetValue( self._current_petition.GetPetitionString() ) + self._approve.Enable() + self._deny.Enable() + + if self._can_ban: self._modify_petitioner.Enable() + + search_context = CC.FileSearchContext( self._file_service_identifier ) + + with wx.BusyCursor(): file_query_result = wx.GetApp().Read( 'media_results', search_context, self._current_petition.GetPetitionHashes() ) + + panel = ClientGUIMedia.MediaPanelThumbnails( self._page, self._page_key, self._file_service_identifier, [], file_query_result ) + + panel.Collect( self._page_key, self._collect_by.GetChoice() ) + + panel.Sort( self._page_key, self._sort_by.GetChoice() ) + + + HC.pubsub.pub( 'swap_media_panel', self._page_key, panel ) + + + def _DrawNumPetitions( self ): + + self._num_petitions_text.SetLabel( HC.ConvertIntToPrettyString( self._num_petitions ) + ' petitions' ) + + if self._num_petitions > 0: self._get_petition.Enable() + else: self._get_petition.Disable() + + + def EventApprove( self, event ): + + connection = self._service.GetConnection() + + petition_object = self._current_petition.GetClientPetition() + + connection.Post( 'petitions', petitions = petition_object ) + + if isinstance( self._current_petition, HC.ServerFilePetition ): + + hashes = self._current_petition.GetPetitionHashes() + + content_updates = [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, self._file_service_identifier, hashes ) ] + + elif isinstance( self._current_petition, HC.ServerMappingPetition ): + + ( reason, tag, hashes ) = self._current_petition.GetPetitionInfo() + + content_updates = [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, self._file_service_identifier, hashes, tag ) ] + + + wx.GetApp().Write( 'content_updates', content_updates ) + + self._current_petition = None + + self._DrawCurrentPetition() + + self.EventRefreshNumPetitions( event ) + + + def EventDeny( self, event ): + + connection = self._service.GetConnection() + + petition_object = self._current_petition.GetClientPetitionDenial() + + # needs work + connection.Post( 'petitiondenial', petition_denial = petition_object ) + + self._current_petition = None + + self._DrawCurrentPetition() + + self.EventRefreshNumPetitions( event ) + + + def EventGetPetition( self, event ): + + try: + + connection = self._service.GetConnection() + + self._current_petition = connection.Get( 'petition' ) + + self._DrawCurrentPetition() + + except: + + wx.MessageBox( 
traceback.format_exc() ) + + self._current_petition = None + + self._DrawCurrentPetition() + + + + def EventModifyPetitioner( self, event ): + + with ClientGUIDialogs.DialogModifyAccounts( self, self._file_service_identifier, ( self._current_petition.GetPetitionerIdentifier(), ) ) as dlg: dlg.ShowModal() + + + def EventRefreshNumPetitions( self, event ): + + self._num_petitions_text.SetLabel( u'Fetching\u2026' ) + + try: + + connection = self._service.GetConnection() + + self._num_petitions = connection.Get( 'numpetitions' ) + + self._DrawNumPetitions() + + if self._num_petitions > 0: self.EventGetPetition( event ) + + except Exception as e: self._num_petitions_text.SetLabel( unicode( e ) ) + + + def RefreshQuery( self, page_key ): + + if page_key == self._page_key: self._DrawCurrentPetition() + + +class ManagementPanelQuery( ManagementPanel ): + + def __init__( self, parent, page, page_key, file_service_identifier, initial_predicates = [] ): + + ManagementPanel.__init__( self, parent, page, page_key, file_service_identifier ) + + self._query_key = os.urandom( 32 ) + self._synchronised = True + self._include_current_tags = True + self._include_pending_tags = True + + self._current_predicates_box = ClientGUICommon.TagsBoxPredicates( self, self._page_key, initial_predicates ) + + self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self, self._page_key, self._file_service_identifier, CC.NULL_SERVICE_IDENTIFIER, self._page.GetMedia ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + self._MakeSort( vbox ) + self._MakeCollect( vbox ) + + vbox.AddF( self._current_predicates_box, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._searchbox, FLAGS_EXPAND_PERPENDICULAR ) + + self._MakeCurrentSelectionTagsBox( vbox ) + + self.SetSizer( vbox ) + + if len( initial_predicates ) > 0: wx.CallAfter( self._DoQuery ) + + HC.pubsub.sub( self, 'AddPredicate', 'add_predicate' ) + HC.pubsub.sub( self, 'ChangeFileRepository', 'change_file_repository' ) + HC.pubsub.sub( self, 'ChangeTagRepository', 'change_tag_repository' ) + HC.pubsub.sub( self, 'IncludeCurrent', 'notify_include_current' ) + HC.pubsub.sub( self, 'IncludePending', 'notify_include_pending' ) + HC.pubsub.sub( self, 'SearchImmediately', 'notify_search_immediately' ) + HC.pubsub.sub( self, 'ShowQuery', 'file_query_done' ) + HC.pubsub.sub( self, 'RefreshQuery', 'refresh_query' ) + HC.pubsub.sub( self, 'RemovePredicate', 'remove_predicate' ) + + + def _DoQuery( self ): + + if self._synchronised: + + try: + + current_predicates = self._current_predicates_box.GetPredicates() + + if len( current_predicates ) > 0: + + self._query_key = os.urandom( 32 ) + + include_current = self._include_current_tags + include_pending = self._include_pending_tags + + search_context = CC.FileSearchContext( self._file_service_identifier, self._tag_service_identifier, include_current, include_pending, current_predicates ) + + wx.GetApp().Read( 'do_file_query', self._query_key, search_context ) + + panel = ClientGUIMedia.MediaPanelLoading( self._page, self._page_key, self._file_service_identifier ) + + else: panel = ClientGUIMedia.MediaPanelNoQuery( self._page, self._page_key, self._file_service_identifier ) + + HC.pubsub.pub( 'swap_media_panel', self._page_key, panel ) + + except: wx.MessageBox( traceback.format_exc() ) + + + + def AddPredicate( self, page_key, predicate ): + + if page_key == self._page_key: + + if predicate is not None: + + if predicate in ( 'system:size', 'system:age', 'system:hash', 'system:limit', 'system:numtags', 'system:width', 'system:height', 
'system:ratio', 'system:duration', u'system:mime', u'system:rating', u'system:similar_to' ): + + with ClientGUIDialogs.DialogInputFileSystemPredicate( self, predicate ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: predicate = dlg.GetString() + else: return + + + elif predicate == 'system:untagged': predicate = 'system:numtags=0' + + if self._current_predicates_box.HasPredicate( predicate ): self._current_predicates_box.RemovePredicate( predicate ) + else: + + if predicate in ( 'system:inbox', 'system:archive', 'system:local', 'system:not local' ): + + if predicate == 'system:inbox': removee = 'system:archive' + elif predicate == 'system:archive': removee = 'system:inbox' + elif predicate == 'system:local': removee = 'system:not local' + elif predicate == 'system:not local': removee = 'system:local' + + else: + + if predicate.startswith( '-' ): removee = predicate[1:] + else: removee = '-' + predicate + + + if self._current_predicates_box.HasPredicate( removee ): self._current_predicates_box.RemovePredicate( removee ) + + self._current_predicates_box.AddPredicate( predicate ) + + + + self._DoQuery() + + + + def ChangeFileRepository( self, page_key, service_identifier ): + + if page_key == self._page_key: + + self._file_service_identifier = service_identifier + + self._DoQuery() + + + + def ChangeTagRepository( self, page_key, service_identifier ): + + if page_key == self._page_key: + + self._tag_service_identifier = service_identifier + + current_predicates = self._current_predicates_box.GetPredicates() + + # if we are basing the search on the tag service or there are any regular tags... + if self._file_service_identifier == CC.NULL_SERVICE_IDENTIFIER or False in ( pred.startswith( 'system:' ) for pred in current_predicates ): self._DoQuery() + + + + def IncludeCurrent( self, page_key, value ): + + if page_key == self._page_key: + + self._include_current_tags = value + + self._DoQuery() + + + + def IncludePending( self, page_key, value ): + + if page_key == self._page_key: + + self._include_pending_tags = value + + self._DoQuery() + + + + def RefreshQuery( self, page_key ): + + if page_key == self._page_key: self._DoQuery() + + + def RemovePredicate( self, page_key, predicate ): + + if page_key == self._page_key: + + if self._current_predicates_box.HasPredicate( predicate ): + + self._current_predicates_box.RemovePredicate( predicate ) + + self._DoQuery() + + + + + def SearchImmediately( self, page_key, value ): + + if page_key == self._page_key: + + self._synchronised = value + + self._DoQuery() + + + + def SetSearchFocus( self, page_key ): + + if page_key == self._page_key: self._searchbox.SetFocus() + + + def ShowQuery( self, query_key, file_query_result ): + + try: + + if query_key == self._query_key: + + current_predicates = self._current_predicates_box.GetPredicates() + + panel = ClientGUIMedia.MediaPanelThumbnails( self._page, self._page_key, self._file_service_identifier, current_predicates, file_query_result ) + + panel.Collect( self._page_key, self._collect_by.GetChoice() ) + + panel.Sort( self._page_key, self._sort_by.GetChoice() ) + + HC.pubsub.pub( 'swap_media_panel', self._page_key, panel ) + + + except: wx.MessageBox( traceback.format_exc() ) + + +class ManagementPanelMessages( wx.ScrolledWindow ): + + def __init__( self, parent, page_key, identity ): + + wx.ScrolledWindow.__init__( self, parent, style = wx.BORDER_NONE | wx.HSCROLL | wx.VSCROLL ) + + self.SetScrollRate( 0, 20 ) + + self._page_key = page_key + self._identity = identity + + self._query_key = os.urandom( 32 ) + + 
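+        # as with the query panels above, pubsub traffic is filtered by key: every page
+        # owns a random 32-byte page_key and each search spawns a fresh query_key, so a
+        # handler only acts on results it actually asked for, e.g.
+        #
+        #   if query_key == self._query_key: HC.pubsub.pub( 'set_conversations', self._page_key, conversations )
+        #
+        # results coming back from a superseded search just fail the comparison and get dropped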
# sort out push-refresh later + #self._refresh_inbox = wx.Button( self, label = 'refresh inbox' ) + #self._refresh_inbox.Bind( wx.EVT_BUTTON, self.EventRefreshInbox ) + #self._refresh_inbox.SetForegroundColour( ( 0, 128, 0 ) ) + + actions = wx.BoxSizer( wx.VERTICAL ) + + actions.AddF( wx.StaticText( self, label = '- actions -' ), FLAGS_SMALL_INDENT ) + + self._compose = wx.Button( self, label = 'compose' ) + self._compose.Bind( wx.EVT_BUTTON, self.EventCompose ) + self._compose.SetForegroundColour( ( 0, 128, 0 ) ) + + actions.AddF( self._compose, FLAGS_EXPAND_PERPENDICULAR ) + #vbox.AddF( self._refresh_inbox, FLAGS_EXPAND_PERPENDICULAR ) + + search = wx.BoxSizer( wx.VERTICAL ) + + search.AddF( wx.StaticText( self, label = '- search -' ), FLAGS_SMALL_INDENT ) + + self._current_predicates_box = ClientGUICommon.ListBoxMessagesPredicates( self, self._page_key, [ 'system:inbox' ] ) + + self._synchronised = ClientGUICommon.OnOffButton( self, self._page_key, 'notify_search_immediately', on_label = 'searching immediately', off_label = 'waiting' ) + self._synchronised.SetToolTipString( 'select whether to renew the search as soon as a new predicate is entered' ) + + self._searchbox = ClientGUICommon.AutoCompleteDropdownMessageTerms( self, self._page_key, self._identity ) + + search.AddF( self._current_predicates_box, FLAGS_EXPAND_BOTH_WAYS ) + search.AddF( self._synchronised, FLAGS_EXPAND_PERPENDICULAR ) + search.AddF( self._searchbox, FLAGS_EXPAND_PERPENDICULAR ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( actions, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( search, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + HC.pubsub.sub( self, 'AddPredicate', 'add_predicate' ) + HC.pubsub.sub( self, 'SearchImmediately', 'notify_search_immediately' ) + HC.pubsub.sub( self, 'ShowQuery', 'message_query_done' ) + HC.pubsub.sub( self, 'RefreshQuery', 'refresh_query' ) + HC.pubsub.sub( self, 'RemovePredicate', 'remove_predicate' ) + + wx.CallAfter( self._DoQuery ) + + + def _DoQuery( self ): + + if self._synchronised.IsOn(): + + try: + + current_predicates = self._current_predicates_box.GetPredicates() + + HC.pubsub.pub( 'set_conversations', self._page_key, [] ) + + if len( current_predicates ) > 0: + + self._query_key = os.urandom( 32 ) + + search_context = ClientConstantsMessages.MessageSearchContext( self._identity, current_predicates ) + + wx.GetApp().Read( 'do_message_query', self._query_key, search_context ) + + + except: wx.MessageBox( traceback.format_exc() ) + + + + def AddPredicate( self, page_key, predicate ): + + if page_key == self._page_key: + + if predicate is not None: + + if predicate in ( 'system:started_by', 'system:from', 'system:to', 'system:age', 'system:numattachments' ): + + with ClientGUIDialogs.DialogInputMessageSystemPredicate( self, predicate ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: predicate = dlg.GetString() + else: return + + + elif predicate == 'system:unread': predicate = 'system:status=unread' + elif predicate == 'system:drafts': predicate = 'system:draft' + + if self._current_predicates_box.HasPredicate( predicate ): self._current_predicates_box.RemovePredicate( predicate ) + else: + + if predicate in ( 'system:inbox', 'system:archive' ): + + if predicate == 'system:inbox': removee = 'system:archive' + elif predicate == 'system:archive': removee = 'system:inbox' + + else: + + if predicate.startswith( '-' ): removee = predicate[1:] + else: removee = '-' + predicate + + + if self._current_predicates_box.HasPredicate( removee ): 
self._current_predicates_box.RemovePredicate( removee ) + + self._current_predicates_box.AddPredicate( predicate ) + + + + self._DoQuery() + + + + def EventCompose( self, event ): HC.pubsub.pub( 'new_compose_frame', self._identity ) + + def EventRefreshInbox( self, event ): + + # tell db to do it, and that'll spam the appropriate pubsubs (which will tell this to just refresh query, I think is best) + + pass + + + def RefreshQuery( self, page_key ): + + if page_key == self._page_key: self._DoQuery() + + + def RemovePredicate( self, page_key, predicate ): + + if page_key == self._page_key: + + if self._current_predicates_box.HasPredicate( predicate ): + + self._current_predicates_box.RemovePredicate( predicate ) + + self._DoQuery() + + + + + def SearchImmediately( self, page_key, value ): + + if page_key == self._page_key and value: self._DoQuery() + + + def SetSearchFocus( self, page_key ): + + if page_key == self._page_key: self._searchbox.SetFocus() + + + def ShowQuery( self, query_key, conversations ): + + try: + + if query_key == self._query_key: HC.pubsub.pub( 'set_conversations', self._page_key, conversations ) + + except: wx.MessageBox( traceback.format_exc() ) + + + def TryToClose( self ): + + pass + + # if have a open draft, save it! + + \ No newline at end of file diff --git a/include/ClientGUIMedia.py b/include/ClientGUIMedia.py new file mode 100755 index 00000000..46a69066 --- /dev/null +++ b/include/ClientGUIMedia.py @@ -0,0 +1,1896 @@ +import HydrusConstants as HC +import ClientConstants as CC +import ClientGUICommon +import ClientGUIDialogs +import ClientGUICanvas +import ClientGUIMixins +import itertools +import os +import random +import threading +import traceback +import wx + +# Option Enums + +ID_TIMER_WATERFALL = wx.NewId() +ID_TIMER_ANIMATION = wx.NewId() + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +def AddFileServiceIdentifiersToMenu( menu, file_service_identifiers, phrase, action ): + + if len( file_service_identifiers ) == 1: + + ( s_i, ) = file_service_identifiers + + if action == CC.ID_NULL: id = CC.ID_NULL + else: id = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action, s_i ) + + menu.Append( id, phrase + ' ' + s_i.GetName() ) + + else: + + submenu = wx.Menu() + + for s_i in file_service_identifiers: + + if action == CC.ID_NULL: id = CC.ID_NULL + else: id = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action, s_i ) + + submenu.Append( id, s_i.GetName() ) + + + menu.AppendMenu( CC.ID_NULL, phrase + u'\u2026', submenu ) + + +class MediaPanel( ClientGUIMixins.ListeningMediaList, wx.ScrolledWindow ): + + def __init__( self, parent, page_key, file_service_identifier, predicates, file_query_result ): + + wx.ScrolledWindow.__init__( self, parent, size = ( 0, 0 ) ) + ClientGUIMixins.ListeningMediaList.__init__( self, file_service_identifier, predicates, file_query_result ) + + self.SetDoubleBuffered( True ) + + self._options = wx.GetApp().Read( 'options' ) + + self.SetScrollRate( 0, 50 ) + + self._page_key = page_key + + self._focussed_media = None + self._shift_focussed_media = None + + self._selected_media = set() + + 
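+        # selection state: _selected_media holds everything currently selected,
+        # _focussed_media is the single focussed item (published via 'focus_changed') and
+        # _shift_focussed_media anchors shift-click range selection; _HitMedia looks both
+        # up in _sorted_media_to_indices and selects the whole slice between them, e.g.
+        #
+        #   self._sorted_media[ start_index : end_index + 1 ]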
HC.pubsub.sub( self, 'AddMediaResult', 'add_media_result' ) + HC.pubsub.sub( self, 'SetFocussedMedia', 'set_focus' ) + HC.pubsub.sub( self, 'PageHidden', 'page_hidden' ) + HC.pubsub.sub( self, 'PageShown', 'page_shown' ) + HC.pubsub.sub( self, 'Collect', 'collect_media' ) + HC.pubsub.sub( self, 'Sort', 'sort_media' ) + HC.pubsub.sub( self, 'FileDumped', 'file_dumped' ) + + self._PublishSelectionChange() + + + def _Archive( self ): + + hashes = self._GetSelectedHashes( CC.DISCRIMINANT_INBOX ) + + if len( hashes ) > 0: wx.GetApp().Write( 'content_updates', [ CC.ContentUpdate( CC.CONTENT_UPDATE_ARCHIVE, CC.LOCAL_FILE_SERVICE_IDENTIFIER, hashes ) ] ) + + + def _CopyHashToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( self._focussed_media.GetDisplayMedia().GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _CopyHashesToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( os.linesep.join( [ hash.encode( 'hex' ) for hash in self._GetSelectedHashes() ] ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _CopyLocalUrlToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( 'http://127.0.0.1:45865/file?hash=' + self._focussed_media.GetDisplayMedia().GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _CopyPathToClipboard( self ): + + if wx.TheClipboard.Open(): + + data = wx.TextDataObject( HC.CLIENT_FILES_DIR + os.path.sep + self._focussed_media.GetDisplayMedia().GetHash().encode( 'hex' ) ) + + wx.TheClipboard.SetData( data ) + + wx.TheClipboard.Close() + + else: wx.MessageBox( 'I could not get permission to access the clipboard.' ) + + + def _CustomFilter( self ): + + with ClientGUIDialogs.DialogSetupCustomFilterActions( self ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + actions = dlg.GetActions() + + media_results = self.GenerateMediaResults( discriminant = CC.DISCRIMINANT_LOCAL, selected_media = set( self._selected_media ) ) + + if len( media_results ) > 0: + + try: ClientGUICanvas.CanvasFullscreenMediaListCustomFilter( self.GetTopLevelParent(), self._page_key, self._file_service_identifier, self._predicates, media_results, actions ) + except: wx.MessageBox( traceback.format_exc() ) + + + + + + def _Delete( self, file_service_identifier ): + + if file_service_identifier.GetType() == HC.LOCAL_FILE: + + hashes = self._GetSelectedHashes( CC.DISCRIMINANT_LOCAL ) + + num_to_delete = len( hashes ) + + if num_to_delete: + + if num_to_delete == 1: message = 'Are you sure you want to delete this file?' + else: message = 'Are you sure you want to delete these ' + HC.ConvertIntToPrettyString( num_to_delete ) + ' files?' 
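+                # local deletes are confirmed here and applied straight away as a
+                # CONTENT_UPDATE_DELETE content update; for remote file repositories the
+                # else branch below files a petition (reason 'admin') instead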
+ + with ClientGUIDialogs.DialogYesNo( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: + + try: wx.GetApp().Write( 'content_updates', [ CC.ContentUpdate( CC.CONTENT_UPDATE_DELETE, file_service_identifier, hashes ) ] ) + except: wx.MessageBox( traceback.format_exc() ) + + + + + else: + + hashes = self._GetSelectedHashes() + + wx.GetApp().Write( 'petition_files', file_service_identifier, hashes, 'admin' ) + + + + def _DeselectAll( self ): + + if len( self._selected_media ) > 0: + + for m in self._selected_media: m.Deselect() + + self._ReblitMedia( self._selected_media ) + + self._selected_media = set() + + self._SetFocussedMedia( None ) + self._shift_focussed_media = None + + with wx.FrozenWindow( self ): self._ReblitCanvas() + + self._PublishSelectionChange() + + + + def _DeselectSelect( self, media_to_deselect, media_to_select ): + + if len( media_to_deselect ) > 0: + + for m in media_to_deselect: m.Deselect() + + self._ReblitMedia( media_to_deselect ) + + self._selected_media.difference_update( media_to_deselect ) + + + if len( media_to_select ) > 0: + + for m in media_to_select: m.Select() + + self._ReblitMedia( media_to_select ) + + self._selected_media.update( media_to_select ) + + + self._PublishSelectionChange() + + + def _FullScreen( self, first_media = None ): + + media_results = self.GenerateMediaResults( discriminant = CC.DISCRIMINANT_LOCAL ) + + if len( media_results ) > 0: + + if first_media is None and self._focussed_media is not None: first_media = self._focussed_media + + if first_media is not None and first_media.GetFileServiceIdentifiersCDPP().HasLocal(): first_hash = first_media.GetDisplayMedia().GetHash() + else: first_hash = None + + ClientGUICanvas.CanvasFullscreenMediaListBrowser( self.GetTopLevelParent(), self._page_key, self._file_service_identifier, self._predicates, media_results, first_hash ) + + + + def _Filter( self ): + + media_results = self.GenerateMediaResults( discriminant = CC.DISCRIMINANT_LOCAL, selected_media = set( self._selected_media ) ) + + if len( media_results ) > 0: + + try: ClientGUICanvas.CanvasFullscreenMediaListFilter( self.GetTopLevelParent(), self._page_key, self._file_service_identifier, self._predicates, media_results ) + except: wx.MessageBox( traceback.format_exc() ) + + + + def _GetHashes( self ): return HC.IntelligentMassUnion( [ media.GetHashes() for media in self._sorted_media ] ) + + def _GetNumSelected( self ): return sum( [ media.GetNumFiles() for media in self._selected_media ] ) + + def _GetPrettyStatus( self ): + + num_files = sum( [ media.GetNumFiles() for media in self._sorted_media ] ) + + num_selected = self._GetNumSelected() + + pretty_total_size = self._GetPrettyTotalSelectedSize() + + if num_selected == 0: + + if num_files == 1: return '1 file' + else: return HC.ConvertIntToPrettyString( num_files ) + ' files' + + elif num_selected == 1: return '1 of ' + HC.ConvertIntToPrettyString( num_files ) + ' files selected, ' + pretty_total_size + else: return HC.ConvertIntToPrettyString( num_selected ) + ' of ' + HC.ConvertIntToPrettyString( num_files ) + ' files selected, totalling ' + pretty_total_size + + + def _GetPrettyTotalSelectedSize( self ): + + total_size = sum( [ media.GetSize() for media in self._selected_media ] ) + + unknown_size = False in ( media.IsSizeDefinite() for media in self._selected_media ) + + if total_size == 0: + + if unknown_size: return 'unknown size' + else: return HC.ConvertIntToBytes( 0 ) + + else: + + if unknown_size: return HC.ConvertIntToBytes( total_size ) + ' + some unknown size' 
+ else: return HC.ConvertIntToBytes( total_size ) + + + + def _GetSelectedHashes( self, discriminant = None, not_uploaded_to = None ): return HC.IntelligentMassUnion( ( media.GetHashes( discriminant, not_uploaded_to ) for media in self._selected_media ) ) + + def _GetSimilarTo( self ): + + if self._focussed_media is not None: + + hash = self._focussed_media.GetDisplayMedia().GetHash() + + HC.pubsub.pub( 'new_similar_to', self._file_service_identifier, hash ) + + + + def _HitMedia( self, media, ctrl, shift ): + + if media is None: + + if not ctrl and not shift: self._DeselectAll() + + else: + + if ctrl: + + if media.IsSelected(): + + self._DeselectSelect( ( media, ), () ) + + if self._focussed_media == media: self._SetFocussedMedia( None ) + + else: + + self._DeselectSelect( (), ( media, ) ) + + if self._focussed_media is None: self._SetFocussedMedia( media ) + + + self._shift_focussed_media = None + + elif shift and self._focussed_media is not None: + + if self._shift_focussed_media is None: self._shift_focussed_media = self._focussed_media + + start_index = self._sorted_media_to_indices[ self._shift_focussed_media ] + + end_index = self._sorted_media_to_indices[ media ] + + if start_index < end_index: media_i_want_selected_at_the_end = set( self._sorted_media[ start_index : end_index + 1 ] ) + else: media_i_want_selected_at_the_end = set( self._sorted_media[ end_index : start_index + 1 ] ) + + self._DeselectSelect( self._selected_media - media_i_want_selected_at_the_end, media_i_want_selected_at_the_end - self._selected_media ) + + self._SetFocussedMedia( media ) + + else: + + if not media.IsSelected(): self._DeselectSelect( self._selected_media, ( media, ) ) + else: self._PublishSelectionChange() + + self._SetFocussedMedia( media ) + self._shift_focussed_media = None + + + + if self._focussed_media is not None: + + ( x, y ) = self._GetMediaCoordinates( self._focussed_media ) + + ( start_x, start_y ) = self.GetViewStart() + + ( x_unit, y_unit ) = self.GetScrollPixelsPerUnit() + + ( width, height ) = self.GetClientSize() + + if y < start_y * y_unit: + + y_to_scroll_to = y / y_unit + + self.Scroll( -1, y_to_scroll_to ) + + wx.PostEvent( self, wx.ScrollWinEvent( wx.wxEVT_SCROLLWIN_THUMBRELEASE ) ) + + elif y > ( start_y * y_unit ) + height: + + y_to_scroll_to = ( y - height ) / y_unit + + self.Scroll( -1, y_to_scroll_to + 3 ) + + wx.PostEvent( self, wx.ScrollWinEvent( wx.wxEVT_SCROLLWIN_THUMBRELEASE ) ) + + + + + def _ManageRatings( self ): + + if len( self._selected_media ) > 0: + + service_identifiers = wx.GetApp().Read( 'service_identifiers', HC.RATINGS_SERVICES ) + + if len( service_identifiers ) > 0: + + try: + + with ClientGUIDialogs.DialogManageRatings( None, self._selected_media ) as dlg: dlg.ShowModal() + + self.SetFocus() + + except: wx.MessageBox( traceback.format_exc() ) + + + + + def _ManageTags( self ): + + if len( self._selected_media ) > 0: + + try: + + with ClientGUIDialogs.DialogManageTags( None, self._file_service_identifier, self._selected_media ) as dlg: dlg.ShowModal() + + self.SetFocus() + + except: wx.MessageBox( traceback.format_exc() ) + + + + def _ModifyUploaders( self, file_service_identifier ): + + hashes = self._GetSelectedHashes() + + if hashes is not None and len( hashes ) > 0: + + with ClientGUIDialogs.DialogModifyAccounts( self, file_service_identifier, [ HC.AccountIdentifier( hash = hash ) for hash in hashes ] ) as dlg: dlg.ShowModal() + + self.SetFocus() + + + + def _NewThreadDumper( self ): + + # can't do normal _getselectedhashes because we want to keep 
order! + + args = [ media.GetHashes( CC.DISCRIMINANT_LOCAL ) for media in self._sorted_media if media in self._selected_media ] + + hashes = [ h for h in itertools.chain( *args ) ] + + if len( hashes ) > 0: HC.pubsub.pub( 'new_thread_dumper', hashes ) + + + def _PetitionFiles( self, file_service_identifier ): + + hashes = self._GetSelectedHashes() + + if hashes is not None and len( hashes ) > 0: + + if len( hashes ) == 1: message = 'Enter a reason for this file to be removed from ' + file_service_identifier.GetName() + '.' + else: message = 'Enter a reason for these ' + HC.ConvertIntToPrettyString( len( hashes ) ) + ' files to be removed from ' + file_service_identifier.GetName() + '.' + + with wx.TextEntryDialog( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: wx.GetApp().Write( 'petition_files', file_service_identifier, hashes, dlg.GetValue() ) + + + self.SetFocus() + + + + def _PublishSelectionChange( self ): + + if len( self._selected_media ) == 0: tags_media = self._sorted_media + else: tags_media = self._selected_media + + HC.pubsub.pub( 'new_tags_selection', self._page_key, tags_media ) + HC.pubsub.pub( 'new_page_status', self._page_key, self._GetPrettyStatus() ) + + + def _RatingsFilter( self, service_identifier ): + + if service_identifier is None: + + service_identifier = ClientGUIDialogs.SelectServiceIdentifier( service_types = ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ) + + if service_identifier is None: return + + + media_results = self.GenerateMediaResults( discriminant = CC.DISCRIMINANT_LOCAL, selected_media = set( self._selected_media ), unrated = service_identifier ) + + if len( media_results ) > 0: + + try: ClientGUICanvas.RatingsFilterFrame( self.GetTopLevelParent(), self._page_key, service_identifier, media_results ) + except: wx.MessageBox( traceback.format_exc() ) + + + + def _ReblitMedia( self, media ): pass + + def _ReblitCanvas( self ): pass + + def _RefitCanvas( self ): pass + + def _RemoveMedia( self, singleton_media, collected_media ): + + ClientGUIMixins.ListeningMediaList._RemoveMedia( self, singleton_media, collected_media ) + + self._selected_media.difference_update( singleton_media ) + self._selected_media.difference_update( collected_media ) + + if self._focussed_media not in self._selected_media: self._SetFocussedMedia( None ) + + self._shift_focussed_media = None + + with wx.FrozenWindow( self ): + + self._RefitCanvas() + + self._ReblitCanvas() + + + self._PublishSelectionChange() + + HC.pubsub.pub( 'sorted_media_pulse', self._page_key, self.GenerateMediaResults() ) + + + def _ScrollEnd( self ): + + if len( self._sorted_media ) > 0: self._HitMedia( self._sorted_media[ -1 ], False, False ) + + + def _ScrollHome( self ): + + if len( self._sorted_media ) > 0: self._HitMedia( self._sorted_media[ 0 ], False, False ) + + + def _SelectAll( self ): + + for media in self._sorted_media: media.Select() + + self._selected_media = set( self._sorted_media ) + + with wx.FrozenWindow( self ): self._ReblitCanvas() + + self._PublishSelectionChange() + + + def _SetFocussedMedia( self, media ): + + self._focussed_media = media + + HC.pubsub.pub( 'focus_changed', self._page_key, media ) + + + def _UploadFiles( self, file_service_identifier ): + + hashes = self._GetSelectedHashes( not_uploaded_to = file_service_identifier ) + + if hashes is not None and len( hashes ) > 0: + + try: wx.GetApp().Write( 'add_uploads', file_service_identifier, hashes ) + except Exception as e: wx.MessageBox( unicode( e ) ) + + + + def AddMediaResult( self, page_key, 
media_result ): + + if page_key == self._page_key: return ClientGUIMixins.ListeningMediaList.AddMediaResult( self, media_result ) + + + def Archive( self, hashes ): + + ClientGUIMixins.ListeningMediaList.Archive( self, hashes ) + + affected_media = self._GetMedia( hashes ) + + if len( affected_media ) > 0: self._ReblitMedia( affected_media ) + + self._PublishSelectionChange() + + if self._focussed_media is not None: self._HitMedia( self._focussed_media, False, False ) + + + def Collect( self, page_key, collect_by ): + + if page_key == self._page_key: + + ClientGUIMixins.ListeningMediaList.Collect( self, collect_by ) + + self._DeselectAll() + + with wx.FrozenWindow( self ): self._RefitCanvas() + + # no refresh needed since the sort call that always comes after will do it + + + + def FileDumped( self, page_key, hash, status ): + + if page_key == self._page_key: + + media = self._GetMedia( { hash } ) + + for m in media: m.Dumped( status ) + + self._ReblitMedia( media ) + + + + def PageHidden( self, page_key ): + + if page_key == self._page_key: HC.pubsub.pub( 'focus_changed', self._page_key, None ) + + + def PageShown( self, page_key ): + + if page_key == self._page_key: + + HC.pubsub.pub( 'focus_changed', self._page_key, self._focussed_media ) + + self._PublishSelectionChange() + + + + def ProcessContentUpdates( self, content_updates ): + + ClientGUIMixins.ListeningMediaList.ProcessContentUpdates( self, content_updates ) + + for content_update in content_updates: + + service_identifier = content_update.GetServiceIdentifier() + + service_type = service_identifier.GetType() + + hashes = content_update.GetHashes() + + affected_media = self._GetMedia( hashes ) + + action = content_update.GetAction() + + if action == CC.CONTENT_UPDATE_DELETE and service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE ) and self._focussed_media in affected_media: self._SetFocussedMedia( None ) + + if len( affected_media ) > 0: self._ReblitMedia( affected_media ) + + + self._PublishSelectionChange() + + if self._focussed_media is not None: self._HitMedia( self._focussed_media, False, False ) + + + def ProcessServiceUpdate( self, update ): + + ClientGUIMixins.ListeningMediaList.ProcessServiceUpdate( self, update ) + + action = update.GetAction() + + service_identifier = update.GetServiceIdentifier() + + if action in ( CC.SERVICE_UPDATE_DELETE_PENDING, CC.SERVICE_UPDATE_RESET ): + + with wx.FrozenWindow( self ): + + self._RefitCanvas() + + self._ReblitCanvas() + + + + self._PublishSelectionChange() + + + def SetFocussedMedia( self, page_key, media ): + + if page_key == self._page_key: + + if media is None: self._SetFocussedMedia( None ) + else: + + try: + + my_media = self._GetMedia( media.GetHashes() )[0] + + self._HitMedia( my_media, False, False ) + + except: pass + + + + + def Sort( self, page_key, sort_by ): + + if page_key == self._page_key: + + ClientGUIMixins.ListeningMediaList.Sort( self, sort_by ) + + with wx.FrozenWindow( self ): self._ReblitCanvas() + + + HC.pubsub.pub( 'sorted_media_pulse', self._page_key, self.GenerateMediaResults() ) + + +class MediaPanelNoQuery( MediaPanel ): + + def __init__( self, parent, page_key, file_service_identifier ): MediaPanel.__init__( self, parent, page_key, file_service_identifier, [], CC.FileQueryResult( file_service_identifier, [], [] ) ) + + def _GetPrettyStatus( self ): return 'No query' + + def GetSortedMedia( self ): return None + +class MediaPanelLoading( MediaPanel ): + + def __init__( self, parent, page_key, file_service_identifier ): MediaPanel.__init__( self, 
parent, page_key, file_service_identifier, [], CC.FileQueryResult( file_service_identifier, [], [] ) ) + + def _GetPrettyStatus( self ): return u'Loading\u2026' + + def GetSortedMedia( self ): return None + +class MediaPanelThumbnails( MediaPanel ): + + def __init__( self, parent, page_key, file_service_identifier, predicates, file_query_result ): + + MediaPanel.__init__( self, parent, page_key, file_service_identifier, predicates, file_query_result ) + + self._num_columns = 1 + self._num_rows_in_client_height = 0 + self._last_visible_row = 0 + + self._timer_waterfall = wx.Timer( self, ID_TIMER_WATERFALL ) + self._thumbnails_to_waterfall = [] + + self._timer_animation = wx.Timer( self, ID_TIMER_ANIMATION ) + self._thumbnails_being_faded_in = {} + + self._drawn_up_to = 0 + + self._thumbnail_span_dimensions = CC.AddPaddingToDimensions( wx.GetApp().Read( 'options' )[ 'thumbnail_dimensions' ], ( CC.THUMBNAIL_BORDER + CC.THUMBNAIL_MARGIN ) * 2 ) + + ( thumbnail_span_width, thumbnail_span_height ) = self._thumbnail_span_dimensions + + self.SetScrollRate( 0, thumbnail_span_height ) + + self._canvas_bmp = wx.EmptyBitmap( 0, 0 ) + + self.Bind( wx.EVT_SCROLLWIN, self.EventScroll ) + self.Bind( wx.EVT_LEFT_DOWN, self.EventSelection ) + self.Bind( wx.EVT_RIGHT_UP, self.EventShowMenu ) + self.Bind( wx.EVT_LEFT_DCLICK, self.EventMouseFullScreen ) + self.Bind( wx.EVT_MIDDLE_DOWN, self.EventMouseFullScreen ) + self.Bind( wx.EVT_PAINT, self.EventPaint ) + self.Bind( wx.EVT_SIZE, self.EventResize ) + self.Bind( wx.EVT_TIMER, self.EventTimerWaterfall, id = ID_TIMER_WATERFALL ) + self.Bind( wx.EVT_TIMER, self.EventTimerAnimation, id = ID_TIMER_ANIMATION ) + + self.Bind( wx.EVT_KEY_DOWN, self.EventKeyDown ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.RefreshAcceleratorTable() + + HC.pubsub.sub( self, 'NewThumbnails', 'new_thumbnails' ) + HC.pubsub.sub( self, 'ThumbnailsResized', 'thumbnail_resize' ) + HC.pubsub.sub( self, 'RefreshAcceleratorTable', 'options_updated' ) + + + def _BlitThumbnail( self, thumbnail ): + + ( x, y ) = self._GetMediaCoordinates( thumbnail ) + + if ( x, y ) != ( -1, -1 ): + + self._thumbnails_being_faded_in[ ( thumbnail.GetBmp(), x, y ) ] = 0.0 + + if not self._timer_animation.IsRunning(): self._timer_animation.Start( 0, wx.TIMER_ONE_SHOT ) + + + + def _CalculateLastVisibleRow( self ): + + ( x, y ) = self.GetViewStart() + + ( xUnit, yUnit ) = self.GetScrollPixelsPerUnit() + + y_offset = y * yUnit + + ( my_client_width, my_client_height ) = self.GetClientSize() + + y_end = y_offset + my_client_height + + ( thumbnail_span_width, thumbnail_span_height ) = self._thumbnail_span_dimensions + + total_thumbs_to_end = ( y_end / thumbnail_span_height ) + 1 + + return total_thumbs_to_end + + + def _ExportFiles( self ): + + job_key = os.urandom( 32 ) + + cancel_event = threading.Event() + + with ClientGUIDialogs.DialogProgress( self, job_key, cancel_event ) as dlg: + + wx.GetApp().Write( 'export_files', job_key, self._GetSelectedHashes( CC.DISCRIMINANT_LOCAL ), cancel_event ) + + dlg.ShowModal() + + + + def _GenerateMediaCollection( self, media_results ): return ThumbnailMediaCollection( self._file_service_identifier, self._predicates, media_results ) + + def _GenerateMediaSingleton( self, media_result ): return ThumbnailMediaSingleton( self._file_service_identifier, media_result ) + + def _GetMediaCoordinates( self, media ): + + try: index = self._sorted_media_to_indices[ media ] + except: return ( -1, -1 ) + + row = index / self._num_columns + column = index % self._num_columns + + ( 
thumbnail_span_width, thumbnail_span_height ) = self._thumbnail_span_dimensions + + ( x, y ) = ( column * thumbnail_span_width + CC.THUMBNAIL_MARGIN, row * thumbnail_span_height + CC.THUMBNAIL_MARGIN ) + + return ( x, y ) + + + def _GetScrolledDC( self ): + + cdc = wx.ClientDC( self ) + + self.DoPrepareDC( cdc ) # because this is a scrolled window + + return wx.BufferedDC( cdc, self._canvas_bmp ) + + + def _GetThumbnailUnderMouse( self, mouse_event ): + + ( xUnit, yUnit ) = self.GetScrollPixelsPerUnit() + + ( x_scroll, y_scroll ) = self.GetViewStart() + + y_offset = y_scroll * yUnit + + x = mouse_event.GetX() + y = mouse_event.GetY() + y_offset + + ( t_span_x, t_span_y ) = self._thumbnail_span_dimensions + + x_mod = x % t_span_x + y_mod = y % t_span_y + + if x_mod <= CC.THUMBNAIL_MARGIN or y_mod <= CC.THUMBNAIL_MARGIN or x_mod > t_span_x - CC.THUMBNAIL_MARGIN or y_mod > t_span_y - CC.THUMBNAIL_MARGIN: return None + + column_index = ( x / t_span_x ) + row_index = ( y / t_span_y ) + + if column_index >= self._num_columns: return None + + thumbnail_index = self._num_columns * row_index + column_index + + if thumbnail_index >= len( self._sorted_media ): return None + + return self._sorted_media[ thumbnail_index ] + + + def _MoveFocussedThumbnail( self, rows, columns, shift ): + + if self._focussed_media is not None: + + current_position = self._sorted_media_to_indices[ self._focussed_media ] + + new_position = current_position + columns + ( self._num_columns * rows ) + + if new_position < 0: new_position = 0 + elif new_position > len( self._sorted_media ) - 1: new_position = len( self._sorted_media ) - 1 + + self._HitMedia( self._sorted_media[ new_position ], False, shift ) + + + + def _ReblitMedia( self, thumbnails ): [ self._BlitThumbnail( t ) for t in thumbnails if t.IsLoaded() ] + + def _ReblitCanvas( self ): + + ( canvas_width, canvas_height ) = self._canvas_bmp.GetSize() + + ( thumbnail_width, thumbnail_height ) = self._thumbnail_span_dimensions + + to_row = canvas_height / thumbnail_height + + if to_row > 0: + + dc = self._GetScrolledDC() + + from_row = 0 + + num_rows_to_draw = ( to_row - from_row ) + 1 # +1 because caller assumes it is inclusive + + ( thumbnail_span_width, thumbnail_span_height ) = self._thumbnail_span_dimensions + + ( my_width, my_height ) = self._canvas_bmp.GetSize() + + dc.SetBrush( wx.Brush( wx.WHITE ) ) + + dc.SetPen( wx.TRANSPARENT_PEN ) + + begin_white_y = ( from_row ) * ( thumbnail_span_height + CC.THUMBNAIL_MARGIN ) + height_white_y = num_rows_to_draw * ( thumbnail_span_height + CC.THUMBNAIL_MARGIN ) + + dc.DrawRectangle( 0, begin_white_y, my_width, height_white_y ) # this incremental clear is so we don't have to do a potentially _very_ expensive (0.4s or more!) 
dc.Clear() + + thumbnails_to_render_later = [] + + first_index = from_row * self._num_columns + + last_index = first_index + ( num_rows_to_draw * self._num_columns ) + + for ( sub_index, thumbnail ) in enumerate( self._sorted_media[ first_index : last_index ] ): + + current_row = from_row + ( sub_index / self._num_columns ) + + current_col = sub_index % self._num_columns + + if thumbnail.IsLoaded(): dc.DrawBitmap( thumbnail.GetBmp(), current_col * thumbnail_span_width + CC.THUMBNAIL_MARGIN, current_row * thumbnail_span_height + CC.THUMBNAIL_MARGIN ) + else: thumbnails_to_render_later.append( thumbnail ) + + + self._last_visible_row = to_row + + self._thumbnails_to_waterfall = thumbnails_to_render_later + self._thumbnails_being_faded_in = {} + + random.shuffle( self._thumbnails_to_waterfall ) + + if not self._timer_waterfall.IsRunning(): self._timer_waterfall.Start( 20, wx.TIMER_ONE_SHOT ) + + + + def _RefitCanvas( self ): + + ( client_width, client_height ) = self.GetClientSize() + + if client_width > 0 and client_height > 0: + + ( thumbnail_width, thumbnail_height ) = self._thumbnail_span_dimensions + + num_media = len( self._sorted_media ) + + num_rows = num_media / self._num_columns + + if num_media % self._num_columns > 0: num_rows += 1 + + last_visible_row = min( int( self._CalculateLastVisibleRow() * 1.5 ), num_rows ) + + canvas_width = client_width + + canvas_height = max( last_visible_row * thumbnail_height, client_height ) + + if ( canvas_width, canvas_height ) != self._canvas_bmp.GetSize(): self._canvas_bmp = wx.EmptyBitmap( canvas_width, canvas_height, 24 ) + + virtual_width = client_width + + virtual_height = max( num_rows * thumbnail_height, client_height ) + + if ( virtual_width, virtual_height ) != self.GetVirtualSize(): self.SetVirtualSize( ( virtual_width, virtual_height ) ) + + + + def AddMediaResult( self, page_key, media_result ): + + if page_key == self._page_key: + + num_media = len( self._sorted_media ) + + old_num_rows = num_media / self._num_columns + + if num_media % self._num_columns > 0: old_num_rows += 1 + + media = MediaPanel.AddMediaResult( self, page_key, media_result ) + + num_media = len( self._sorted_media ) + + num_rows = num_media / self._num_columns + + if num_media % self._num_columns > 0: num_rows += 1 + + if old_num_rows != num_rows: + + with wx.FrozenWindow( self ): + + self._RefitCanvas() + + self._ReblitCanvas() + + + + if num_rows < self._last_visible_row: self._BlitThumbnail( media ) + + self._PublishSelectionChange() + + + + def EventKeyDown( self, event ): + + # accelerator tables can't handle escape key in windows, gg + + if event.GetKeyCode() == wx.WXK_ESCAPE: self._DeselectAll() + else: event.Skip() + + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command == 'archive': self._Archive() + elif command == 'copy_files': + with wx.BusyCursor(): wx.GetApp().Write( 'copy_files', self._GetSelectedHashes( CC.DISCRIMINANT_LOCAL ) ) + elif command == 'copy_hash': self._CopyHashToClipboard() + elif command == 'copy_hashes': self._CopyHashesToClipboard() + elif command == 'copy_local_url': self._CopyLocalUrlToClipboard() + elif command == 'copy_path': self._CopyPathToClipboard() + elif command == 'ctrl-space': + + if self._focussed_media is not None: self._HitMedia( self._focussed_media, True, False ) + + elif command == 'custom_filter': self._CustomFilter() + elif command == 'delete': self._Delete( data ) + elif command 
== 'deselect': self._DeselectAll() + elif command == 'download': wx.GetApp().Write( 'add_downloads', data, self._GetSelectedHashes( CC.DISCRIMINANT_NOT_LOCAL ) ) + elif command == 'export': self._ExportFiles() + elif command == 'filter': self._Filter() + elif command == 'fullscreen': self._FullScreen() + elif command == 'get_similar_to': self._GetSimilarTo() + elif command == 'manage_ratings': self._ManageRatings() + elif command == 'manage_tags': self._ManageTags() + elif command == 'modify_account': self._ModifyUploaders( data ) + elif command == 'new_thread_dumper': self._NewThreadDumper() + elif command == 'petition': self._PetitionFiles( data ) + elif command == 'ratings_filter': self._RatingsFilter( data ) + elif command == 'scroll_end': self._ScrollEnd() + elif command == 'scroll_home': self._ScrollHome() + elif command == 'select_all': self._SelectAll() + elif command == 'upload': self._UploadFiles( data ) + elif command == 'key_up': self._MoveFocussedThumbnail( -1, 0, False ) + elif command == 'key_down': self._MoveFocussedThumbnail( 1, 0, False ) + elif command == 'key_left': self._MoveFocussedThumbnail( 0, -1, False ) + elif command == 'key_right': self._MoveFocussedThumbnail( 0, 1, False ) + elif command == 'key_shift_up': self._MoveFocussedThumbnail( -1, 0, True ) + elif command == 'key_shift_down': self._MoveFocussedThumbnail( 1, 0, True ) + elif command == 'key_shift_left': self._MoveFocussedThumbnail( 0, -1, True ) + elif command == 'key_shift_right': self._MoveFocussedThumbnail( 0, 1, True ) + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + + + + + def EventMouseFullScreen( self, event ): + + t = self._GetThumbnailUnderMouse( event ) + + if t is not None: + + if t.GetFileServiceIdentifiersCDPP().HasLocal(): self._FullScreen( t ) + elif self._file_service_identifier != CC.NULL_SERVICE_IDENTIFIER: wx.GetApp().Write( 'add_downloads', self._file_service_identifier, t.GetHashes() ) + + + + def EventPaint( self, event ): wx.BufferedPaintDC( self, self._canvas_bmp, wx.BUFFER_VIRTUAL_AREA ) + + def EventResize( self, event ): + + old_numcols = self._num_columns + old_numclientrows = self._num_rows_in_client_height + + ( client_width, client_height ) = self.GetClientSize() + + ( thumbnail_width, thumbnail_height ) = self._thumbnail_span_dimensions + + self._num_columns = client_width / thumbnail_width + + if self._num_columns == 0: self._num_columns = 1 + + num_media = len( self._sorted_media ) + + num_rows = num_media / self._num_columns + + if num_media % self._num_columns > 0: num_rows += 1 + + if self._num_columns != old_numcols: + + with wx.FrozenWindow( self ): + + self._RefitCanvas() + + self._ReblitCanvas() + + + else: + + # the client is the actual window, remember, not the scrollable virtual bmp + + self._num_rows_in_client_height = client_height / thumbnail_height + + if client_height % thumbnail_height > 0: self._num_rows_in_client_height += 1 + + if self._num_rows_in_client_height > old_numclientrows: + + with wx.FrozenWindow( self ): self._ReblitCanvas() + + + + + def EventSelection( self, event ): + + self._HitMedia( self._GetThumbnailUnderMouse( event ), event.CmdDown(), event.ShiftDown() ) + + event.Skip() + + + def EventShowMenu( self, event ): + + thumbnail = self._GetThumbnailUnderMouse( event ) + + menu = wx.Menu() + + if thumbnail is None: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select_all' ), 'select all' ) + else: + + self._HitMedia( thumbnail, event.CmdDown(), event.ShiftDown() ) + + if self._focussed_media is 
not None: + + # variables + + num_selected = self._GetNumSelected() + + multiple_selected = num_selected > 1 + + services = wx.GetApp().Read( 'services' ) + + tag_repositories = [ service for service in services if service.GetServiceIdentifier().GetType() == HC.TAG_REPOSITORY ] + + file_repositories = [ service for service in services if service.GetServiceIdentifier().GetType() == HC.FILE_REPOSITORY ] + + local_ratings_services = [ service for service in services if service.GetServiceIdentifier().GetType() in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ) ] + + i_can_post_ratings = len( local_ratings_services ) > 0 + + downloadable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.GET_DATA ) } + uploadable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.POST_DATA ) } + petition_resolvable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.RESOLVE_PETITIONS ) } + petitionable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.POST_PETITIONS ) } - petition_resolvable_file_service_identifiers + user_manageable_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.MANAGE_USERS ) } + admin_file_service_identifiers = { repository.GetServiceIdentifier() for repository in file_repositories if repository.GetAccount().HasPermission( HC.GENERAL_ADMIN ) } + + all_service_identifiers = [ media.GetFileServiceIdentifiersCDPP() for media in self._selected_media ] + + selection_has_local = True in ( s_is.HasLocal() for s_is in all_service_identifiers ) + selection_has_inbox = True in ( media.HasInbox() for media in self._selected_media ) + + if multiple_selected: + + uploaded_phrase = 'all uploaded to' + pending_phrase = 'all pending to' + petitioned_phrase = 'all petitioned from' + deleted_phrase = 'all deleted from' + + download_phrase = 'download all possible from' + upload_phrase = 'upload all possible to' + petition_phrase = 'petition all possible for removal from' + remote_delete_phrase = 'delete all possible from' + modify_account_phrase = 'modify the accounts that uploaded these to' + + manage_tags_phrase = 'manage tags for all' + manage_ratings_phrase = 'manage ratings for all' + + archive_phrase = 'archive all' + local_delete_phrase = 'delete all' + dump_phrase = 'dump all' + export_phrase = 'export all' + copy_phrase = 'files' + + else: + + uploaded_phrase = 'uploaded to' + pending_phrase = 'pending to' + petitioned_phrase = 'petitioned from' + deleted_phrase = 'deleted from' + + download_phrase = 'download from' + upload_phrase = 'upload to' + petition_phrase = 'petition for removal from' + remote_delete_phrase = 'delete from' + modify_account_phrase = 'modify the account that uploaded this to' + + manage_tags_phrase = 'manage tags' + manage_ratings_phrase = 'manage ratings' + + archive_phrase = 'archive' + local_delete_phrase = 'delete' + dump_phrase = 'dump' + export_phrase = 'export' + copy_phrase = 'file' + + + # info about the files + + all_current_file_service_identifiers = [ service_identifiers.GetCurrentRemote() for service_identifiers in all_service_identifiers ] + + current_file_service_identifiers = 
HC.IntelligentMassIntersect( all_current_file_service_identifiers ) + + some_current_file_service_identifiers = HC.IntelligentMassUnion( all_current_file_service_identifiers ) - current_file_service_identifiers + + all_pending_file_service_identifiers = [ service_identifiers.GetPendingRemote() for service_identifiers in all_service_identifiers ] + + pending_file_service_identifiers = HC.IntelligentMassIntersect( all_pending_file_service_identifiers ) + + some_pending_file_service_identifiers = HC.IntelligentMassUnion( all_pending_file_service_identifiers ) - pending_file_service_identifiers + + all_petitioned_file_service_identifiers = [ service_identifiers.GetPetitionedRemote() for service_identifiers in all_service_identifiers ] + + petitioned_file_service_identifiers = HC.IntelligentMassIntersect( all_petitioned_file_service_identifiers ) + + some_petitioned_file_service_identifiers = HC.IntelligentMassUnion( all_petitioned_file_service_identifiers ) - petitioned_file_service_identifiers + + all_deleted_file_service_identifiers = [ service_identifiers.GetDeletedRemote() for service_identifiers in all_service_identifiers ] + + deleted_file_service_identifiers = HC.IntelligentMassIntersect( all_deleted_file_service_identifiers ) + + some_deleted_file_service_identifiers = HC.IntelligentMassUnion( all_deleted_file_service_identifiers ) - deleted_file_service_identifiers + + # valid commands for the files + + selection_uploadable_file_service_identifiers = set() + + for s_is in all_service_identifiers: + + # we can upload (set pending) to a repo_id when we have permission, a file is local, not current, not pending, and either ( not deleted or admin ) + + if s_is.HasLocal(): selection_uploadable_file_service_identifiers.update( uploadable_file_service_identifiers - s_is.GetCurrentRemote() - s_is.GetPendingRemote() - ( s_is.GetDeletedRemote() - admin_file_service_identifiers ) ) + + + selection_downloadable_file_service_identifiers = set() + + for s_is in all_service_identifiers: + + # we can download (set pending to local) when we have permission, a file is not local and not already downloading and current + + if not s_is.HasLocal() and not s_is.HasDownloading(): selection_downloadable_file_service_identifiers.update( downloadable_file_service_identifiers & s_is.GetCurrentRemote() ) + + + selection_petitionable_file_service_identifiers = set() + + for s_is in all_service_identifiers: + + # we can petition when we have permission and a file is current + # we can re-petition an already petitioned file + + selection_petitionable_file_service_identifiers.update( petitionable_file_service_identifiers & s_is.GetCurrentRemote() ) + + + selection_deletable_file_service_identifiers = set() + + for s_is in all_service_identifiers: + + # we can delete remote when we have permission and a file is current and it is not already petitioned + + selection_deletable_file_service_identifiers.update( ( petition_resolvable_file_service_identifiers & s_is.GetCurrentRemote() ) - s_is.GetPetitionedRemote() ) + + + selection_modifyable_file_service_identifiers = set() + + for s_is in all_service_identifiers: + + # we can modify users when we have permission and the file is current or deleted + + selection_modifyable_file_service_identifiers.update( user_manageable_file_service_identifiers & ( s_is.GetCurrentRemote() | s_is.GetDeletedRemote() ) ) + + + # do the actual menu + + if multiple_selected: menu.Append( CC.ID_NULL, HC.ConvertIntToPrettyString( num_selected ) + ' files, ' + self._GetPrettyTotalSelectedSize() ) 
+ else: + + menu.Append( CC.ID_NULL, thumbnail.GetPrettyInfo() ) + menu.Append( CC.ID_NULL, thumbnail.GetPrettyAge() ) + + + if len( some_current_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, some_current_file_service_identifiers, 'some uploaded to', CC.ID_NULL ) + + if len( current_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, current_file_service_identifiers, uploaded_phrase, CC.ID_NULL ) + + if len( some_pending_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, some_pending_file_service_identifiers, 'some pending to', CC.ID_NULL ) + + if len( pending_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, pending_file_service_identifiers, pending_phrase, CC.ID_NULL ) + + if len( some_petitioned_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, some_petitioned_file_service_identifiers, 'some petitioned from', CC.ID_NULL ) + + if len( petitioned_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, petitioned_file_service_identifiers, petitioned_phrase, CC.ID_NULL ) + + if len( some_deleted_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, some_deleted_file_service_identifiers, 'some deleted from', CC.ID_NULL ) + + if len( deleted_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, deleted_file_service_identifiers, deleted_phrase, CC.ID_NULL ) + + menu.AppendSeparator() + + if len( selection_downloadable_file_service_identifiers ) > 0 or len( selection_uploadable_file_service_identifiers ) > 0 or len( selection_petitionable_file_service_identifiers ) > 0 or len( selection_deletable_file_service_identifiers ) > 0 or len( selection_modifyable_file_service_identifiers ) > 0: + + if len( selection_downloadable_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, selection_downloadable_file_service_identifiers, download_phrase, 'download' ) + + if len( selection_uploadable_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, selection_uploadable_file_service_identifiers, upload_phrase, 'upload' ) + + if len( selection_petitionable_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, selection_petitionable_file_service_identifiers, petition_phrase, 'petition' ) + + if len( selection_deletable_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, selection_deletable_file_service_identifiers, remote_delete_phrase, 'delete' ) + + if len( selection_modifyable_file_service_identifiers ) > 0: AddFileServiceIdentifiersToMenu( menu, selection_modifyable_file_service_identifiers, modify_account_phrase, 'modify_account' ) + + menu.AppendSeparator() + + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_tags' ), manage_tags_phrase ) + + if i_can_post_ratings: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'manage_ratings' ), manage_ratings_phrase ) + + menu.AppendSeparator() + + if selection_has_local: + + if multiple_selected: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'filter' ), 'filter' ) + + if i_can_post_ratings: + + ratings_filter_menu = wx.Menu() + + for service in local_ratings_services: ratings_filter_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'ratings_filter', service.GetServiceIdentifier() ), service.GetServiceIdentifier().GetName() ) + + menu.AppendMenu( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'ratings_filter' ), 'ratings filter', ratings_filter_menu ) + + + if multiple_selected: menu.Append( 
CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'custom_filter' ), 'custom filter' ) + + if multiple_selected or i_can_post_ratings: menu.AppendSeparator() + + if selection_has_inbox: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'archive' ), archive_phrase ) + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ), local_delete_phrase ) + + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'export' ), export_phrase ) + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'new_thread_dumper' ), dump_phrase ) + + menu.AppendSeparator() + + copy_menu = wx.Menu() + + copy_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_files' ), copy_phrase ) + copy_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_hash' ) , 'hash' ) + if multiple_selected: copy_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_hashes' ) , 'hashes' ) + copy_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_path' ) , 'path' ) + copy_menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_local_url' ) , 'local url' ) + + menu.AppendMenu( CC.ID_NULL, 'copy', copy_menu ) + + if self._focussed_media.HasImages(): + + menu.AppendSeparator() + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'get_similar_to' ) , 'find very similar images' ) + + + + + self.PopupMenu( menu ) + + menu.Destroy() + + event.Skip() + + + def EventScroll( self, event ): + + num_media = len( self._sorted_media ) + + num_rows = num_media / self._num_columns + + if num_media % self._num_columns > 0: num_rows += 1 + + current_last_visible_row = min( int( self._CalculateLastVisibleRow() * 1.25 ), num_rows ) + + if current_last_visible_row > num_rows: current_last_visible_row = num_rows + + if current_last_visible_row > self._last_visible_row: + + with wx.FrozenWindow( self ): + + self._RefitCanvas() + + self._ReblitCanvas() + + + + event.Skip() + + + def EventTimerAnimation( self, event ): + + dc = self._GetScrolledDC() + + all_info = self._thumbnails_being_faded_in.items() + + for ( key, current_alpha ) in all_info: + + ( bmp, x, y ) = key + + current_alpha += 0.25 + + if current_alpha < 1.0: + + image = bmp.ConvertToImage() + + image.InitAlpha() + + image = image.AdjustChannels( 1, 1, 1, current_alpha ) + + bmp_to_use = wx.BitmapFromImage( image, 32 ) + + self._thumbnails_being_faded_in[ key ] = current_alpha + + else: + + bmp_to_use = bmp + + del self._thumbnails_being_faded_in[ key ] + + + dc.DrawBitmap( bmp_to_use, x, y, True ) + + + + + if len( self._thumbnails_being_faded_in ) > 0: self._timer_animation.Start( 32, wx.TIMER_ONE_SHOT ) + + + def EventTimerWaterfall( self, event ): + + how_many = random.randint( 6, 12 ) + + for thumbnail in self._thumbnails_to_waterfall[-how_many:]: self._BlitThumbnail( thumbnail ) + + self._thumbnails_to_waterfall = self._thumbnails_to_waterfall[:-how_many] + + if len( self._thumbnails_to_waterfall ) > 0: self._timer_waterfall.Start( 5, wx.TIMER_ONE_SHOT ) + + + def NewThumbnails( self, hashes ): + + affected_thumbnails = self._GetMedia( hashes ) + + if len( affected_thumbnails ) > 0: + + for t in affected_thumbnails: t.ReloadFromDB() + + self._ReblitMedia( affected_thumbnails ) + + + + def RefreshAcceleratorTable( self ): + + entries = [ + ( wx.ACCEL_NORMAL, wx.WXK_HOME, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_home' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_HOME, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_home' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_END, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_end' ) ), + ( 
wx.ACCEL_NORMAL, wx.WXK_NUMPAD_END, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'scroll_end' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete', CC.LOCAL_FILE_SERVICE_IDENTIFIER ) ), + ( wx.ACCEL_NORMAL, wx.WXK_RETURN, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fullscreen' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_ENTER, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'fullscreen' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_UP, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_up' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_UP, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_up' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_DOWN, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_down' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DOWN, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_down' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_LEFT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_left' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_LEFT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_left' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_RIGHT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_right' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_RIGHT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_right' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_UP, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_up' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_UP, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_up' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_DOWN, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_down' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_DOWN, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_down' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_LEFT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_left' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_LEFT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_left' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_RIGHT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_right' ) ), + ( wx.ACCEL_SHIFT, wx.WXK_NUMPAD_RIGHT, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'key_shift_right' ) ), + ( wx.ACCEL_CMD, ord( 'A' ), CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select_all' ) ), + ( wx.ACCEL_CTRL, ord( 'c' ), CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'copy_files' ) ), + ( wx.ACCEL_CTRL, wx.WXK_SPACE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'ctrl-space' ) ) + ] + + for ( modifier, key_dict ) in self._options[ 'shortcuts' ].items(): entries.extend( [ ( modifier, key, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) for ( key, action ) in key_dict.items() ] ) + + self.SetAcceleratorTable( wx.AcceleratorTable( entries ) ) + + + def Sort( self, page_key, sort_by ): + + MediaPanel.Sort( self, page_key, sort_by ) + + for thumbnail in self._collected_media: + + thumbnail.ReloadFromDB() + + + self._ReblitMedia( self._collected_media ) + + + def ThumbnailsResized( self ): + + self._thumbnail_span_dimensions = CC.AddPaddingToDimensions( wx.GetApp().Read( 'options' )[ 'thumbnail_dimensions' ], ( CC.THUMBNAIL_BORDER + CC.THUMBNAIL_MARGIN ) * 2 ) + + ( thumbnail_span_width, thumbnail_span_height ) = self._thumbnail_span_dimensions + + ( client_width, client_height ) = self.GetClientSize() + + self._num_columns = client_width / thumbnail_span_width + + if self._num_columns == 0: self._num_columns = 1 + + self.SetScrollRate( 0, thumbnail_span_height ) + + for t in self._sorted_media: t.ReloadFromDBLater() + + with wx.FrozenWindow( self ): + + self._RefitCanvas() + + self._ReblitCanvas() + + + +class Selectable(): + + def __init__( self ): self._selected = False + 
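+    # Selectable is mixed into Thumbnail further down: Select/Deselect just flip the flag set
+    # in __init__, and Thumbnail.GetBmp reads it to choose the selected or unselected
+    # background and border colours when drawing.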
+ def Deselect( self ): self._selected = False + + def IsLoaded( self ): return False + + def IsSelected( self ): return self._selected + + def Select( self ): self._selected = True + +# keep this around, just for reference +class ThumbGridSizer( wx.PySizer ): + + def __init__( self, parent_container ): + + wx.PySizer.__init__( self ) + + self._parent_container = parent_container + + self._thumbnails = [] + + self._options = wx.GetApp().Read( 'options' ) + + + def _GetThumbnailDimensions( self ): return CC.AddPaddingToDimensions( self._options[ 'thumbnail_dimensions' ], ( CC.THUMBNAIL_MARGIN + CC.THUMBNAIL_BORDER ) * 2 ) + + def AddThumbnail( self, thumbnail ): self._thumbnails.append( thumbnail ) + + def CalcMin( self ): + + ( width, height ) = self._parent_container.GetClientSize() + + ( thumbnail_width, thumbnail_height ) = self._GetThumbnailDimensions() + + self._num_columns = width / thumbnail_width + + if self._num_columns == 0: self._num_columns = 1 + + num_items = len( self._parent_container ) + + my_min_height = num_items / self._num_columns + + if num_items % self._num_columns > 0: my_min_height += 1 + + my_min_height *= thumbnail_height + + return wx.Size( width, my_min_height ) + + + def RecalcSizes( self ): + + w = self.GetContainingWindow() + + ( xUnit, yUnit ) = w.GetScrollPixelsPerUnit() + + ( x, y ) = w.GetViewStart() + + y_offset = y * yUnit + + ( thumbnail_width, thumbnail_height ) = self._GetThumbnailDimensions() + + for ( index, thumbnail ) in enumerate( self.GetChildren() ): + + current_col = index % self._num_columns + current_row = index / self._num_columns + + thumbnail.SetDimension( ( current_col * thumbnail_width, current_row * thumbnail_height - y_offset ), ( thumbnail_width, thumbnail_height ) ) + + + +class Thumbnail( Selectable ): + + def __init__( self, file_service_identifier ): + + Selectable.__init__( self ) + + self._dump_status = CC.DUMPER_NOT_DUMPED + self._hydrus_bmp = None + self._file_service_identifier = file_service_identifier + + self._my_dimensions = CC.AddPaddingToDimensions( wx.GetApp().Read( 'options' )[ 'thumbnail_dimensions' ], CC.THUMBNAIL_BORDER * 2 ) + + + def _LoadFromDB( self ): + + display_hash = self.GetDisplayMedia().GetHash() + + mime = self.GetDisplayMedia().GetMime() + + if mime in HC.IMAGES: + + my_file_service_identifiers = self.GetFileServiceIdentifiersCDPP().GetCurrent() + + if CC.LOCAL_FILE_SERVICE_IDENTIFIER in my_file_service_identifiers: thumbnail_file_service_identifier = CC.LOCAL_FILE_SERVICE_IDENTIFIER + elif len( my_file_service_identifiers ) > 0: thumbnail_file_service_identifier = list( my_file_service_identifiers )[0] + else: thumbnail_file_service_identifier = self._file_service_identifier + + self._hydrus_bmp = wx.GetApp().GetThumbnailCache().GetThumbnail( thumbnail_file_service_identifier, display_hash ) + + elif mime == HC.APPLICATION_FLASH: self._hydrus_bmp = wx.GetApp().GetThumbnailCache().GetFlashThumbnail() + elif mime == HC.VIDEO_FLV: self._hydrus_bmp = wx.GetApp().GetThumbnailCache().GetFLVThumbnail() + else: self._hydrus_bmp = wx.GetApp().GetThumbnailCache().GetNotFoundThumbnail() + + + def GetBmp( self ): + + inbox = self.HasInbox() + + local = self.GetFileServiceIdentifiersCDPP().HasLocal() + + ( creators, series, titles, volumes, chapters, pages ) = self.GetTags().GetCSTVCP() + + if self._hydrus_bmp is None: self._LoadFromDB() + + ( width, height ) = self._my_dimensions + + bmp = wx.EmptyBitmap( width, height, 24 ) + + dc = wx.MemoryDC( bmp ) + + if not local: + + if self._selected: dc.SetBackground( 
wx.Brush( wx.Colour( 64, 64, 72 ) ) ) # Payne's Gray + else: dc.SetBackground( wx.Brush( wx.Colour( 32, 32, 36 ) ) ) # 50% Payne's Gray + + else: + + if self._selected: dc.SetBackground( wx.Brush( CC.COLOUR_SELECTED ) ) + else: dc.SetBackground( wx.Brush( wx.WHITE ) ) + + + dc.Clear() + + ( thumb_width, thumb_height ) = self._hydrus_bmp.GetSize() + + x_offset = ( width - thumb_width ) / 2 + + y_offset = ( height - thumb_height ) / 2 + + hydrus_bmp = self._hydrus_bmp.CreateWxBmp() + + dc.DrawBitmap( hydrus_bmp, x_offset, y_offset ) + + hydrus_bmp.Destroy() + + collections_string = '' + + if len( volumes ) > 0: + + if len( volumes ) == 1: + + ( volume, ) = volumes + + collections_string = 'v' + str( volume ) + + else: collections_string = 'v' + str( min( volumes ) ) + '-' + str( max( volumes ) ) + + + if len( chapters ) > 0: + + if len( chapters ) == 1: + + ( chapter, ) = chapters + + collections_string_append = 'c' + str( chapter ) + + else: collections_string_append = 'c' + str( min( chapters ) ) + '-' + str( max( chapters ) ) + + if len( collections_string ) > 0: collections_string += '-' + collections_string_append + else: collections_string = collections_string_append + + + if len( pages ) > 0: + + if len( pages ) == 1: + + ( page, ) = pages + + collections_string_append = 'p' + str( page ) + + else: collections_string_append = 'p' + str( min( pages ) ) + '-' + str( max( pages ) ) + + if len( collections_string ) > 0: collections_string += '-' + collections_string_append + else: collections_string = collections_string_append + + + if len( collections_string ) > 0: + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( text_x, text_y ) = dc.GetTextExtent( collections_string ) + + top_left_x = width - text_x - CC.THUMBNAIL_BORDER + top_left_y = height - text_y - CC.THUMBNAIL_BORDER + + dc.SetBrush( wx.Brush( CC.COLOUR_UNSELECTED ) ) + + dc.SetTextForeground( CC.COLOUR_SELECTED_DARK ) + + dc.SetPen( wx.TRANSPARENT_PEN ) + + dc.DrawRectangle( top_left_x - 1, top_left_y - 1, text_x + 2, text_y + 2 ) + + dc.DrawText( collections_string, top_left_x, top_left_y ) + + + if len( creators ) > 0: upper_info_string = ', '.join( creators ) + elif len( series ) > 0: upper_info_string = ', '.join( series ) + elif len( titles ) > 0: upper_info_string = ', '.join( titles ) + else: upper_info_string = '' + + if len( upper_info_string ) > 0: + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( text_x, text_y ) = dc.GetTextExtent( upper_info_string ) + + top_left_x = int( ( width - text_x ) / 2 ) + top_left_y = CC.THUMBNAIL_BORDER + + dc.SetBrush( wx.Brush( CC.COLOUR_UNSELECTED ) ) + + dc.SetTextForeground( CC.COLOUR_SELECTED_DARK ) + + dc.SetPen( wx.TRANSPARENT_PEN ) + + dc.DrawRectangle( 0, top_left_y - 1, width, text_y + 2 ) + + dc.DrawText( upper_info_string, top_left_x, top_left_y ) + + + dc.SetBrush( wx.TRANSPARENT_BRUSH ) + + if not local: + + if self._selected: colour = wx.Colour( 227, 66, 52 ) # Vermillion, lol + else: colour = wx.Colour( 248, 208, 204 ) # 25% Vermillion, 75% White + + else: + + if self._selected: colour = CC.COLOUR_SELECTED_DARK + else: colour = CC.COLOUR_UNSELECTED + + + dc.SetPen( wx.Pen( colour, style=wx.SOLID ) ) + + dc.DrawRectangle( 0, 0, width, height ) + + file_service_identifiers = self.GetFileServiceIdentifiersCDPP() + + if inbox: dc.DrawBitmap( CC.GlobalBMPs.inbox_bmp, width - 18, 0 ) + elif CC.LOCAL_FILE_SERVICE_IDENTIFIER in file_service_identifiers.GetPending(): dc.DrawBitmap( CC.GlobalBMPs.downloading_bmp, width - 18, 0 
) + + if self._dump_status == CC.DUMPER_DUMPED_OK: dc.DrawBitmap( CC.GlobalBMPs.dump_ok, width - 18, 18 ) + elif self._dump_status == CC.DUMPER_RECOVERABLE_ERROR: dc.DrawBitmap( CC.GlobalBMPs.dump_recoverable, width - 18, 18 ) + elif self._dump_status == CC.DUMPER_UNRECOVERABLE_ERROR: dc.DrawBitmap( CC.GlobalBMPs.dump_fail, width - 18, 18 ) + + if self.IsCollection(): + + dc.DrawBitmap( CC.GlobalBMPs.collection_bmp, 1, height - 17 ) + + num_files_str = str( len( self._hashes ) ) + + dc.SetFont( wx.SystemSettings.GetFont( wx.SYS_DEFAULT_GUI_FONT ) ) + + ( text_x, text_y ) = dc.GetTextExtent( num_files_str ) + + dc.SetBrush( wx.Brush( CC.COLOUR_UNSELECTED ) ) + + dc.SetTextForeground( CC.COLOUR_SELECTED_DARK ) + + dc.SetPen( wx.TRANSPARENT_PEN ) + + dc.DrawRectangle( 17, height - text_y - 3, text_x + 2, text_y + 2 ) + + dc.DrawText( num_files_str, 18, height - text_y - 2 ) + + + if self._file_service_identifier.GetType() == HC.LOCAL_FILE: + + if len( file_service_identifiers.GetPendingRemote() ) > 0: dc.DrawBitmap( CC.GlobalBMPs.file_repository_pending_bmp, 0, 0 ) + elif len( file_service_identifiers.GetCurrentRemote() ) > 0: dc.DrawBitmap( CC.GlobalBMPs.file_repository_bmp, 0, 0 ) + + elif self._file_service_identifier in file_service_identifiers.GetCurrentRemote(): + + if self._file_service_identifier in file_service_identifiers.GetPetitionedRemote(): dc.DrawBitmap( CC.GlobalBMPs.file_repository_petitioned_bmp, 0, 0 ) + + + return bmp + + + def Dumped( self, dump_status ): self._dump_status = dump_status + + def IsLoaded( self ): return self._hydrus_bmp is not None + + def ReloadFromDB( self ): + + self._my_dimensions = CC.AddPaddingToDimensions( wx.GetApp().Read( 'options' )[ 'thumbnail_dimensions' ], CC.THUMBNAIL_BORDER * 2 ) + + if self._hydrus_bmp is not None: self._LoadFromDB() + + + def ReloadFromDBLater( self ): + + self._my_dimensions = CC.AddPaddingToDimensions( wx.GetApp().Read( 'options' )[ 'thumbnail_dimensions' ], CC.THUMBNAIL_BORDER * 2 ) + + self._hydrus_bmp = None + + +class ThumbnailMediaCollection( Thumbnail, ClientGUIMixins.MediaCollection ): + + def __init__( self, file_service_identifier, predicates, media_results ): + + ClientGUIMixins.MediaCollection.__init__( self, file_service_identifier, predicates, media_results ) + Thumbnail.__init__( self, file_service_identifier ) + + + def ProcessContentUpdate( self, content_update ): + + ClientGUIMixins.MediaCollection.ProcessContentUpdate( self, content_update ) + + if content_update.GetAction() == CC.CONTENT_UPDATE_ADD and content_update.GetServiceIdentifier() == CC.LOCAL_FILE_SERVICE_IDENTIFIER: + + if self.GetDisplayMedia() in self._GetMedia( content_update.GetHashes() ): self.ReloadFromDB() + + + +class ThumbnailMediaSingleton( Thumbnail, ClientGUIMixins.MediaSingleton ): + + def __init__( self, file_service_identifier, media_result ): + + ClientGUIMixins.MediaSingleton.__init__( self, media_result ) + Thumbnail.__init__( self, file_service_identifier ) + + + def ProcessContentUpdate( self, content_update ): + + ClientGUIMixins.MediaSingleton.ProcessContentUpdate( self, content_update ) + + if content_update.GetAction() == CC.CONTENT_UPDATE_ADD and content_update.GetServiceIdentifier() == CC.LOCAL_FILE_SERVICE_IDENTIFIER: self.ReloadFromDB() + + \ No newline at end of file diff --git a/include/ClientGUIMessages.py b/include/ClientGUIMessages.py new file mode 100755 index 00000000..126ba65b --- /dev/null +++ b/include/ClientGUIMessages.py @@ -0,0 +1,1432 @@ +import HydrusConstants as HC +import ClientConstants as CC 
+import ClientConstantsMessages +import ClientGUICommon +import ClientGUIDialogs +import ClientGUIMedia +import cStringIO +import hashlib +import HydrusMessageHandling +import os +import random +import threading +import traceback +import webbrowser +import wx +import wx.html +import wx.richtext +import wx.lib.scrolledpanel +import yaml +from wx.lib.mixins.listctrl import ListCtrlAutoWidthMixin +from wx.lib.mixins.listctrl import ColumnSorterMixin + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +class ConversationsListCtrl( wx.ListCtrl, ListCtrlAutoWidthMixin, ColumnSorterMixin ): + + def __init__( self, parent, page_key, identity, conversations ): + + wx.ListCtrl.__init__( self, parent, style = wx.LC_REPORT | wx.LC_SINGLE_SEL ) + ListCtrlAutoWidthMixin.__init__( self ) + ColumnSorterMixin.__init__( self, 8 ) + + self._options = wx.GetApp().Read( 'options' ) + + self._page_key = page_key + self._identity = identity + + image_list = wx.ImageList( 16, 16, True, 2 ) + + image_list.Add( CC.GlobalBMPs.transparent_bmp ) + image_list.Add( CC.GlobalBMPs.inbox_bmp ) + + self.AssignImageList( image_list, wx.IMAGE_LIST_SMALL ) + + self.InsertColumn( 0, 'inbox', width = 30 ) + self.InsertColumn( 1, 'subject' ) + self.InsertColumn( 2, 'creator', width = 90 ) + self.InsertColumn( 3, 'to', width = 100 ) + self.InsertColumn( 4, 'messages', width = 60 ) + self.InsertColumn( 5, 'unread', width = 60 ) + self.InsertColumn( 6, 'created', width = 130 ) + self.InsertColumn( 7, 'updated', width = 130 ) + + self.setResizeColumn( 2 ) # subject + + self._SetConversations( conversations ) + + self.Bind( wx.EVT_LIST_ITEM_SELECTED, self.EventSelected ) + self.Bind( wx.EVT_LIST_ITEM_DESELECTED, self.EventSelected ) + self.Bind( wx.EVT_LIST_ITEM_RIGHT_CLICK, self.EventShowMenu ) + self.Bind( wx.EVT_MENU, self.EventMenu ) + + self.RefreshAcceleratorTable() + + HC.pubsub.sub( self, 'SetConversations', 'set_conversations' ) + HC.pubsub.sub( self, 'ArchiveConversation', 'archive_conversation_gui' ) + HC.pubsub.sub( self, 'InboxConversation', 'inbox_conversation_gui' ) + HC.pubsub.sub( self, 'DeleteConversation', 'delete_conversation_gui' ) + HC.pubsub.sub( self, 'UpdateMessageStatuses', 'message_statuses_gui' ) + HC.pubsub.sub( self, 'RefreshAcceleratorTable', 'options_updated' ) + + + def RefreshAcceleratorTable( self ): + + entries = [ + ( wx.ACCEL_NORMAL, wx.WXK_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete' ) ), + ( wx.ACCEL_NORMAL, wx.WXK_NUMPAD_DELETE, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete' ) ) + ] + + for ( modifier, key_dict ) in self._options[ 'shortcuts' ].items(): entries.extend( [ ( modifier, key, CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( action ) ) for ( key, action ) in key_dict.items() ] ) + + self.SetAcceleratorTable( wx.AcceleratorTable( entries ) ) + + + def _GetIndexFromConversationKey( self, conversation_key ): + + for i in range( self.GetItemCount() ): + + data_index = self.GetItemData( i ) + + conversation = self._data_indices_to_conversations[ data_index ] + + if conversation.GetConversationKey() == conversation_key: return i + + 
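+        # fell through the loop without finding the conversation_key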
+ return None + + + def _GetPrettyStatus( self ): + + if len( self._conversations ) == 1: return '1 conversation' + else: return str( len( self._conversations ) ) + ' conversations' + + + def _SetConversations( self, conversations ): + + self._conversations = list( conversations ) + + self.DeleteAllItems() + + self.itemDataMap = {} + self._data_indices_to_conversations = {} + + i = 0 + + cmp_conversations = lambda c1, c2: cmp( c1.GetUpdated(), c2.GetUpdated() ) + + self._conversations.sort( cmp = cmp_conversations, reverse = True ) # order by newest change first + + for conversation in self._conversations: + + ( conversation_key, inbox, subject, name_from, participants, message_count, unread_count, created, updated ) = conversation.GetListCtrlTuple() + + if created is None: + + created_string = '' + updated_string = '' + + else: + + created_string = HC.ConvertTimestampToHumanPrettyTime( created ) + updated_string = HC.ConvertTimestampToHumanPrettyTime( updated ) + + + self.Append( ( '', subject, name_from, ', '.join( [ contact.GetName() for contact in participants if contact.GetName() != name_from ] ), str( message_count ), str( unread_count ), created_string, updated_string ) ) + + data_index = i + + self.SetItemData( i, data_index ) + + if inbox: self.SetItemImage( i, 1 ) # inbox + else: self.SetItemImage( i, 0 ) # transparent + + self.itemDataMap[ data_index ] = ( inbox, subject, name_from, len( participants ), message_count, unread_count, created, updated ) + + self._data_indices_to_conversations[ data_index ] = conversation + + i += 1 + + + HC.pubsub.pub( 'conversation_focus', self._page_key, None ) + HC.pubsub.pub( 'new_page_status', self._page_key, self._GetPrettyStatus() ) + + + def _UpdateConversationItem( self, conversation_key ): + + selection = self._GetIndexFromConversationKey( conversation_key ) + + if selection is not None: + + conversation = self._data_indices_to_conversations[ self.GetItemData( selection ) ] + + ( conversation_key, inbox, subject, name_from, participants, message_count, unread_count, created, updated ) = conversation.GetListCtrlTuple() + + selection = self._GetIndexFromConversationKey( conversation_key ) + + data_index = self.GetItemData( selection ) + + self.itemDataMap[ data_index ] = ( inbox, subject, name_from, len( participants ), message_count, unread_count, created, updated ) + + if inbox: self.SetItemImage( selection, 1 ) + else: self.SetItemImage( selection, 0 ) + + self.SetStringItem( selection, 4, str( message_count ) ) + self.SetStringItem( selection, 5, str( unread_count ) ) + + if created is None: + + created_string = '' + updated_string = '' + + else: + + created_string = HC.ConvertTimestampToHumanPrettyTime( created ) + + updated_string = HC.ConvertTimestampToHumanPrettyTime( updated ) + + + self.SetStringItem( selection, 6, created_string ) + self.SetStringItem( selection, 7, updated_string ) + + + + def ArchiveConversation( self, conversation_key ): self._UpdateConversationItem( conversation_key ) + + def DeleteConversation( self, conversation_key ): + + selection = self._GetIndexFromConversationKey( conversation_key ) + + if selection is not None: + + conversation = self._data_indices_to_conversations[ self.GetItemData( selection ) ] + + self._conversations.remove( conversation ) + + self.DeleteItem( selection ) + + + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + selection = self.GetFirstSelected() + + 
conversation = self._data_indices_to_conversations[ self.GetItemData( selection ) ] + + conversation_key = conversation.GetConversationKey() + + identity_contact_key = self._identity.GetContactKey() + + if command == 'archive': wx.GetApp().Write( 'archive_conversation', conversation_key ) + elif command == 'inbox': wx.GetApp().Write( 'inbox_conversation', conversation_key ) + elif command == 'read': + + message_keys = conversation.GetMessageKeysWithDestination( ( self._identity, 'sent' ) ) + + for message_key in message_keys: wx.GetApp().Write( 'message_statuses', message_key, [ ( identity_contact_key, 'read' ) ] ) + + elif command == 'unread': + + message_keys = conversation.GetMessageKeysWithDestination( ( self._identity, 'read' ) ) + + for message_key in message_keys: wx.GetApp().Write( 'message_statuses', message_key, [ ( identity_contact_key, 'sent' ) ] ) + + elif command == 'delete': + + with ClientGUIDialogs.DialogYesNo( self, 'Are you sure you want to delete this conversation?' ) as dlg: + + if dlg.ShowModal() == wx.ID_YES: wx.GetApp().Write( 'delete_conversation', conversation_key ) + + + else: event.Skip() + + except Exception as e: + + wx.MessageBox( unicode( e ) ) + wx.MessageBox( traceback.format_exc() ) + + + + def EventSelected( self, event ): + + selection = self.GetFirstSelected() + + if selection == wx.NOT_FOUND: HC.pubsub.pub( 'conversation_focus', self._page_key, None ) + else: HC.pubsub.pub( 'conversation_focus', self._page_key, self._data_indices_to_conversations[ self.GetItemData( selection ) ] ) + + + def EventShowMenu( self, event ): + + conversation = self._data_indices_to_conversations[ self.GetItemData( event.GetIndex() ) ] + + menu = wx.Menu() + + if conversation.IsInbox(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'archive' ), 'archive' ) + else: menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'inbox' ), 'return to inbox' ) + + if conversation.HasUnread(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'read' ), 'set all as read' ) + if conversation.HasRead(): menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'unread' ), 'set all as unread' ) + + menu.AppendSeparator() + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'delete' ), 'delete' ) + + self.PopupMenu( menu ) + + + def GetListCtrl( self ): return self + + def InboxConversation( self, conversation_key ): self._UpdateConversationItem( conversation_key ) + + def SetConversations( self, page_key, conversations ): + + if page_key == self._page_key: + + try: self._SetConversations( conversations ) + except: + + wx.MessageBox( traceback.format_exc() ) + + + + + def UpdateMessageStatuses( self, message_key, updates ): + + for conversation in self._data_indices_to_conversations.values(): + + if conversation.HasMessageKey( message_key ): + + conversation_key = conversation.GetConversationKey() + + self._UpdateConversationItem( conversation_key ) + + + + +class ConversationPanel( wx.Panel ): + + def __init__( self, parent, page_key, identity, conversation ): + + wx.Panel.__init__( self, parent, style = wx.SIMPLE_BORDER ) + + self.SetBackgroundColour( wx.WHITE ) + + self._identity = identity + self._page_key = page_key + self._conversation = conversation + + self._vbox = wx.BoxSizer( wx.VERTICAL ) + + self._message_keys_to_message_panels = {} + self._draft_keys_to_draft_panels = {} + + self._scrolling_messages_window = wx.lib.scrolledpanel.ScrolledPanel( self ) + self._scrolling_messages_window.SetupScrolling() + self._scrolling_messages_window.SetScrollRate( 0, 50 ) + + 
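+        # the scrolling window stacks two sub-sizers: finished messages first, then any open
+        # drafts, so replies created later in EventReply slot in underneath the conversation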
self._window_vbox = wx.BoxSizer( wx.VERTICAL ) + + self._messages_vbox = wx.BoxSizer( wx.VERTICAL ) + self._drafts_vbox = wx.BoxSizer( wx.VERTICAL ) + + self._window_vbox.AddF( self._messages_vbox, FLAGS_EXPAND_PERPENDICULAR ) + self._window_vbox.AddF( self._drafts_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + self._DrawConversation() + + self.SetSizer( self._vbox ) + + HC.pubsub.sub( self, 'DeleteDraft', 'delete_draft_gui' ) + HC.pubsub.sub( self, 'NewMessage', 'new_message' ) + + + def _DrawConversation( self ): + + # fix it so this stuff is reusable? + + self._messages_vbox.DeleteWindows() + self._drafts_vbox.DeleteWindows() + + self._convo_frame = wx.Panel( self ) + + convo_vbox = wx.BoxSizer( wx.VERTICAL ) + + subject_static_text = wx.StaticText( self._convo_frame, label = self._conversation.GetSubject() ) + + f = subject_static_text.GetFont() + + f.SetWeight( wx.BOLD ) + + subject_static_text.SetFont( f ) + + convo_vbox.AddF( subject_static_text, FLAGS_EXPAND_PERPENDICULAR ) + convo_vbox.AddF( wx.StaticText( self._convo_frame, label = ', '.join( contact.GetName() for contact in self._conversation.GetParticipants() ) ), FLAGS_EXPAND_PERPENDICULAR ) + + self._convo_frame.SetSizer( convo_vbox ) + + # archive_all + # set all as read + # delete all button, eventually + + ( messages, drafts ) = self._conversation.GetMessages() + + for message in messages: + + message_panel = MessagePanel( self._scrolling_messages_window, message, self._identity ) + + self._message_keys_to_message_panels[ message.GetMessageKey() ] = message_panel + + self._messages_vbox.AddF( message_panel, FLAGS_EXPAND_PERPENDICULAR ) + + + for draft in drafts: + + draft_panel = DraftPanel( self._scrolling_messages_window, draft ) + + self._draft_keys_to_draft_panels[ draft.GetDraftKey() ] = draft_panel + + self._drafts_vbox.AddF( draft_panel, FLAGS_EXPAND_PERPENDICULAR ) + + + self._reply_button = wx.Button( self._scrolling_messages_window, label = 'reply' ) + self._reply_button.Bind( wx.EVT_BUTTON, self.EventReply ) + self._reply_button.Disable() + + if len( messages ) > 0 and self._conversation.GetStartedBy().GetName() != 'Anonymous': self._reply_button.Enable() + + if self._conversation.GetStartedBy() == self._identity: self._reply_button.Enable() + + self._window_vbox.AddF( self._reply_button, FLAGS_LONE_BUTTON ) + + self._scrolling_messages_window.SetSizer( self._window_vbox ) + + self._vbox.AddF( self._convo_frame, FLAGS_EXPAND_PERPENDICULAR ) + self._vbox.AddF( self._scrolling_messages_window, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( self._vbox ) + + + def DeleteDraft( self, draft_key ): + + if draft_key in self._draft_keys_to_draft_panels: + + draft_panel = self._draft_keys_to_draft_panels[ draft_key ] + + del self._draft_keys_to_draft_panels[ draft_key ] + + self._drafts_vbox.Detach( draft_panel ) + + draft_panel.Destroy() + + self._scrolling_messages_window.FitInside() + + + + def EventReply( self, event ): + + draft_key = os.urandom( 32 ) + conversation_key = self._conversation.GetConversationKey() + subject = self._conversation.GetSubject() + contact_from = self._identity + participants = self._conversation.GetParticipants() + contact_names_to = [ contact.GetName() for contact in participants if contact is not None and contact.GetName() != 'Anonymous' and contact != contact_from ] + recipients_visible = True + body = '' + attachment_hashes = [] + + draft = ClientConstantsMessages.DraftMessage( draft_key, conversation_key, subject, contact_from, contact_names_to, recipients_visible, body, attachment_hashes, is_new 
= True ) + + self._conversation.AddDraft( draft ) + + draft_panel = DraftPanel( self._scrolling_messages_window, draft ) + + self._draft_keys_to_draft_panels[ draft_key ] = draft_panel + + self._drafts_vbox.AddF( draft_panel, FLAGS_EXPAND_PERPENDICULAR ) + + self._scrolling_messages_window.FitInside() + + + def NewMessage( self, conversation_key, message ): + + if self._conversation is not None and conversation_key == self._conversation.GetConversationKey(): + + message_key = message.GetMessageKey() + + if message_key not in self._message_keys_to_message_panels: # if not already here! + + message_panel = MessagePanel( self._scrolling_messages_window, message, self._identity ) + + self._message_keys_to_message_panels[ message_key ] = message_panel + + self._messages_vbox.AddF( message_panel, FLAGS_EXPAND_PERPENDICULAR ) + + self._conversation.AddMessage( message ) + + self._scrolling_messages_window.FitInside() + + + + +class ConversationSplitter( wx.SplitterWindow ): + + def __init__( self, parent, page_key, identity, conversations = [] ): + + wx.SplitterWindow.__init__( self, parent ) + + self._page_key = page_key + self._identity = identity + self._conversations = conversations + + self.SetMinimumPaneSize( 180 ) + self.SetSashGravity( 0.0 ) + + self._InitConversationsPanel() + self._InitConversationPanel() + + wx.CallAfter( self.SplitHorizontally, self._conversations_panel, self._conversation_panel, 180 ) + wx.CallAfter( self._conversation_panel.Refresh ) + + HC.pubsub.sub( self, 'SetConversationFocus', 'conversation_focus' ) + + + def _InitConversationsPanel( self ): self._conversations_panel = ConversationsListCtrl( self, self._page_key, self._identity, self._conversations ) + + def _InitConversationPanel( self ): self._conversation_panel = wx.Window( self ) + + def SetConversationFocus( self, page_key, conversation ): + + if page_key == self._page_key: + + with wx.FrozenWindow( self ): + + if conversation is None: new_panel = wx.Window( self ) + else: new_panel = ConversationPanel( self, self._page_key, self._identity, conversation ) + + self.ReplaceWindow( self._conversation_panel, new_panel ) + + self._conversation_panel.Destroy() + + self._conversation_panel = new_panel + + + + +class DestinationPanel( wx.Panel ): + + def __init__( self, parent, message_key, contact, status, identity ): + + wx.Panel.__init__( self, parent ) + + self.SetBackgroundColour( CC.COLOUR_MESSAGE ) + + self._message_key = message_key + self._contact = contact + self._contact_key = contact.GetContactKey() + self._identity = identity + self._status = status + + name = contact.GetName() + + name_static_text = wx.StaticText( self, label = name ) + + if self._contact == self._identity: + + f = name_static_text.GetFont() + + f.SetWeight( wx.BOLD ) + + name_static_text.SetFont( f ) + + if self._status == 'sent': self._status = 'unread' + + + self._status_panel = self._CreateStatusPanel() + + self._hbox = wx.BoxSizer( wx.HORIZONTAL ) + + self._hbox.AddF( name_static_text, FLAGS_MIXED ) + self._hbox.AddF( self._status_panel, FLAGS_MIXED ) + + self.SetSizer( self._hbox ) + + self.Bind( wx.EVT_MENU, self.EventMenu ) + + + def _CreateStatusPanel( self ): + + if self._status == 'failed': + + status_text = wx.StaticText( self, label = self._status ) + + status_text.SetForegroundColour( ( 128, 0, 0 ) ) + + status_text.SetCursor( wx.StockCursor( wx.CURSOR_HAND ) ) + + status_text.Bind( wx.EVT_LEFT_DOWN, self.EventRetryMenu ) + + elif self._status == 'unread': + + status_text = wx.StaticText( self, label = self._status ) + 
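+            # an 'unread' status is clickable; the handler bound below pops a one-item menu whose
+            # EventMenu action posts the new status to the message depot and writes it locally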
+ status_text.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHT ) ) + + status_text.SetCursor( wx.StockCursor( wx.CURSOR_HAND ) ) + + status_text.Bind( wx.EVT_LEFT_DOWN, self.EventReadMenu ) + + elif self._status == 'read': + + status_text = wx.StaticText( self, label = self._status ) + + status_text.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHT ) ) + + status_text.SetCursor( wx.StockCursor( wx.CURSOR_HAND ) ) + + status_text.Bind( wx.EVT_LEFT_DOWN, self.EventUnreadMenu ) + + else: + + status_text = wx.StaticText( self, label = self._status ) + + status_text.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHT ) ) + + + return status_text + + + def EventMenu( self, event ): + + action = CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event.GetId() ) + + if action is not None: + + try: + + ( command, data ) = action + + if command in ( 'retry', 'read', 'unread' ): + + if command == 'retry': status = 'pending' + elif command == 'read': status = 'read' + elif command == 'unread': status = 'sent' + + my_message_depot = wx.GetApp().Read( 'service', self._identity ) + + connection = my_message_depot.GetConnection() + + my_public_key = self._identity.GetPublicKey() + my_contact_key = self._identity.GetContactKey() + + contacts_contact_key = self._contact.GetContactKey() + + status_updates = [] + + status_key = hashlib.sha256( contacts_contact_key + self._message_key ).digest() + + packaged_status = HydrusMessageHandling.PackageStatusForDelivery( ( self._message_key, contacts_contact_key, status ), my_public_key ) + + status_updates = ( ( status_key, packaged_status ), ) + + connection.Post( 'message_statuses', contact_key = my_contact_key, statuses = status_updates ) + + wx.GetApp().Write( 'message_statuses', self._message_key, [ ( self._contact_key, status ) ] ) + + else: event.Skip() + + except Exception as e: + + wx.MessageBox( 'Could not contact your message depot, so could not update status!' 
) + + wx.MessageBox( unicode( e ) ) + wx.MessageBox( traceback.format_exc() ) + + + + + def EventReadMenu( self, event ): + + menu = wx.Menu() + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'read' ), 'read' ) + + self.PopupMenu( menu ) + + + def EventRetryMenu( self, event ): + + menu = wx.Menu() + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'retry' ), 'retry' ) + + self.PopupMenu( menu ) + + + def EventUnreadMenu( self, event ): + + menu = wx.Menu() + + menu.Append( CC.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'unread' ), 'unread' ) + + self.PopupMenu( menu ) + + + def SetStatus( self, status ): + + if self._contact == self._identity and status == 'sent': status = 'unread' + + self._status = status + + new_status_panel = self._CreateStatusPanel() + + self._hbox.Replace( self._status_panel, new_status_panel ) + + self._status_panel.Destroy() + + self._status_panel = new_status_panel + + +class DestinationsPanel( wx.Panel ): + + def __init__( self, parent, message_key, destinations, identity ): + + wx.Panel.__init__( self, parent ) + + self.SetBackgroundColour( CC.COLOUR_MESSAGE ) + + self._message_key = message_key + self._my_panels = {} + + vbox = wx.BoxSizer( wx.VERTICAL ) + + for ( contact, status ) in destinations: + + destination_panel = DestinationPanel( self, message_key, contact, status, identity ) + + vbox.AddF( destination_panel, FLAGS_EXPAND_PERPENDICULAR ) + + self._my_panels[ contact.GetContactKey() ] = destination_panel + + + self.SetSizer( vbox ) + + HC.pubsub.sub( self, 'UpdateMessageStatuses', 'message_statuses_gui' ) + + + def UpdateMessageStatuses( self, message_key, updates ): + + if message_key == self._message_key: + + with wx.FrozenWindow( self ): + + for ( contact_key, status ) in updates: + + if contact_key in self._my_panels: self._my_panels[ contact_key ].SetStatus( status ) + + + # doing replace on the destpanels' tricky sizer is a huge pain, hence the size event + # has to be postevent, not processevent + wx.PostEvent( self.GetParent(), wx.SizeEvent() ) + + + + +# A whole bunch of this is cribbed from/inspired by the excellent rtc example in the wxPython Demo +class DraftBodyPanel( wx.Panel ): + + ID_BOLD = 0 + ID_ITALIC = 1 + ID_UNDERLINE = 2 + + ID_ALIGN_LEFT = 3 + ID_ALIGN_CENTER = 4 + ID_ALIGN_RIGHT = 5 + ID_ALIGN_JUSTIFY = 6 # rtc doesn't yet support this, sadly + + ID_INDENT_LESS = 7 + ID_INDENT_MORE = 8 + + ID_FONT = 9 + ID_FONT_COLOUR = 10 + + ID_LINK = 11 + ID_LINK_BREAK = 12 + + def __init__( self, parent, xml ): + + wx.Panel.__init__( self, parent ) + + self._CreateToolBar() + + self._CreateRTC( xml ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._toolbar, FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( self._rtc, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + self.SetAcceleratorTable( wx.AcceleratorTable( [ + ( wx.ACCEL_CMD, ord( 'b' ), self.ID_BOLD ), + ( wx.ACCEL_CMD, ord( 'i' ), self.ID_ITALIC ), + ( wx.ACCEL_CMD, ord( 'u' ), self.ID_UNDERLINE ) + ] ) ) + + self.Bind( wx.EVT_TOOL, self.EventToolBar ) + + self.Bind( wx.EVT_UPDATE_UI, self.EventUpdateUI ) + + + def _CreateToolBar( self ): + + self._toolbar = wx.ToolBar( self ) + + self._toolbar.SetToolBitmapSize( ( 16, 16 ) ) + + self._toolbar.AddCheckTool( self.ID_BOLD, CC.GlobalBMPs.bold_bmp ) + self._toolbar.AddCheckTool( self.ID_ITALIC, CC.GlobalBMPs.italic_bmp ) + self._toolbar.AddCheckTool( self.ID_UNDERLINE, CC.GlobalBMPs.underline_bmp ) + + self._toolbar.AddSeparator() + + self._toolbar.AddRadioTool( self.ID_ALIGN_LEFT, CC.GlobalBMPs.align_left_bmp ) + 
self._toolbar.AddRadioTool( self.ID_ALIGN_CENTER, CC.GlobalBMPs.align_center_bmp ) + self._toolbar.AddRadioTool( self.ID_ALIGN_RIGHT, CC.GlobalBMPs.align_right_bmp ) + + self._toolbar.AddSeparator() + + self._toolbar.AddLabelTool( self.ID_INDENT_LESS, 'indent less', CC.GlobalBMPs.indent_less_bmp ) + self._toolbar.AddLabelTool( self.ID_INDENT_MORE, 'indent more', CC.GlobalBMPs.indent_more_bmp ) + + self._toolbar.AddSeparator() + + self._toolbar.AddLabelTool( self.ID_FONT, 'font', CC.GlobalBMPs.font_bmp ) + self._toolbar.AddLabelTool( self.ID_FONT_COLOUR, 'font colour', CC.GlobalBMPs.colour_bmp, shortHelp = 'font colour' ) + + # font background + # message background? + + self._toolbar.AddSeparator() + + self._toolbar.AddLabelTool( self.ID_LINK, 'link', CC.GlobalBMPs.link_bmp ) + self._toolbar.AddLabelTool( self.ID_LINK_BREAK, 'break link', CC.GlobalBMPs.link_break_bmp ) + + self._toolbar.Realize() + + + def _CreateRTC( self, xml ): + + self._rtc = wx.richtext.RichTextCtrl( self, size = ( -1, 300 ), style = wx.WANTS_CHARS | wx.richtext.RE_MULTILINE ) + + if len( xml ) > 0: + + xml_handler = wx.richtext.RichTextXMLHandler() + + stream = cStringIO.StringIO( xml ) + + xml_handler.LoadStream( self._rtc.GetBuffer(), stream ) + + + + def EventUpdateUI( self, event ): + + self._toolbar.ToggleTool( self.ID_BOLD, self._rtc.IsSelectionBold() ) + self._toolbar.ToggleTool( self.ID_ITALIC, self._rtc.IsSelectionItalics() ) + self._toolbar.ToggleTool( self.ID_UNDERLINE, self._rtc.IsSelectionUnderlined() ) + + if self._rtc.IsSelectionAligned( wx.TEXT_ALIGNMENT_LEFT ): self._toolbar.ToggleTool( self.ID_ALIGN_LEFT, True ) + elif self._rtc.IsSelectionAligned( wx.TEXT_ALIGNMENT_CENTER ): self._toolbar.ToggleTool( self.ID_ALIGN_CENTER, True ) + elif self._rtc.IsSelectionAligned( wx.TEXT_ALIGNMENT_RIGHT ): self._toolbar.ToggleTool( self.ID_ALIGN_RIGHT, True ) + + event.Skip() + + + def EventToolBar( self, event ): + + id = event.GetId() + + if id == self.ID_BOLD: self._rtc.ApplyBoldToSelection() + elif id == self.ID_ITALIC: self._rtc.ApplyItalicToSelection() + elif id == self.ID_UNDERLINE: self._rtc.ApplyUnderlineToSelection() + elif id == self.ID_ALIGN_LEFT: self._rtc.ApplyAlignmentToSelection( wx.TEXT_ALIGNMENT_LEFT ) + elif id == self.ID_ALIGN_CENTER: self._rtc.ApplyAlignmentToSelection( wx.TEXT_ALIGNMENT_CENTRE ) + elif id == self.ID_ALIGN_RIGHT: self._rtc.ApplyAlignmentToSelection( wx.TEXT_ALIGNMENT_RIGHT ) + elif id == self.ID_INDENT_LESS: + + text_attribute = wx.TEXTAttrEx() + + text_attribute.SetFlags( wx.TEXT_ATTR_LEFT_INDENT ) + + ip = self._rtc.GetInsertionPoint() + + if self._rtc.GetStyle( ip, text_attribute ): # this copies the current style into text_attribute, returning true if successful + + if self._rtc.HasSelection(): selection_range = self._rtc.GetSelectionRange() + else: selection_range = wx.richtext.RichTextRange( ip, ip ) + + if text_attribute.GetLeftIndent() >= 100: + + text_attribute.SetLeftIndent( text_attribute.GetLeftIndent() - 100 ) + text_attribute.SetFlags( wx.TEXT_ATTR_LEFT_INDENT ) + + self._rtc.SetStyle( selection_range, text_attribute ) + + + + elif id == self.ID_INDENT_MORE: + + text_attribute = wx.richtext.TextAttrEx() + + text_attribute.SetFlags( wx.TEXT_ATTR_LEFT_INDENT ) + + ip = self._rtc.GetInsertionPoint() + + if self._rtc.GetStyle( ip, text_attribute ): # this copies the current style into text_attribute, returning true if successful + + if self._rtc.HasSelection(): selection_range = self._rtc.GetSelectionRange() + else: selection_range = wx.richtext.RichTextRange( ip, 
ip ) + + text_attribute.SetLeftIndent( text_attribute.GetLeftIndent() + 100 ) + text_attribute.SetFlags( wx.TEXT_ATTR_LEFT_INDENT ) + + self._rtc.SetStyle( selection_range, text_attribute ) + + + elif id == self.ID_FONT: + + font_data = wx.FontData() + font_data.EnableEffects( False ) + + text_attribute = wx.richtext.TextAttrEx() + text_attribute.SetFlags( wx.TEXT_ATTR_FONT ) + + if self._rtc.GetStyle( self._rtc.GetInsertionPoint(), text_attribute ): font_data.SetInitialFont( text_attribute.GetFont() ) + + with wx.FontDialog( self, font_data ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + font_data = dlg.GetFontData() + + font = font_data.GetChosenFont() + + if not self._rtc.HasSelection(): self._rtc.BeginFont( font ) + else: + + selection_range = self._rtc.GetSelectionRange() + + text_attribute.SetFlags( wx.TEXT_ATTR_FONT ) + text_attribute.SetFont( font ) + + self._rtc.SetStyle( selection_range, text_attribute ) + + + + + elif id == self.ID_FONT_COLOUR: + + colour_data = wx.ColourData() + + text_attribute = wx.richtext.TextAttrEx() + text_attribute.SetFlags( wx.TEXT_ATTR_TEXT_COLOUR ) + + if self._rtc.GetStyle( self._rtc.GetInsertionPoint(), text_attribute ): colour_data.SetColour( text_attribute.GetTextColour() ) + + with wx.ColourDialog( self, colour_data ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + colour_data = dlg.GetColourData() + colour = colour_data.GetColour() + + if colour: + + if not self._rtc.HasSelection(): self._rtc.BeginTextColour( colour ) + else: + + selection_range = self._rtc.GetSelectionRange() + + text_attribute.SetFlags( wx.TEXT_ATTR_TEXT_COLOUR ) + text_attribute.SetTextColour( colour ) + + self._rtc.SetStyle( selection_range, text_attribute ) + + + + + + elif id == self.ID_LINK: + + text_attribute = wx.richtext.TextAttrEx() + + text_attribute.SetFlags( wx.TEXT_ATTR_URL ) + + ip = self._rtc.GetInsertionPoint() + + self._rtc.GetStyle( self._rtc.GetInsertionPoint(), text_attribute ) + + if text_attribute.HasURL(): initial_url = text_attribute.GetURL() + else: initial_url = 'http://' + + with wx.TextEntryDialog( self, 'Enter url', defaultValue = initial_url ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + url = dlg.GetValue() + + if self._rtc.HasSelection(): selection_range = self._rtc.GetSelectionRange() + else: selection_range = wx.richtext.RichTextRange( ip, ip ) + + text_attribute.SetFlags( wx.TEXT_ATTR_TEXT_COLOUR ) + text_attribute.SetTextColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHT ) ) + + text_attribute.SetFontUnderlined( True ) + + text_attribute.SetURL( url ) + + self._rtc.SetStyle( selection_range, text_attribute ) + + + + elif id == self.ID_LINK_BREAK: + + if self._rtc.HasSelection(): selection_range = self._rtc.GetSelectionRange() + else: selection_range = wx.richtext.RichTextRange( ip, ip ) + + text_attribute = wx.richtext.TextAttrEx() + + text_attribute.SetFlags( wx.TEXT_ATTR_TEXT_COLOUR ) + text_attribute.SetTextColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOWTEXT ) ) + + text_attribute.SetFontUnderlined( False ) + + self._rtc.SetStyle( selection_range, text_attribute ) + + text_attribute = wx.richtext.TextAttrEx() + + text_attribute.SetFlags( wx.TEXT_ATTR_URL ) + + self._rtc.SetStyleEx( selection_range, text_attribute, wx.richtext.RICHTEXT_SETSTYLE_REMOVE ) + + + + def GetXMLHTML( self ): + + xml_handler = wx.richtext.RichTextXMLHandler() + + stream = cStringIO.StringIO() + + xml_handler.SaveStream( self._rtc.GetBuffer(), stream ) + + stream.seek( 0 ) + + xml = stream.read() + + html_handler = 
wx.richtext.RichTextHTMLHandler() + html_handler.SetFlags( wx.richtext.RICHTEXT_HANDLER_SAVE_IMAGES_TO_MEMORY ) + html_handler.SetFontSizeMapping( [7,9,11,12,14,22,100] ) + + stream = cStringIO.StringIO() + + html_handler.SaveStream( self._rtc.GetBuffer(), stream ) + + stream.seek( 0 ) + + html = stream.read() + + return yaml.safe_dump( ( xml, html ) ) + + + +class DraftPanel( wx.Panel ): + + def __init__( self, parent, draft_message ): + + wx.Panel.__init__( self, parent ) + + self.SetBackgroundColour( CC.COLOUR_MESSAGE ) + + self._compose_key = os.urandom( 32 ) + + self._draft_message = draft_message + + ( self._draft_key, self._conversation_key, subject, self._contact_from, contacts_to, recipients_visible, body, attachment_hashes ) = self._draft_message.GetInfo() + + is_new = self._draft_message.IsNew() + + self._from = wx.StaticText( self, label = self._contact_from.GetName() ) + + if not self._draft_message.IsReply(): + + self._subject = wx.TextCtrl( self, value = subject ) + self._subject.Bind( wx.EVT_KEY_DOWN, self.EventChanged ) + + self._recipients_list = wx.ListCtrl( self, style = wx.LC_LIST | wx.LC_NO_HEADER | wx.LC_SINGLE_SEL ) + self._recipients_list.InsertColumn( 0, 'contacts' ) + for name in contacts_to: self._recipients_list.Append( ( name, ) ) + self._recipients_list.Bind( wx.EVT_LIST_ITEM_ACTIVATED, self.EventRemove ) + + self._new_recipient = ClientGUICommon.AutoCompleteDropdownContacts( self, self._compose_key, self._contact_from ) + + self._recipients_visible = wx.CheckBox( self ) + self._recipients_visible.SetValue( recipients_visible ) + self._recipients_visible.Bind( wx.EVT_CHECKBOX, self.EventChanged ) + + + if body == '': xml = '' + else: ( xml, html ) = yaml.safe_load( body ) + + self._body = DraftBodyPanel( self, xml ) + self.Bind( wx.richtext.EVT_RICHTEXT_STYLE_CHANGED, self.EventChanged ) + self.Bind( wx.richtext.EVT_RICHTEXT_CHARACTER, self.EventChanged ) + self.Bind( wx.richtext.EVT_RICHTEXT_RETURN, self.EventChanged ) + self.Bind( wx.richtext.EVT_RICHTEXT_DELETE, self.EventChanged ) + + self._attachments = wx.TextCtrl( self, value = os.linesep.join( [ hash.encode( 'hex' ) for hash in attachment_hashes ] ), style = wx.TE_MULTILINE ) + self._attachments.Bind( wx.EVT_KEY_DOWN, self.EventChanged ) + # do thumbnails later! 
for now, do a listbox or whatever + + self._send = wx.Button( self, label = 'send' ) + self._send.Bind( wx.EVT_BUTTON, self.EventSend ) + self._send.SetForegroundColour( ( 0, 128, 0 ) ) + if len( contacts_to ) == 0: self._send.Disable() + + self._delete_draft = wx.Button( self, label = 'delete' ) + self._delete_draft.Bind( wx.EVT_BUTTON, self.EventDeleteDraft ) + self._delete_draft.SetForegroundColour( ( 128, 0, 0 ) ) + + self._save_draft = wx.Button( self, label = 'save' ) + self._save_draft.Bind( wx.EVT_BUTTON, self.EventSaveDraft ) + + if is_new: + + self._draft_changed = True + self._delete_draft.SetLabel( 'discard' ) + + else: + + self._draft_changed = False + self._save_draft.SetLabel( 'saved' ) + self._save_draft.Disable() + + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._from, FLAGS_EXPAND_PERPENDICULAR ) + + if not self._draft_message.IsReply(): + + recipients_hbox = wx.BoxSizer( wx.HORIZONTAL ) + recipients_hbox.AddF( wx.StaticText( self, label = 'recipients can see each other' ), FLAGS_MIXED ) + recipients_hbox.AddF( self._recipients_visible, FLAGS_MIXED ) + + to_vbox = wx.BoxSizer( wx.VERTICAL ) + + to_vbox.AddF( wx.StaticText( self, label = '- to -' ), FLAGS_SMALL_INDENT ) + to_vbox.AddF( self._recipients_list, FLAGS_EXPAND_PERPENDICULAR ) + to_vbox.AddF( self._new_recipient, FLAGS_LONE_BUTTON ) + to_vbox.AddF( recipients_hbox, FLAGS_BUTTON_SIZERS ) + + vbox.AddF( to_vbox, FLAGS_EXPAND_PERPENDICULAR ) + + subject_hbox = wx.BoxSizer( wx.HORIZONTAL ) + + subject_hbox.AddF( wx.StaticText( self, label = 'subject: ' ), FLAGS_MIXED ) + subject_hbox.AddF( self._subject, FLAGS_EXPAND_BOTH_WAYS ) + + vbox.AddF( subject_hbox, FLAGS_EXPAND_PERPENDICULAR ) + + + vbox.AddF( self._body, FLAGS_EXPAND_BOTH_WAYS ) + #vbox.AddF( wx.StaticText( self, label = 'attachment hashes:' ), FLAGS_MIXED ) + #vbox.AddF( self._attachments, FLAGS_EXPAND_PERPENDICULAR ) + self._attachments.Hide() + button_hbox = wx.BoxSizer( wx.HORIZONTAL ) + button_hbox.AddF( self._send, FLAGS_MIXED ) + button_hbox.AddF( self._delete_draft, FLAGS_MIXED ) + button_hbox.AddF( self._save_draft, FLAGS_MIXED ) + + vbox.AddF( button_hbox, FLAGS_BUTTON_SIZERS ) + + self.SetSizer( vbox ) + + HC.pubsub.sub( self, 'AddContact', 'add_contact' ) + HC.pubsub.sub( self, 'DraftSaved', 'draft_saved' ) + + if not self._draft_message.IsReply(): wx.CallAfter( self._new_recipient.SetFocus ) + + + def _GetDraftMessage( self ): + + ( self._draft_key, self._conversation_key, subject, self._contact_from, contacts_to, recipients_visible, body, attachment_hashes ) = self._draft_message.GetInfo() + + if not self._draft_message.IsReply(): + + subject = self._subject.GetValue() + contacts_to = [ self._recipients_list.GetItemText( i ) for i in range( self._recipients_list.GetItemCount() ) ] + recipients_visible = self._recipients_visible.GetValue() + + + body = self._body.GetXMLHTML() + + try: + + raw_attachments = self._attachments.GetValue() + + attachment_hashes = [ hash.decode( 'hex' ) for hash in raw_attachments.split( os.linesep ) if hash != '' ] + + except: + + attachment_hashes = [] + + wx.MessageBox( 'Could not parse attachments!' 
) + + + return ClientConstantsMessages.DraftMessage( self._draft_key, self._conversation_key, subject, self._contact_from, contacts_to, recipients_visible, body, attachment_hashes ) + + + def AddContact( self, compose_key, name ): + + if compose_key == self._compose_key: + + index = self._recipients_list.FindItem( -1, name ) + + if index == -1: self._recipients_list.Append( ( name, ) ) + else: self._recipients_list.DeleteItem( index ) + + self.EventChanged( None ) + + + + def DraftSaved( self, draft_key, draft_message ): + + if draft_key == self._draft_key: + + self._draft_changed = False + + self._save_draft.SetLabel( 'saved' ) + self._save_draft.Disable() + + self._delete_draft.SetLabel( 'delete' ) + + self._draft_message.Saved() + + + + def EventChanged( self, event ): + + if not self._draft_changed: + + self._draft_changed = True + + self._send.Enable() + self._save_draft.Enable() + + + if event is not None: event.Skip() + + + def EventDeleteDraft( self, event ): wx.GetApp().Write( 'delete_draft', self._draft_key ) + + def EventSend( self, event ): + + draft_message = self._GetDraftMessage() + + transport_messages = wx.GetApp().Read( 'transport_messages_from_draft', draft_message ) + + if self._contact_from.GetName() != 'Anonymous': + + try: + + my_message_depot = wx.GetApp().Read( 'service', self._contact_from ) + + connection = my_message_depot.GetConnection() + + my_public_key = self._contact_from.GetPublicKey() + my_contact_key = self._contact_from.GetContactKey() + + for transport_message in transport_messages: + + packaged_message = HydrusMessageHandling.PackageMessageForDelivery( transport_message, my_public_key ) + + connection.Post( 'message', contact_key = my_contact_key, message = packaged_message ) + + message_key = transport_message.GetMessageKey() + + status_updates = [] + + for contact_to in transport_message.GetContactsTo(): + + contact_to_key = contact_to.GetContactKey() + + status_key = hashlib.sha256( contact_to_key + message_key ).digest() + + status = HydrusMessageHandling.PackageStatusForDelivery( ( message_key, contact_to_key, 'pending' ), my_public_key ) + + status_updates.append( ( status_key, status ) ) + + + connection.Post( 'message_statuses', contact_key = my_contact_key, statuses = status_updates ) + + + except: + + wx.MessageBox( 'The hydrus client could not connect to your message depot, so the message could not be sent!' 
) + print( traceback.format_exc() ) + + return + + + + for transport_message in transport_messages: wx.GetApp().Write( 'message', transport_message, forced_status = 'pending' ) + + draft_key = draft_message.GetDraftKey() + + wx.GetApp().Write( 'delete_draft', draft_key ) + + + def EventSaveDraft( self, event ): + + draft_message = self._GetDraftMessage() + + wx.GetApp().Write( 'draft_message', draft_message ) + + + def EventRemove( self, event ): + + selection = self._recipients_list.GetFirstSelected() + + if selection != wx.NOT_FOUND: + + self._recipients_list.DeleteItem( selection ) + + self.EventChanged( None ) + + + + def GetConversationKey( self ): return self._conversation_key + + def GetDraftKey( self ): return self._draft_key + +class MessageHTML( wx.html.HtmlWindow ): + + def __init__( self, *args, **kwargs ): + + kwargs[ 'style' ] = wx.html.HW_SCROLLBAR_NEVER + + wx.html.HtmlWindow.__init__( self, *args, **kwargs ) + + self.Bind( wx.EVT_MOUSEWHEEL, self.EventScroll ) + + self.SetRelatedFrame( wx.GetTopLevelParent( self ), '%s' ) + self.SetRelatedStatusBar( 0 ) + + + def EventScroll( self, event ): + + sw = self.GetParent().GetParent() + + sw.GetEventHandler().ProcessEvent( event ) + + + def GetClientSize( self ): return self.GetSize() + + def OnLinkClicked( self, link ): webbrowser.open( link.GetHref() ) + + def OnOpeningURL( self, type, url, redirect ): return wx.html.HTML_BLOCK + +class MessagePanel( wx.Panel ): + + def __init__( self, parent, message, identity ): + + wx.Panel.__init__( self, parent ) + + self.SetBackgroundColour( CC.COLOUR_MESSAGE ) + + self._message = message + self._identity = identity + + vbox = wx.BoxSizer( wx.VERTICAL ) + + contact_from = self._message.GetContactFrom() + + if contact_from is None: name = 'Anonymous' + else: name = self._message.GetContactFrom().GetName() + + #vbox.AddF( wx.StaticText( self, label = name + ', ' + HC.ConvertTimestampToPrettyAgo( self._message.GetTimestamp() ) ), FLAGS_EXPAND_PERPENDICULAR ) + vbox.AddF( ClientGUICommon.AnimatedStaticTextTimestamp( self, name + ', ', HC.ConvertTimestampToPrettyAgo, self._message.GetTimestamp(), '' ), FLAGS_EXPAND_PERPENDICULAR ) + + body = self._message.GetBody() + + display_body = not ( body is None or body == '' ) + + if display_body: + + self._body_panel = wx.Panel( self ) + + wx.CallAfter( self.SetBody, body ) + + else: self._body_panel = wx.StaticText( self, label = 'no body' ) + + self._message_key = self._message.GetMessageKey() + destinations = self._message.GetDestinations() + + self._destinations_panel = DestinationsPanel( self, self._message_key, destinations, identity ) + + self._hbox = wx.BoxSizer( wx.HORIZONTAL ) + + self._hbox.AddF( self._body_panel, FLAGS_EXPAND_BOTH_WAYS ) + self._hbox.AddF( self._destinations_panel, FLAGS_EXPAND_PERPENDICULAR ) + + vbox.AddF( self._hbox, FLAGS_EXPAND_BOTH_WAYS ) + + # vbox.AddF( some kind of attachment window! 
) + + self.SetSizer( vbox ) + + + def SetBody( self, body ): + + with wx.FrozenWindow( self ): + + ( width, height ) = self._body_panel.GetClientSize() + + body_panel = MessageHTML( self, size = ( width, -1 ) ) + body_panel.SetPage( body ) + + internal = body_panel.GetInternalRepresentation() + + body_panel.SetSize( ( -1, internal.GetHeight() ) ) + + self._hbox.Replace( self._body_panel, body_panel ) + + self._body_panel.Destroy() + + self._body_panel = body_panel + + + self.Layout() + self.GetParent().FitInside() + + + \ No newline at end of file diff --git a/include/ClientGUIMixins.py b/include/ClientGUIMixins.py new file mode 100755 index 00000000..e6228b1a --- /dev/null +++ b/include/ClientGUIMixins.py @@ -0,0 +1,628 @@ +import collections +import ClientConstants as CC +import HydrusConstants as HC +import random +import time +import traceback +import wx + +class Media(): + + def __init__( self ): pass + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __ne__( self, other ): return self.__hash__() != other.__hash__() + +class MediaList(): + + def __init__( self, file_service_identifier, predicates, file_query_result ): + + self._file_service_identifier = file_service_identifier + self._predicates = predicates + + self._file_query_result = file_query_result + + self._sorted_media = [ self._GenerateMediaSingleton( media_result ) for media_result in file_query_result ] + self._sorted_media_to_indices = { media : index for ( index, media ) in enumerate( self._sorted_media ) } + + self._singleton_media = set( self._sorted_media ) + self._collected_media = set() + + + def _GenerateMediaCollection( self, media_results ): return MediaCollection( self._file_service_identifier, self._predicates, media_results ) + + def _GenerateMediaSingleton( self, media_result ): return MediaSingleton( media_result ) + + def _GetFirst( self ): return self._sorted_media[ 0 ] + + def _GetLast( self ): return self._sorted_media[ -1 ] + + def _GetMedia( self, hashes, discriminator = None ): + + if discriminator is None: medias = self._sorted_media + elif discriminator == 'singletons': medias = self._singleton_media + elif discriminator == 'collections': medias = self._collected_media + + return [ media for media in medias if not hashes.isdisjoint( media.GetHashes() ) ] + + + def _GetNext( self, media ): + + if media is None: return None + + next_index = self._sorted_media_to_indices[ media ] + 1 + + if next_index == len( self._sorted_media ): return self._GetFirst() + else: return self._sorted_media[ next_index ] + + + def _GetPrevious( self, media ): + + if media is None: return None + + previous_index = self._sorted_media_to_indices[ media ] - 1 + + if previous_index == -1: return self._GetLast() + else: return self._sorted_media[ previous_index ] + + + def _RemoveMedia( self, singleton_media, collected_media ): + + self._singleton_media.difference_update( singleton_media ) + self._collected_media.difference_update( collected_media ) + + self._sorted_media = [ media for media in self._sorted_media if media in self._singleton_media or media in self._collected_media ] + self._sorted_media_to_indices = { media : index for ( index, media ) in enumerate( self._sorted_media ) } + + + def AddMediaResult( self, media_result ): + + self._file_query_result.AddMediaResult( media_result ) + + hash = media_result.GetHash() + + if hash in self._GetHashes(): return + + media = self._GenerateMediaSingleton( media_result ) + + # turn this little bit into a medialist call, yo + # but be careful of 
media vs media_result + self._singleton_media.add( media ) + self._sorted_media.append( media ) + self._sorted_media_to_indices[ media ] = len( self._sorted_media ) - 1 + + return media + + + def Collect( self, collect_by ): + + try: + + for media in self._collected_media: self._singleton_media.update( [ self._GenerateMediaSingleton( media_result ) for media_result in media.GenerateMediaResults() ] ) + + self._collected_media = set() + + if collect_by is not None: + + singletons = set() + + keys_to_multiples_media = collections.defaultdict( list ) + + for media in self._singleton_media: + + key = media.GetTags().GetNamespaceSlice( collect_by ) + + keys_to_multiples_media[ key ].append( media ) + + + self._singleton_media = singletons + self._collected_media = set( [ self._GenerateMediaCollection( [ media.GetMediaResult() for media in multiples_media ] ) for multiples_media in keys_to_multiples_media.values() ] ) + + + self._sorted_media = list( self._singleton_media ) + list( self._collected_media ) + + except: wx.MessageBox( traceback.format_exc() ) + + + def DeletePending( self, service_identifier ): + + for media in self._collected_media: media.DeletePending( service_identifier ) + + + def GenerateMediaResults( self, discriminant = None, selected_media = None, unrated = None ): + + media_results = [] + + for media in self._sorted_media: + + if selected_media is not None and media not in selected_media: continue + + if media.IsCollection(): media_results.extend( media.GenerateMediaResults( discriminant ) ) + else: + + if discriminant is not None: + if ( discriminant == CC.DISCRIMINANT_INBOX and not media.HasInbox() ) or ( discriminant == CC.DISCRIMINANT_LOCAL and not media.GetFileServiceIdentifiersCDPP().HasLocal() ) or ( discriminant == CC.DISCRIMINANT_NOT_LOCAL and media.GetFileServiceIdentifiersCDPP().HasLocal() ): continue + + if unrated is not None: + + ( local_ratings, remote_ratings ) = media.GetRatings() + + if local_ratings.GetRating( unrated ) is not None: continue + + + media_results.append( media.GetMediaResult() ) + + + + return media_results + + + def GetMediaIndex( self, media ): return self._sorted_media_to_indices[ media ] + + def GetSortedMedia( self ): return self._sorted_media + + def HasMedia( self, media ): + + if media is None: return False + + if media in self._singleton_media: return True + elif media in self._collected_media: return True + else: + + for media_collection in self._collected_media: + + if media_collection.HasMedia( media ): return True + + + + return False + + + def HasNoMedia( self ): return len( self._sorted_media ) == 0 + + def ProcessContentUpdate( self, content_update ): + + action = content_update.GetAction() + + service_identifier = content_update.GetServiceIdentifier() + + hashes = content_update.GetHashes() + + for media in self._GetMedia( hashes, 'collections' ): media.ProcessContentUpdate( content_update ) + + if action == CC.CONTENT_UPDATE_ARCHIVE: + + if 'system:inbox' in self._predicates: + + affected_singleton_media = self._GetMedia( hashes, 'singletons' ) + affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ] + + self._RemoveMedia( affected_singleton_media, affected_collected_media ) + + + elif action == CC.CONTENT_UPDATE_DELETE and service_identifier == self._file_service_identifier: + + affected_singleton_media = self._GetMedia( hashes, 'singletons' ) + affected_collected_media = [ media for media in self._collected_media if media.HasNoMedia() ] + + self._RemoveMedia( 
affected_singleton_media, affected_collected_media ) + + + + def ProcessContentUpdates( self, content_updates ): + + for content_update in content_updates: self.ProcessContentUpdate( content_update ) + + + def ProcessServiceUpdate( self, update ): + + action = update.GetAction() + + service_identifier = update.GetServiceIdentifier() + + if action == CC.SERVICE_UPDATE_DELETE_PENDING: self.DeletePending( service_identifier ) + elif action == CC.SERVICE_UPDATE_RESET: self.ResetService( service_identifier ) + + + def ResetService( self, service_identifier ): + + if service_identifier == self._file_service_identifier: self._RemoveMedia( self._singleton_media, self._collected_media ) + else: + + for media in self._collected_media: media.ResetService( service_identifier ) + + + + def Sort( self, sort_by ): + + ( sort_by_type, sort_by_data ) = sort_by + + if sort_by_type == 'system': + + if sort_by_data == CC.SORT_BY_RANDOM: random.shuffle( self._sorted_media ) + else: + + if sort_by_data == CC.SORT_BY_SMALLEST: compare_function = lambda x, y: cmp( x.GetSize(), y.GetSize() ) + elif sort_by_data == CC.SORT_BY_LARGEST: compare_function = lambda x, y: cmp( y.GetSize(), x.GetSize() ) + elif sort_by_data == CC.SORT_BY_SHORTEST: compare_function = lambda x, y: cmp( x.GetDuration(), y.GetDuration() ) + elif sort_by_data == CC.SORT_BY_LONGEST: compare_function = lambda x, y: cmp( y.GetDuration(), x.GetDuration() ) + elif sort_by_data == CC.SORT_BY_OLDEST: compare_function = lambda x, y: cmp( x.GetTimestamp(), y.GetTimestamp() ) + elif sort_by_data == CC.SORT_BY_NEWEST: compare_function = lambda x, y: cmp( y.GetTimestamp(), x.GetTimestamp() ) + elif sort_by_data == CC.SORT_BY_MIME: compare_function = lambda x, y: cmp( x.GetMime(), y.GetMime() ) + + self._sorted_media.sort( compare_function ) + + + elif sort_by_type == 'namespaces': + + def namespace_compare( x, y ): + + x_tags = x.GetTags() + y_tags = y.GetTags() + + for namespace in sort_by_data: + + x_namespace_slice = x_tags.GetNamespaceSlice( ( namespace, ) ) + y_namespace_slice = y_tags.GetNamespaceSlice( ( namespace, ) ) + + if x_namespace_slice == y_namespace_slice: continue # this covers len == 0 for both, too + else: + + if len( x_namespace_slice ) == 1 and len( y_namespace_slice ) == 1: + + #convert from frozenset to tuple to extract the single member, then get the t from the n:t concat. + x_value = tuple( x_namespace_slice )[0].split( ':', 1 )[1] + y_value = tuple( y_namespace_slice )[0].split( ':', 1 )[1] + + try: return cmp( int( x_value ), int( y_value ) ) + except: return cmp( x_value, y_value ) + + elif len( x_namespace_slice ) == 0: return 1 # I'm sure the 1 and -1 should be the other way around, but that seems to be a wrong thought + elif len( y_namespace_slice ) == 0: return -1 # any membership has precedence over non-membership, right? I'm understanding it wrong, clearly. 
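# For what it's worth, the 1 / -1 just above do appear to match the stated intent: in a Python 2
# cmp-style comparator a positive return sorts x after y and a negative return sorts x before y,
# so returning 1 when x lacks the namespace (and y has it) pushes the untagged media to the end,
# meaning media that do have a value for the namespace really do take precedence over media that do not.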
+ else: + + # compare the earliest/smallest/lexicographically-first non-common values + + x_list = list( x_namespace_slice ) + + x_list.sort() + + for x_value in x_list: + + if x_value not in y_namespace_slice: + + x_value = x_value.split( ':', 1 )[1] + y_value = min( y_namespace_slice ).split( ':', 1 )[1] + + try: return cmp( int( x_value ), int( y_value ) ) + except: return cmp( x_value, y_value ) + + + + + + + return cmp( x.GetSize(), y.GetSize() ) + + + self._sorted_media.sort( namespace_compare ) + + elif sort_by_type in ( 'rating_descend', 'rating_ascend' ): + + service_identifier = sort_by_data + + service_type = service_identifier.GetType() + + def ratings_compare( x, y ): + + ( x_local_ratings, x_remote_ratings ) = x.GetRatings() + ( y_local_ratings, y_remote_ratings ) = y.GetRatings() + + # btw None is always considered less than an int in cmp( int, None ) + + if service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ): return cmp( x_local_ratings.GetRating( service_identifier ), y_local_ratings.GetRating( service_identifier ) ) + else: return cmp( x_remote_ratings.GetScore( service_identifier ), y_remote_ratings.GetScore( service_identifier ) ) + + + reverse = sort_by_type == 'rating_descend' + self._sorted_media.sort( ratings_compare, reverse = reverse ) + + + for media in self._collected_media: media.Sort( sort_by ) + + self._sorted_media_to_indices = { media : index for ( index, media ) in enumerate( self._sorted_media ) } + + +class ListeningMediaList( MediaList ): + + def __init__( self, *args ): + + MediaList.__init__( self, *args ) + + HC.pubsub.sub( self, 'ProcessContentUpdates', 'content_updates_gui' ) + HC.pubsub.sub( self, 'ProcessServiceUpdate', 'service_update_gui' ) + + +class MediaCollection( MediaList, Media ): + + def __init__( self, file_service_identifier, predicates, file_query_result ): + + Media.__init__( self ) + MediaList.__init__( self, file_service_identifier, predicates, file_query_result ) + + self._hashes = set() + + self._inbox = False + + self._size = 0 + self._size_definite = True + + self._timestamp = 0 + + self._width = None + self._height = None + self._duration = None + self._num_frames = None + self._num_words = None + self._tags = None + self._file_service_identifiers = None + + self._RecalcInternals() + + + def __hash__( self ): return frozenset( self._hashes ).__hash__() + + def _RecalcInternals( self ): + + self._hashes = HC.IntelligentMassUnion( [ media.GetHashes() for media in self._sorted_media ] ) + + self._inbox = True in ( media.HasInbox() for media in self._sorted_media ) + + self._size = sum( [ media.GetSize() for media in self._sorted_media ] ) + self._size_definite = not False in ( media.IsSizeDefinite() for media in self._sorted_media ) + + if len( self._sorted_media ) == 0: self._timestamp = 0 + else: self._timestamp = max( [ media.GetTimestamp() for media in self._sorted_media ] ) + + duration_sum = sum( [ media.GetDuration() for media in self._sorted_media if media.HasDuration() ] ) + + if duration_sum > 0: self._duration = duration_sum + else: self._duration = None + + # better-but-still-pretty-horrible code starts here + + # remember: the only time a collection is asked for its tags is by thumbnail.getbmp(), to draw series and page info + # until I make SVCP more complicated, it mostly needs only be a quick and ugly intersection + + all_tags_cdpp = [ m.GetTags().GetServiceIdentifiersToCDPP() for m in self._sorted_media ] + + combined_tags = collections.defaultdict( list ) + + for tags_cdpp in all_tags_cdpp: + + 
for ( service_identifier, cdpp ) in tags_cdpp.items(): combined_tags[ service_identifier ].append( cdpp ) + + + final_tags = {} + + for ( service_identifier, cdpps ) in combined_tags.items(): + + current = list( HC.IntelligentMassIntersect( ( c for ( c, d, p, pet ) in cdpps ) ) ) + deleted = [] + pending = list( HC.IntelligentMassIntersect( ( p for ( c, d, p, pet ) in cdpps ) ) ) + petitioned = [] + + final_tags[ service_identifier ] = ( current, deleted, pending, petitioned ) + + + self._tags = CC.CDPPTagServiceIdentifiers( wx.GetApp().Read( 'tag_service_precedence' ), final_tags ) + + # END OF HORRIBLE CODE + + all_file_service_identifiers = [ media.GetFileServiceIdentifiersCDPP() for media in self._sorted_media ] + + current = HC.IntelligentMassIntersect( [ file_service_identifiers.GetCurrent() for file_service_identifiers in all_file_service_identifiers ] ) + deleted = HC.IntelligentMassIntersect( [ file_service_identifiers.GetDeleted() for file_service_identifiers in all_file_service_identifiers ] ) + pending = HC.IntelligentMassIntersect( [ file_service_identifiers.GetPending() for file_service_identifiers in all_file_service_identifiers ] ) + petitioned = HC.IntelligentMassIntersect( [ file_service_identifiers.GetPetitioned() for file_service_identifiers in all_file_service_identifiers ] ) + + self._file_service_identifiers = CC.CDPPFileServiceIdentifiers( current, deleted, pending, petitioned ) + + + def DeletePending( self, service_identifier ): + + MediaList.DeletePending( self, service_identifier ) + + self._RecalcInternals() + + + def GetDisplayMedia( self ): return self._GetFirst().GetDisplayMedia() + + def GetDuration( self ): return self._duration + + def GetHashes( self, discriminant = None, not_uploaded_to = None ): + + if discriminant is None and not_uploaded_to is None: return self._hashes + else: return HC.IntelligentMassUnion( [ media.GetHashes( discriminant, not_uploaded_to ) for media in self._sorted_media ] ) + + + def GetHashes( self, discriminant = None, not_uploaded_to = None ): + + if discriminant is not None: + if ( discriminant == CC.DISCRIMINANT_INBOX and not self._inbox ) or ( discriminant == CC.DISCRIMINANT_LOCAL and not self.GetFileServiceIdentifiersCDPP().HasLocal() ) or ( discriminant == CC.DISCRIMINANT_NOT_LOCAL and self.GetFileServiceIdentifiersCDPP().HasLocal() ): return set() + + if not_uploaded_to is not None: + if not_uploaded_to in self._file_service_identifiers.GetCurrentRemote(): return set() + + return self._hashes + + + def GetFileServiceIdentifiersCDPP( self ): return self._file_service_identifiers + + def GetMime( self ): return HC.APPLICATION_HYDRUS_CLIENT_COLLECTION + + def GetNumFiles( self ): return len( self._hashes ) + + def GetNumFrames( self ): return sum( [ media.GetNumFrames() for media in self._sorted_media ] ) + + def GetPrettyAge( self ): return HC.ConvertTimestampToPrettyAge( self._timestamp ) + + def GetPrettyInfo( self ): + + size = HC.ConvertIntToBytes( self._size ) + + mime = HC.mime_string_lookup[ HC.APPLICATION_HYDRUS_CLIENT_COLLECTION ] + + info_string = size + ' ' + mime + + info_string += ' (' + HC.ConvertIntToPrettyString( self.GetNumFiles() ) + ' files)' + + return info_string + + + def GetResolution( self ): return ( self._width, self._height ) + + def GetSingletonsTags( self ): + + all_tags = [ m.GetTags() for m in self._singleton_media ] + + for m in self._collected_media: all_tags.extend( m.GetSingletonsTags() ) + + return all_tags + + + def GetSize( self ): return self._size + + def GetTags( self ): return 
self._tags + + def GetTimestamp( self ): return self._timestamp + + def HasDuration( self ): return self._duration is not None + + def HasImages( self ): return True in ( media.HasImages() for media in self._collected_media | self._singleton_media ) + + def HasInbox( self ): return self._inbox + + def IsCollection( self ): return True + + def IsImage( self ): return HC.IsImage( self._mime ) + + def IsNoisy( self ): return self.GetDisplayMedia().GetMime() in HC.NOISY_MIMES + + def IsSizeDefinite( self ): return self._size_definite + + def ProcessContentUpdate( self, content_update ): + + MediaList.ProcessContentUpdate( self, content_update ) + + self._RecalcInternals() + + + def ResetService( self, service_identifier ): + + MediaList.ResetService( self, service_identifier ) + + self._RecalcInternals() + + +class MediaSingleton( Media ): + + def __init__( self, media_result ): + + Media.__init__( self ) + + self._media_result = media_result + + + def __hash__( self ): return self.GetHash().__hash__() + + def GetDisplayMedia( self ): return self + + def GetDuration( self ): return self._media_result.GetDuration() + + def GetHash( self ): return self._media_result.GetHash() + + def GetHashes( self, discriminant = None, not_uploaded_to = None ): + + inbox = self._media_result.GetInbox() + file_service_identifiers = self._media_result.GetFileServiceIdentifiersCDPP() + + if discriminant is not None: + if ( discriminant == CC.DISCRIMINANT_INBOX and not inbox ) or ( discriminant == CC.DISCRIMINANT_LOCAL and not file_service_identifiers.HasLocal() ) or ( discriminant == CC.DISCRIMINANT_NOT_LOCAL and file_service_identifiers.HasLocal() ): return set() + + if not_uploaded_to is not None: + if not_uploaded_to in file_service_identifiers.GetCurrentRemote(): return set() + + return set( [ self._media_result.GetHash() ] ) + + + def GetFileServiceIdentifiersCDPP( self ): return self._media_result.GetFileServiceIdentifiersCDPP() + + def GetMediaResult( self ): return self._media_result + + def GetMime( self ): return self._media_result.GetMime() + + def GetNumFiles( self ): return 1 + + def GetNumFrames( self ): return self._media_result.GetNumFrames() + + def GetTimestamp( self ): + + timestamp = self._media_result.GetTimestamp() + + if timestamp is None: return 0 + else: return timestamp + + + def GetPrettyAge( self ): return HC.ConvertTimestampToPrettyAge( self._media_result.GetTimestamp() ) + + def GetPrettyInfo( self ): + + ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags, file_service_identifiers, local_ratings, remote_ratings ) = self._media_result.GetInfo() + + info_string = HC.ConvertIntToBytes( size ) + ' ' + HC.mime_string_lookup[ mime ] + + if width is not None and height is not None: info_string += ' (' + HC.ConvertIntToPrettyString( width ) + 'x' + HC.ConvertIntToPrettyString( height ) + ')' + + if duration is not None: info_string += ', ' + HC.ConvertMillisecondsToPrettyTime( duration ) + + if num_frames is not None: info_string += ' (' + HC.ConvertIntToPrettyString( num_frames ) + ' frames)' + + return info_string + + + def GetRatings( self ): return self._media_result.GetRatings() + + def GetResolution( self ): + + ( width, height ) = self._media_result.GetResolution() + + if width is None: return ( 0, 0 ) + else: return ( width, height ) + + + def GetSize( self ): + + size = self._media_result.GetSize() + + if size is None: return 0 + else: return size + + + def GetTags( self ): return self._media_result.GetTags() + + def HasDuration( self ): 
return self._media_result.GetDuration() is not None + + def HasImages( self ): return self.IsImage() + + def HasInbox( self ): return self._media_result.GetInbox() + + def IsCollection( self ): return False + + def IsImage( self ): return HC.IsImage( self._media_result.GetMime() ) + + def IsNoisy( self ): return self.GetMime() in HC.NOISY_MIMES + + def IsSizeDefinite( self ): return self._media_result.GetSize() is not None + \ No newline at end of file diff --git a/include/ClientGUIPages.py b/include/ClientGUIPages.py new file mode 100755 index 00000000..29df788f --- /dev/null +++ b/include/ClientGUIPages.py @@ -0,0 +1,353 @@ +import HydrusConstants as HC +import ClientConstants as CC +import ClientGUICommon +import ClientGUIDialogs +import ClientGUIManagement +import ClientGUIMedia +import ClientGUIMessages +import ClientGUICanvas +import os +import time +import traceback +import wx + +# Sizer Flags + +FLAGS_NONE = wx.SizerFlags( 0 ) + +FLAGS_SMALL_INDENT = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ) + +FLAGS_EXPAND_PERPENDICULAR = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Expand() +FLAGS_EXPAND_BOTH_WAYS = wx.SizerFlags( 2 ).Border( wx.ALL, 2 ).Expand() + +FLAGS_BUTTON_SIZERS = wx.SizerFlags( 0 ).Align( wx.ALIGN_RIGHT ) +FLAGS_LONE_BUTTON = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_RIGHT ) + +FLAGS_MIXED = wx.SizerFlags( 0 ).Border( wx.ALL, 2 ).Align( wx.ALIGN_CENTER_VERTICAL ) + +class PageBase(): + + def __init__( self ): + + self._page_key = os.urandom( 32 ) + + self._pretty_status = '' + + self._options = wx.GetApp().Read( 'options' ) + + HC.pubsub.sub( self, 'SetPrettyStatus', 'new_page_status' ) + + + def GetPrettyStatus( self ): return self._pretty_status + + def GetSashPositions( self ): + + x = self._options[ 'hpos' ] + + y = self._options[ 'vpos' ] + + return ( x, y ) + + + def PageHidden( self ): HC.pubsub.pub( 'page_hidden', self._page_key ) + + def PageShown( self ): HC.pubsub.pub( 'page_shown', self._page_key ) + + def SetPrettyStatus( self, page_key, status ): + + if page_key == self._page_key: + + self._pretty_status = status + + HC.pubsub.pub( 'refresh_status' ) + + + + def RefreshQuery( self ): HC.pubsub.pub( 'refresh_query', self._page_key ) + + def SetMediaFocus( self ): pass + + def SetSearchFocus( self ): HC.pubsub.pub( 'set_search_focus', self._page_key ) + + def SetSynchronisedWait( self ): HC.pubsub.pub( 'synchronised_wait_switch', self._page_key ) + + def ShowHideSplit( self ): pass + + def TryToClose( self ): pass + +class PageLog( PageBase, wx.Panel ): + + def __init__( self, parent ): + + wx.Panel.__init__( self, parent ) + PageBase.__init__( self ) + + log = wx.GetApp().GetLog() + + self._log_list_ctrl = ClientGUICommon.SaneListCtrl( self, 480, [ ( 'type', 60 ), ( 'source', 180 ), ( 'message', -1 ), ( 'time', 120 ) ] ) + + for ( type, source, message, time ) in log: self._AddEntry( type, source, message, time ) + + vbox = wx.BoxSizer( wx.VERTICAL ) + + vbox.AddF( self._log_list_ctrl, FLAGS_EXPAND_BOTH_WAYS ) + + self.SetSizer( vbox ) + + HC.pubsub.sub( self, 'AddError', 'log_error' ) + HC.pubsub.sub( self, 'AddMessage', 'log_message' ) + + + def _AddEntry( self, type, source, message, time ): self._log_list_ctrl.Append( ( CC.log_string_lookup[ type ], source, message, HC.ConvertTimestampToPrettyTime( time ) ), ( CC.log_string_lookup[ type ], source, message, time ) ) + + def AddError( self, source, message ): + + # assuming we want to show errors right now + + self._AddEntry( CC.LOG_ERROR, source, message, time.time() ) + + + def AddMessage( self, 
source, message ): + + # assuming we want to show messages right now + + self._AddEntry( CC.LOG_MESSAGE, source, message, time.time() ) + + +class PageMessages( PageBase, wx.SplitterWindow ): + + def __init__( self, parent, identity ): + + wx.SplitterWindow.__init__( self, parent ) + PageBase.__init__( self ) + + self.SetMinimumPaneSize( 120 ) + self.SetSashGravity( 0.0 ) + + self._identity = identity + + self._search_preview_split = wx.SplitterWindow( self, style=wx.SP_NOBORDER ) + + self._search_preview_split.SetMinimumPaneSize( 180 ) + self._search_preview_split.SetSashGravity( 0.5 ) + + self._search_preview_split.Bind( wx.EVT_SPLITTER_DCLICK, self.EventPreviewUnsplit ) + + self._InitManagementPanel() + self._preview_panel = ClientGUICanvas.CanvasPanel( self._search_preview_split, self._page_key, CC.LOCAL_FILE_SERVICE_IDENTIFIER ) + self._InitMessagesPanel() + + self.SplitVertically( self._search_preview_split, self._messages_panel, self._options[ 'hpos' ] ) + wx.CallAfter( self._search_preview_split.SplitHorizontally, self._management_panel, self._preview_panel, self._options[ 'vpos' ] ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelMessages( self._search_preview_split, self._page_key, self._identity ) + + def _InitMessagesPanel( self ): self._messages_panel = ClientGUIMessages.ConversationSplitter( self, self._page_key, self._identity ) + + def EventPreviewUnsplit( self, event ): self._search_preview_split.Unsplit( self._preview_panel ) + + def GetSashPositions( self ): + + if self.IsSplit(): x = self.GetSashPosition() + else: x = self._options[ 'hpos' ] + + if self._search_preview_split.IsSplit(): y = -1 * self._preview_panel.GetSize()[1] + else: y = self._options[ 'vpos' ] + + return ( x, y ) + + + def ShowHideSplit( self ): + + if self._search_preview_split.IsSplit(): self._search_preview_split.Unsplit( self._preview_panel ) + else: self._search_preview_split.SplitHorizontally( self._management_panel, self._preview_panel, self._options[ 'vpos' ] ) + + + def TryToClose( self ): self._management_panel.TryToClose() + +class PageWithMedia( PageBase, wx.SplitterWindow ): + + def __init__( self, parent, file_service_identifier = CC.LOCAL_FILE_SERVICE_IDENTIFIER ): + + wx.SplitterWindow.__init__( self, parent ) + PageBase.__init__( self ) + + self._file_service_identifier = file_service_identifier + + self.SetMinimumPaneSize( 120 ) + self.SetSashGravity( 0.0 ) + + self.Bind( wx.EVT_SPLITTER_DCLICK, self.EventUnsplit ) + + self._search_preview_split = wx.SplitterWindow( self, style=wx.SP_NOBORDER ) + + self._search_preview_split.SetMinimumPaneSize( 180 ) + self._search_preview_split.SetSashGravity( 0.5 ) + + self._search_preview_split.Bind( wx.EVT_SPLITTER_DCLICK, self.EventPreviewUnsplit ) + + self._InitManagementPanel() + self._preview_panel = ClientGUICanvas.CanvasPanel( self._search_preview_split, self._page_key, self._file_service_identifier ) + self._InitMediaPanel() + + self.SplitVertically( self._search_preview_split, self._media_panel, self._options[ 'hpos' ] ) + wx.CallAfter( self._search_preview_split.SplitHorizontally, self._management_panel, self._preview_panel, self._options[ 'vpos' ] ) + + HC.pubsub.sub( self, 'SwapMediaPanel', 'swap_media_panel' ) + + + def EventPreviewUnsplit( self, event ): self._search_preview_split.Unsplit( self._preview_panel ) + + def EventUnsplit( self, event ): self.Unsplit( self._search_preview_split ) + + # used by autocomplete + def GetMedia( self ): return self._media_panel.GetSortedMedia() + 
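# ( x, y ) below are the two sash positions used to restore the layout: x is the main vertical sash
# between the search/preview split and the media or messages panel, and y is the horizontal sash between
# the management panel and the preview canvas. The negative y appears to rely on wx treating a negative
# sash position as the size of the bottom pane, hence deriving it from the preview panel's height.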
+ def GetSashPositions( self ): + + if self.IsSplit(): x = self.GetSashPosition() + else: x = self._options[ 'hpos' ] + + if self._search_preview_split.IsSplit(): y = -1 * self._preview_panel.GetSize()[1] + else: y = self._options[ 'vpos' ] + + return ( x, y ) + + + def ShowHideSplit( self ): + + if self.IsSplit(): + + self.Unsplit( self._search_preview_split ) + + else: + + self.SplitVertically( self._search_preview_split, self._media_panel, self._options[ 'hpos' ] ) + + self._search_preview_split.SplitHorizontally( self._management_panel, self._preview_panel, self._options[ 'vpos' ] ) + + + + def SetMediaFocus( self ): self._media_panel.SetFocus() + + def SwapMediaPanel( self, page_key, new_panel ): + + if page_key == self._page_key: + + self._preview_panel.SetMedia( None ) + + self.ReplaceWindow( self._media_panel, new_panel ) + + self._media_panel.Destroy() + + self._media_panel = new_panel + + + + def TryToClose( self ): self._management_panel.TryToClose() + +class PageImport( PageWithMedia ): + + def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, self._file_service_identifier, [], CC.FileQueryResult( self._file_service_identifier, [], [] ) ) + +class PageImportBooru( PageImport ): + + def __init__( self, parent, booru ): + + self._booru = booru + + PageImport.__init__( self, parent ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedBooru( self._search_preview_split, self, self._page_key, self._booru ) + +class PageImportDeviantArt( PageImport ): + + def __init__( self, parent ): + + PageImport.__init__( self, parent ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedDeviantArt( self._search_preview_split, self, self._page_key ) + +class PageImportHDD( PageImport ): + + def __init__( self, parent, paths, **kwargs ): + + self._paths = paths + self._kwargs = kwargs + + PageImport.__init__( self, parent ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportHDD( self._search_preview_split, self, self._page_key, self._paths, **self._kwargs ) + +class PageImportHentaiFoundryArtist( PageImport ): + + def __init__( self, parent ): + + PageImport.__init__( self, parent ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedHentaiFoundryArtist( self._search_preview_split, self, self._page_key ) + +class PageImportHentaiFoundryTags( PageImport ): + + def __init__( self, parent ): + + PageImport.__init__( self, parent ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueAdvancedHentaiFoundryTags( self._search_preview_split, self, self._page_key ) + +class PageImportThreadWatcher( PageImport ): + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportThreadWatcher( self._search_preview_split, self, self._page_key ) + +class PageImportURL( PageImport ): + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelImportWithQueueURL( self._search_preview_split, self, self._page_key ) + +class PagePetitions( PageWithMedia ): + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelPetitions( self._search_preview_split, self, self._page_key, self._file_service_identifier ) + + def 
_InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelNoQuery( self, self._page_key, self._file_service_identifier ) + +class PageQuery( PageWithMedia ): + + def __init__( self, parent, file_service_identifier, initial_predicates = [] ): + + self._initial_predicates = initial_predicates + + PageWithMedia.__init__( self, parent, file_service_identifier ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelQuery( self._search_preview_split, self, self._page_key, self._file_service_identifier, initial_predicates = self._initial_predicates ) + + def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelNoQuery( self, self._page_key, self._file_service_identifier ) + +class PageThreadDumper( PageWithMedia ): + + def __init__( self, parent, imageboard, hashes ): + + self._imageboard = imageboard + + search_context = CC.FileSearchContext() + + self._unsorted_file_query_result = wx.GetApp().Read( 'media_results', search_context, hashes ) + + hashes_to_media_results = { media_result.GetHash() : media_result for media_result in self._unsorted_file_query_result } + + self._media_results = [ hashes_to_media_results[ hash ] for hash in hashes ] + + self._media_results = filter( self._imageboard.IsOkToPost, self._media_results ) + + PageWithMedia.__init__( self, parent, CC.LOCAL_FILE_SERVICE_IDENTIFIER ) + + + def _InitManagementPanel( self ): self._management_panel = ClientGUIManagement.ManagementPanelDumper( self._search_preview_split, self, self._page_key, self._imageboard, self._media_results ) + + def _InitMediaPanel( self ): self._media_panel = ClientGUIMedia.MediaPanelThumbnails( self, self._page_key, CC.LOCAL_FILE_SERVICE_IDENTIFIER, [], self._media_results ) + \ No newline at end of file diff --git a/include/ClientParsers.py b/include/ClientParsers.py new file mode 100755 index 00000000..d4aeac4e --- /dev/null +++ b/include/ClientParsers.py @@ -0,0 +1,265 @@ +import bs4 +import lxml +import traceback +import urlparse + +def Parse4chanPostScreen( html ): + + soup = bs4.BeautifulSoup( html ) + + title_tag = soup.find( 'title' ) + + if title_tag.string == 'Post successful!': return ( 'success', None ) + else: + + try: + + problem_tag = soup.find( id = 'errmsg' ) + + if problem_tag is None: + + try: print( soup ) + except: pass + + return ( 'error', 'unknown problem, writing 4chan html to log' ) + + + problem = str( problem_tag ) + + if 'CAPTCHA' in problem: return ( 'captcha', None ) + elif 'seconds' in problem: return ( 'too quick', None ) + elif 'Duplicate' in problem: return ( 'error', 'duplicate file detected' ) + else: return ( 'error', problem ) + + except: return ( 'error', 'unknown error' ) + + +def ParseBooruGallery( html, url_base, thumb_classname ): + + urls_set = set() + urls = [] + + soup = bs4.BeautifulSoup( html ) + + thumbnails = soup.find_all( class_ = thumb_classname ) + + for thumbnail in thumbnails: + + links = thumbnail.find_all( 'a' ) + + if thumbnail.name == 'a': links.append( thumbnail ) + + for link in links: + + if link.string is not None and link.string == 'Image Only': continue # rule 34 @ paheal fix + + url = link[ 'href' ] + + url = urlparse.urljoin( url_base, url ) + + if url not in urls_set: + + urls_set.add( url ) + urls.append( url ) + + + + + return urls + +def ParseBooruPage( html, url_base, tag_classnames_to_namespaces, image_id = None, image_data = None ): + + soup = bs4.BeautifulSoup( html ) + + image_base = None + + if image_id is not None: + + image = soup.find( id = image_id ) 
+ + image_url = image[ 'src' ] + + + if image_data is not None: + + links = soup.find_all( 'a' ) + + for link in links: + + if link.string == image_data: image_url = link[ 'href' ] + + + + image_url = urlparse.urljoin( url_base, image_url ) + + image_url = image_url.replace( 'sample/sample-', '' ) # fix for danbooru resizing + + tags = [] + + for ( tag_classname, namespace ) in tag_classnames_to_namespaces.items(): + + tag_list_entries = soup.find_all( class_ = tag_classname ) + + for tag_list_entry in tag_list_entries: + + links = tag_list_entry.find_all( 'a' ) + + if tag_list_entry.name == 'a': links.append( tag_list_entry ) + + for link in links: + + if link.string not in ( '?', '-', '+' ): + + if namespace == '': tags.append( link.string ) + else: tags.append( namespace + ':' + link.string ) + + + + + + return ( image_url, tags ) + +def ParseDeviantArtGallery( html ): + + results = [] + + soup = bs4.BeautifulSoup( html ) + + thumbs_container = soup.find( class_ = 'stream stream-fh' ) + + def starts_with_thumb( classname ): return classname is not None and classname.startswith( 'thumb' ) + + links = thumbs_container.find_all( 'a', class_ = starts_with_thumb ) + + for link in links: + + page_url = link[ 'href' ] # something in the form of blah.da.com/art/blah-123456 + + page_url_split = page_url.split( '-' ) + + deviant_art_file_id = page_url_split[-1 ] + + image_url = 'http://www.deviantart.com/download/' + deviant_art_file_id + '/' # trailing slash is important + + raw_title = link[ 'title' ] # something in the form sweet dolls by ~AngeniaC, Feb 29, 2012 in Artisan Crafts > Miniatures > Jewelry + + tags = [] + + ( title, raw_title ) = raw_title.split( ' by ~', 1 ) + + ( creator, raw_title ) = raw_title.split( ', ', 1 ) + + ( date_gumpf, raw_category_tags ) = raw_title.split( ' in ', 1 ) + + category_tags = raw_category_tags.split( ' > ' ) + + tags = [] + + tags.append( 'title:' + title ) + tags.append( 'creator:' + creator ) + tags.extend( category_tags ) + + results.append( ( image_url, tags ) ) + + + return results + +def ParsePage( html, url ): + + soup = bs4.BeautifulSoup( html ) + + all_links = soup.find_all( 'a' ) + + links_with_images = [ link for link in all_links if len( link.find_all( 'img' ) ) > 0 ] + + urls = [ urlparse.urljoin( url, link[ 'href' ] ) for link in links_with_images ] + + # old version included (images that don't have a link wrapped around them)'s src + + return urls + +def ParseHentaiFoundryGallery( html ): + + urls_set = set() + + soup = bs4.BeautifulSoup( html ) + + def correct_url( href ): + + # a good url is in the form "/pictures/user/artist_name/file_id/title" + + if href.count( '/' ) == 5 and href.startswith( '/pictures/user/' ): + + ( nothing, pictures, user, artist_name, file_id, title ) = href.split( '/' ) + + # /pictures/user/artist_name/page/3 + if file_id != 'page': return True + + + return False + + + links = soup.find_all( 'a', href = correct_url ) + + urls = [ 'http://www.hentai-foundry.com' + link['href'] for link in links ] + + result_urls = [] + + for url in urls: + + if url not in urls_set: + + urls_set.add( url ) + + result_urls.append( url ) + + + + return result_urls + +def ParseHentaiFoundryPage( html ): + + # can't parse this easily normally because HF is a pain with the preview->click to see full size business. 
+ # find http://pictures.hentai-foundry.com// + # then extend it to http://pictures.hentai-foundry.com//k/KABOS/172144.jpg + # the .jpg bit is what we really need, but whatever + try: + + index = html.index( 'http://pictures.hentai-foundry.com//' ) + + stuff = html[ index : index + 100 ] + + try: ( image_url, gumpf ) = stuff.split( '"', 1 ) + except: ( image_url, gumpf ) = stuff.split( ''', 1 ) + + except: raise Exception( 'Could not parse image url!' ) + + soup = bs4.BeautifulSoup( html ) + + tags = [] + + try: + + title = soup.find( 'title' ) + + ( data, nothing ) = unicode( title.string ).split( ' - Hentai Foundry' ) + + data_reversed = data[::-1] # want to do it right-side first, because title might have ' by ' in it + + ( artist_reversed, title_reversed ) = data_reversed.split( ' yb ' ) + + artist = artist_reversed[::-1] + + title = title_reversed[::-1] + + tags.append( 'creator:' + artist ) + tags.append( 'title:' + title ) + + except: pass + + tag_links = soup.find_all( 'a', rel = 'tag' ) + + for tag_link in tag_links: tags.append( tag_link.string ) + + return ( image_url, tags ) + \ No newline at end of file diff --git a/include/HydrusConstants.py b/include/HydrusConstants.py new file mode 100755 index 00000000..b841d538 --- /dev/null +++ b/include/HydrusConstants.py @@ -0,0 +1,1653 @@ +import collections +import HydrusPubSub +import locale +import os +import Queue +import re +import sqlite3 +import sys +import threading +import time +import traceback +import wx +import yaml + +locale.setlocale( locale.LC_ALL, '' ) + +BASE_DIR = sys.path[0] + +DB_DIR = BASE_DIR + os.path.sep + 'db' +CLIENT_FILES_DIR = DB_DIR + os.path.sep + 'client_files' +SERVER_FILES_DIR = DB_DIR + os.path.sep + 'server_files' +CLIENT_THUMBNAILS_DIR = DB_DIR + os.path.sep + 'client_thumbnails' +SERVER_THUMBNAILS_DIR = DB_DIR + os.path.sep + 'server_thumbnails' +SERVER_MESSAGES_DIR = DB_DIR + os.path.sep + 'server_messages' +SERVER_UPDATES_DIR = DB_DIR + os.path.sep + 'server_updates' +LOGS_DIR = BASE_DIR + os.path.sep + 'logs' +STATIC_DIR = BASE_DIR + os.path.sep + 'static' +TEMP_DIR = BASE_DIR + os.path.sep + 'temp' + +# Misc + +NETWORK_VERSION = 8 +SOFTWARE_VERSION = 57 + +UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 ) + +HYDRUS_KEY_LENGTH = 32 + +UPDATE_DURATION = 100000 + +expirations = [ ( 'one month', 31 * 86400 ), ( 'three months', 3 * 31 * 86400 ), ( 'six months', 6 * 31 * 86400 ), ( 'one year', 12 * 31 * 86400 ), ( 'two years', 24 * 31 * 86400 ), ( 'five years', 60 * 31 * 86400 ), ( 'does not expire', None ) ] + +shutdown = False + +# Enums + +GET_DATA = 0 +POST_DATA = 1 +POST_PETITIONS = 2 +RESOLVE_PETITIONS = 3 +MANAGE_USERS = 4 +GENERAL_ADMIN = 5 +EDIT_SERVICES = 6 + +CREATABLE_PERMISSIONS = [ GET_DATA, POST_DATA, POST_PETITIONS, RESOLVE_PETITIONS, MANAGE_USERS, GENERAL_ADMIN ] +ADMIN_PERMISSIONS = [ RESOLVE_PETITIONS, MANAGE_USERS, GENERAL_ADMIN, EDIT_SERVICES ] + +permissions_string_lookup = {} + +permissions_string_lookup[ GET_DATA ] = 'get data' +permissions_string_lookup[ POST_DATA ] = 'post data' +permissions_string_lookup[ POST_PETITIONS ] = 'post petitions' +permissions_string_lookup[ RESOLVE_PETITIONS ] = 'resolve petitions' +permissions_string_lookup[ MANAGE_USERS ] = 'manage users' +permissions_string_lookup[ GENERAL_ADMIN ] = 'general administration' +permissions_string_lookup[ EDIT_SERVICES ] = 'edit services' + +TAG_REPOSITORY = 0 +FILE_REPOSITORY = 1 +LOCAL_FILE = 2 +MESSAGE_DEPOT = 3 +LOCAL_TAG = 5 +LOCAL_RATING_NUMERICAL = 6 +LOCAL_RATING_LIKE = 7 +RATING_NUMERICAL_REPOSITORY = 8 
+RATING_LIKE_REPOSITORY = 9 +SERVER_ADMIN = 99 +NULL_SERVICE = 100 + +service_string_lookup = {} + +service_string_lookup[ TAG_REPOSITORY ] = 'hydrus tag repository' +service_string_lookup[ FILE_REPOSITORY ] = 'hydrus file repository' +service_string_lookup[ LOCAL_FILE ] = 'hydrus local file service' +service_string_lookup[ MESSAGE_DEPOT ] = 'hydrus message depot' +service_string_lookup[ SERVER_ADMIN ] = 'hydrus server administration' + +RATINGS_SERVICES = [ LOCAL_RATING_LIKE, LOCAL_RATING_NUMERICAL, RATING_LIKE_REPOSITORY, RATING_NUMERICAL_REPOSITORY ] +REPOSITORIES = [ TAG_REPOSITORY, FILE_REPOSITORY, RATING_LIKE_REPOSITORY, RATING_NUMERICAL_REPOSITORY ] +RESTRICTED_SERVICES = list( REPOSITORIES ) + [ SERVER_ADMIN, MESSAGE_DEPOT ] +REMOTE_SERVICES = list( RESTRICTED_SERVICES ) +ALL_SERVICES = list( REMOTE_SERVICES ) + [ LOCAL_FILE, LOCAL_TAG, LOCAL_RATING_LIKE, LOCAL_RATING_NUMERICAL ] + +SERVICES_WITH_THUMBNAILS = [ FILE_REPOSITORY, LOCAL_FILE ] + +DELETE_FILES_PETITION = 0 +DELETE_TAG_PETITION = 1 + +BAN = 0 +SUPERBAN = 1 +CHANGE_ACCOUNT_TYPE = 2 +ADD_TO_EXPIRES = 3 +SET_EXPIRES = 4 + +CURRENT = 0 +PENDING = 1 +DELETED = 2 +PETITIONED = 3 + +HIGH_PRIORITY = 0 +LOW_PRIORITY = 2 + +SCORE_PETITION = 0 + +SERVICE_INFO_NUM_FILES = 0 +SERVICE_INFO_NUM_INBOX = 1 +SERVICE_INFO_NUM_LOCAL = 2 +SERVICE_INFO_NUM_MAPPINGS = 3 +SERVICE_INFO_NUM_DELETED_MAPPINGS = 4 +SERVICE_INFO_NUM_DELETED_FILES = 5 +SERVICE_INFO_NUM_THUMBNAILS = 6 +SERVICE_INFO_NUM_THUMBNAILS_LOCAL = 7 +SERVICE_INFO_TOTAL_SIZE = 8 +SERVICE_INFO_NUM_NAMESPACES = 9 +SERVICE_INFO_NUM_TAGS = 10 +SERVICE_INFO_NUM_PENDING = 11 +SERVICE_INFO_NUM_CONVERSATIONS = 12 +SERVICE_INFO_NUM_UNREAD = 13 +SERVICE_INFO_NUM_DRAFTS = 14 + +ADD = 0 +DELETE = 1 +EDIT = 2 + +APPROVE = 0 +DENY = 1 + +GET = 0 +POST = 1 +OPTIONS = 2 + +APPLICATION_HYDRUS_CLIENT_COLLECTION = 0 +IMAGE_JPEG = 1 +IMAGE_PNG = 2 +IMAGE_GIF = 3 +IMAGE_BMP = 4 +APPLICATION_FLASH = 5 +APPLICATION_YAML = 6 +IMAGE_ICON = 7 +TEXT_HTML = 8 +VIDEO_FLV = 9 +APPLICATION_OCTET_STREAM = 100 +APPLICATION_UNKNOWN = 101 + +ALLOWED_MIMES = ( IMAGE_JPEG, IMAGE_PNG, IMAGE_GIF, IMAGE_BMP, APPLICATION_FLASH, VIDEO_FLV ) + +IMAGES = ( IMAGE_JPEG, IMAGE_PNG, IMAGE_GIF, IMAGE_BMP ) + +NOISY_MIMES = ( APPLICATION_FLASH, VIDEO_FLV ) + +MIMES_WITH_THUMBNAILS = ( IMAGE_JPEG, IMAGE_PNG, IMAGE_GIF, IMAGE_BMP ) + +mime_enum_lookup = {} + +mime_enum_lookup[ 'collection' ] = APPLICATION_HYDRUS_CLIENT_COLLECTION +mime_enum_lookup[ 'image/jpe' ] = IMAGE_JPEG +mime_enum_lookup[ 'image/jpeg' ] = IMAGE_JPEG +mime_enum_lookup[ 'image/jpg' ] = IMAGE_JPEG +mime_enum_lookup[ 'image/png' ] = IMAGE_PNG +mime_enum_lookup[ 'image/gif' ] = IMAGE_GIF +mime_enum_lookup[ 'image/bmp' ] = IMAGE_BMP +mime_enum_lookup[ 'image' ] = IMAGES +mime_enum_lookup[ 'image/vnd.microsoft.icon' ] = IMAGE_ICON +mime_enum_lookup[ 'application/x-shockwave-flash' ] = APPLICATION_FLASH +mime_enum_lookup[ 'application/octet-stream' ] = APPLICATION_OCTET_STREAM +mime_enum_lookup[ 'application/x-yaml' ] = APPLICATION_YAML +mime_enum_lookup[ 'text/html' ] = TEXT_HTML +mime_enum_lookup[ 'video/x-flv' ] = VIDEO_FLV +mime_enum_lookup[ 'unknown mime' ] = APPLICATION_UNKNOWN + +mime_string_lookup = {} + +mime_string_lookup[ APPLICATION_HYDRUS_CLIENT_COLLECTION ] = 'collection' +mime_string_lookup[ IMAGE_JPEG ] = 'image/jpg' +mime_string_lookup[ IMAGE_PNG ] = 'image/png' +mime_string_lookup[ IMAGE_GIF ] = 'image/gif' +mime_string_lookup[ IMAGE_BMP ] = 'image/bmp' +mime_string_lookup[ IMAGES ] = 'image' +mime_string_lookup[ IMAGE_ICON ] = 
'image/vnd.microsoft.icon' +mime_string_lookup[ APPLICATION_FLASH ] = 'application/x-shockwave-flash' +mime_string_lookup[ APPLICATION_OCTET_STREAM ] = 'application/octet-stream' +mime_string_lookup[ APPLICATION_YAML ] = 'application/x-yaml' +mime_string_lookup[ TEXT_HTML ] = 'text/html' +mime_string_lookup[ VIDEO_FLV ] = 'video/x-flv' +mime_string_lookup[ APPLICATION_UNKNOWN ] = 'unknown mime' + +mime_ext_lookup = {} + +mime_ext_lookup[ APPLICATION_HYDRUS_CLIENT_COLLECTION ] = '.collection' +mime_ext_lookup[ IMAGE_JPEG ] = '.jpg' +mime_ext_lookup[ IMAGE_PNG ] = '.png' +mime_ext_lookup[ IMAGE_GIF ] = '.gif' +mime_ext_lookup[ IMAGE_BMP ] = '.bmp' +mime_ext_lookup[ IMAGE_ICON ] = '.ico' +mime_ext_lookup[ APPLICATION_FLASH ] = '.swf' +mime_ext_lookup[ APPLICATION_OCTET_STREAM ] = '.bin' +mime_ext_lookup[ APPLICATION_YAML ] = '.yaml' +mime_ext_lookup[ TEXT_HTML ] = '.html' +mime_ext_lookup[ VIDEO_FLV ] = '.flv' +mime_ext_lookup[ APPLICATION_UNKNOWN ] = '' +#mime_ext_lookup[ 'application/x-rar-compressed' ] = '.rar' + +ALLOWED_MIME_EXTENSIONS = [ mime_ext_lookup[ mime ] for mime in ALLOWED_MIMES ] + +header_and_mime = [ + ( '\xff\xd8', IMAGE_JPEG ), + ( 'GIF87a', IMAGE_GIF ), + ( 'GIF89a', IMAGE_GIF ), + ( '\x89PNG', IMAGE_PNG ), + ( 'BM', IMAGE_BMP ), + ( 'CWS', APPLICATION_FLASH ), + ( 'FWS', APPLICATION_FLASH ), + ( 'FLV', VIDEO_FLV ) + ] + +wxk_code_string_lookup = { + wx.WXK_SPACE: 'space', + wx.WXK_BACK: 'backspace', + wx.WXK_TAB: 'tab', + wx.WXK_RETURN: 'return', + wx.WXK_NUMPAD_ENTER: 'enter', + wx.WXK_PAUSE: 'pause', + wx.WXK_ESCAPE: 'escape', + wx.WXK_INSERT: 'insert', + wx.WXK_DELETE: 'delete', + wx.WXK_UP: 'up', + wx.WXK_DOWN: 'down', + wx.WXK_LEFT: 'left', + wx.WXK_RIGHT: 'right', + wx.WXK_HOME: 'home', + wx.WXK_END: 'end', + wx.WXK_PAGEDOWN: 'page up', + wx.WXK_PAGEUP: 'page down', + wx.WXK_F1: 'f1', + wx.WXK_F2: 'f2', + wx.WXK_F3: 'f3', + wx.WXK_F4: 'f4', + wx.WXK_F5: 'f5', + wx.WXK_F6: 'f6', + wx.WXK_F7: 'f7', + wx.WXK_F8: 'f8', + wx.WXK_F9: 'f9', + wx.WXK_F10: 'f10', + wx.WXK_F11: 'f11', + wx.WXK_F12: 'f12', + wx.WXK_ADD: '+', + wx.WXK_DIVIDE: '/', + wx.WXK_SUBTRACT: '-', + wx.WXK_MULTIPLY: '*', + wx.WXK_NUMPAD1: 'numpad 1', + wx.WXK_NUMPAD2: 'numpad 2', + wx.WXK_NUMPAD3: 'numpad 3', + wx.WXK_NUMPAD4: 'numpad 4', + wx.WXK_NUMPAD5: 'numpad 5', + wx.WXK_NUMPAD6: 'numpad 6', + wx.WXK_NUMPAD7: 'numpad 7', + wx.WXK_NUMPAD8: 'numpad 8', + wx.WXK_NUMPAD9: 'numpad 9', + wx.WXK_NUMPAD0: 'numpad 0', + wx.WXK_NUMPAD_UP: 'numpad up', + wx.WXK_NUMPAD_DOWN: 'numpad down', + wx.WXK_NUMPAD_LEFT: 'numpad left', + wx.WXK_NUMPAD_RIGHT: 'numpad right', + wx.WXK_NUMPAD_HOME: 'numpad home', + wx.WXK_NUMPAD_END: 'numpad end', + wx.WXK_NUMPAD_PAGEDOWN: 'numpad page up', + wx.WXK_NUMPAD_PAGEUP: 'numpad page down', + wx.WXK_NUMPAD_ADD: 'numpad +', + wx.WXK_NUMPAD_DIVIDE: 'numpad /', + wx.WXK_NUMPAD_SUBTRACT: 'numpad -', + wx.WXK_NUMPAD_MULTIPLY: 'numpad *' + } + +# request checking + +BANDWIDTH_CONSUMING_REQUESTS = set() + +BANDWIDTH_CONSUMING_REQUESTS.add( ( TAG_REPOSITORY, GET, 'update' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( TAG_REPOSITORY, POST, 'mappings' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( TAG_REPOSITORY, POST, 'petitions' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( FILE_REPOSITORY, GET, 'update' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( FILE_REPOSITORY, GET, 'file' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( FILE_REPOSITORY, GET, 'thumbnail' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( FILE_REPOSITORY, POST, 'file' ) ) +BANDWIDTH_CONSUMING_REQUESTS.add( ( FILE_REPOSITORY, POST, 'petitions' ) ) + 
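# The per-service lists below build up ( request_type, request, permissions ) triples, where permissions
# is None (no particular permission needed), a single permission, or a tuple of acceptable permissions
# such as ( MANAGE_USERS, GENERAL_ADMIN ). They are then flattened into ALLOWED_REQUESTS, a set keyed by
# ( service_type, request_type, request ), and REQUESTS_TO_PERMISSIONS, the matching permission lookup;
# for example, REQUESTS_TO_PERMISSIONS[ ( TAG_REPOSITORY, POST, 'mappings' ) ] ends up as POST_DATA and
# REQUESTS_TO_PERMISSIONS[ ( FILE_REPOSITORY, GET, 'file' ) ] as GET_DATA.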
+service_requests = [] +service_requests.append( ( GET, '', None ) ) +service_requests.append( ( GET, 'favicon.ico', None ) ) + +local_file_requests = list( service_requests ) +local_file_requests.append( ( GET, 'file', None ) ) +local_file_requests.append( ( GET, 'thumbnail', None ) ) + +restricted_requests = list( service_requests ) +restricted_requests.append( ( GET, 'accesskeys', GENERAL_ADMIN ) ) +restricted_requests.append( ( GET, 'account', None ) ) +restricted_requests.append( ( GET, 'accountinfo', MANAGE_USERS ) ) +restricted_requests.append( ( GET, 'accounttypes', MANAGE_USERS ) ) +restricted_requests.append( ( GET, 'options', GENERAL_ADMIN ) ) +restricted_requests.append( ( GET, 'stats', GENERAL_ADMIN ) ) +restricted_requests.append( ( POST, 'accountmodification', ( MANAGE_USERS, GENERAL_ADMIN ) ) ) +restricted_requests.append( ( POST, 'accounttypesmodification', GENERAL_ADMIN ) ) +restricted_requests.append( ( POST, 'options', GENERAL_ADMIN ) ) + +admin_requests = list( restricted_requests ) +admin_requests.append( ( GET, 'init', None ) ) +admin_requests.append( ( GET, 'services', EDIT_SERVICES ) ) +admin_requests.append( ( POST, 'backup', EDIT_SERVICES ) ) +admin_requests.append( ( POST, 'servicesmodification', EDIT_SERVICES ) ) + +repository_requests = list( restricted_requests ) +repository_requests.append( ( GET, 'numpetitions', RESOLVE_PETITIONS ) ) +repository_requests.append( ( GET, 'petition', RESOLVE_PETITIONS ) ) +repository_requests.append( ( GET, 'update', GET_DATA ) ) +repository_requests.append( ( POST, 'news', GENERAL_ADMIN ) ) +repository_requests.append( ( POST, 'petitiondenial', RESOLVE_PETITIONS ) ) +repository_requests.append( ( POST, 'petitions', ( POST_PETITIONS, RESOLVE_PETITIONS ) ) ) + +file_repository_requests = list( repository_requests ) +file_repository_requests.append( ( GET, 'file', GET_DATA ) ) +file_repository_requests.append( ( GET, 'ip', GENERAL_ADMIN ) ) +file_repository_requests.append( ( GET, 'thumbnail', GET_DATA ) ) +file_repository_requests.append( ( POST, 'file', POST_DATA ) ) + +tag_repository_requests = list( repository_requests ) +tag_repository_requests.append( ( POST, 'mappings', POST_DATA ) ) + +message_depot_requests = list( restricted_requests ) +message_depot_requests.append( ( GET, 'message', GET_DATA ) ) +message_depot_requests.append( ( GET, 'messageinfosince', GET_DATA ) ) +message_depot_requests.append( ( GET, 'publickey', None ) ) +message_depot_requests.append( ( POST, 'contact', POST_DATA ) ) +message_depot_requests.append( ( POST, 'message', None ) ) +message_depot_requests.append( ( POST, 'message_statuses', None ) ) + +all_requests = [] +all_requests.extend( [ ( LOCAL_FILE, request_type, request, permissions ) for ( request_type, request, permissions ) in local_file_requests ] ) +all_requests.extend( [ ( SERVER_ADMIN, request_type, request, permissions ) for ( request_type, request, permissions ) in admin_requests ] ) +all_requests.extend( [ ( FILE_REPOSITORY, request_type, request, permissions ) for ( request_type, request, permissions ) in file_repository_requests ] ) +all_requests.extend( [ ( TAG_REPOSITORY, request_type, request, permissions ) for ( request_type, request, permissions ) in tag_repository_requests ] ) +all_requests.extend( [ ( MESSAGE_DEPOT, request_type, request, permissions ) for ( request_type, request, permissions ) in message_depot_requests ] ) + +ALLOWED_REQUESTS = { ( service_type, request_type, request ) for ( service_type, request_type, request, permissions ) in all_requests } + 
+REQUESTS_TO_PERMISSIONS = { ( service_type, request_type, request ) : permissions for ( service_type, request_type, request, permissions ) in all_requests } + +# default options + +DEFAULT_LOCAL_FILE_PORT = 45865 +DEFAULT_SERVER_ADMIN_PORT = 45870 +DEFAULT_SERVICE_PORT = 45871 + +DEFAULT_OPTIONS = {} + +DEFAULT_OPTIONS[ SERVER_ADMIN ] = {} +DEFAULT_OPTIONS[ SERVER_ADMIN ][ 'max_monthly_data' ] = None +DEFAULT_OPTIONS[ SERVER_ADMIN ][ 'max_storage' ] = None +DEFAULT_OPTIONS[ SERVER_ADMIN ][ 'message' ] = 'hydrus server administration service' + +DEFAULT_OPTIONS[ FILE_REPOSITORY ] = {} +DEFAULT_OPTIONS[ FILE_REPOSITORY ][ 'max_monthly_data' ] = None +DEFAULT_OPTIONS[ FILE_REPOSITORY ][ 'max_storage' ] = None +DEFAULT_OPTIONS[ FILE_REPOSITORY ][ 'log_uploader_ips' ] = False +DEFAULT_OPTIONS[ FILE_REPOSITORY ][ 'message' ] = 'hydrus file repository' + +DEFAULT_OPTIONS[ TAG_REPOSITORY ] = {} +DEFAULT_OPTIONS[ TAG_REPOSITORY ][ 'max_monthly_data' ] = None +DEFAULT_OPTIONS[ TAG_REPOSITORY ][ 'message' ] = 'hydrus tag repository' + +DEFAULT_OPTIONS[ MESSAGE_DEPOT ] = {} +DEFAULT_OPTIONS[ MESSAGE_DEPOT ][ 'max_monthly_data' ] = None +DEFAULT_OPTIONS[ MESSAGE_DEPOT ][ 'max_storage' ] = None +DEFAULT_OPTIONS[ MESSAGE_DEPOT ][ 'message' ] = 'hydrus message depot' + +# Hydrus pubsub + +EVT_PUBSUB = HydrusPubSub.EVT_PUBSUB +pubsub = HydrusPubSub.HydrusPubSub() + +def BuildKeyToListDict( pairs ): + + d = collections.defaultdict( list ) + + for ( key, value ) in pairs: d[ key ].append( value ) + + return d + +def CalculateScoreFromRating( count, rating ): + + # http://www.evanmiller.org/how-not-to-sort-by-average-rating.html + + count = float( count ) + + positive = count * rating + negative = count * ( 1.0 - rating ) + + # positive + negative = count + + # I think I've parsed this correctly from the website! Not sure though! 
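# Reading the expression below against that page, it looks like the lower bound of the Wilson score
# interval with z = 1.96 (roughly 95% confidence):
#
#     lower = ( p + z*z/(2*n) - z * sqrt( p*(1-p)/n + z*z/(4*n*n) ) ) / ( 1 + z*z/n )
#
# with n = count and p = positive / count; the magic numbers are z*z/2 = 1.9208, z*z/4 = 0.9604
# and z*z = 3.8416.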
+ score = ( ( positive + 1.9208 ) / count - 1.96 * ( ( ( positive * negative ) / count + 0.9604 ) ** 0.5 ) / count ) / ( 1 + 3.8416 / count ) + + return score + +def CleanTag( tag ): + + if tag == '': return '' + + tag = tag.lower() + + tag = unicode( tag ) + + tag = re.sub( '[\s]+', ' ', tag, flags = re.UNICODE ) # turns multiple spaces into single spaces + + tag = re.sub( '\s\Z', '', tag, flags = re.UNICODE ) # removes space at the end + + while re.match( '\s|-|system:', tag, flags = re.UNICODE ) is not None: + + tag = re.sub( '\A(\s|-|system:)', '', tag, flags = re.UNICODE ) # removes space at the beginning + + + return tag + +def ConvertAbsPathToPortablePath( abs_path ): + + if abs_path == '': return None + + try: return os.path.relpath( abs_path, BASE_DIR ) + except: return abs_path + +def ConvertIntToBytes( size ): + + if size is None: return 'unknown size' + + suffixes = ( '', 'K', 'M', 'G', 'T', 'P' ) + + suffix_index = 0 + + size = float( size ) + + while size > 1024.0: + + size = size / 1024.0 + + suffix_index += 1 + + + if size < 10.0: return '%.1f' % size + suffixes[ suffix_index ] + 'B' + + return '%.0f' % size + suffixes[ suffix_index ] + 'B' + +def ConvertIntToPrettyString( num ): return locale.format( "%d", num, grouping = True ) + +def ConvertMillisecondsToPrettyTime( ms ): + + hours = ms / 3600000 + + if hours == 1: hours_result = '1 hour' + else: hours_result = str( hours ) + ' hours' + + ms = ms % 3600000 + + minutes = ms / 60000 + + if minutes == 1: minutes_result = '1 minute' + else: minutes_result = str( minutes ) + ' minutes' + + ms = ms % 60000 + + seconds = ms / 1000 + + if seconds == 1: seconds_result = '1 second' + else: seconds_result = str( seconds ) + ' seconds' + + detailed_seconds = float( ms ) / 1000.0 + + if detailed_seconds == 1.0: detailed_seconds_result = '1.0 seconds' + else:detailed_seconds_result = '%.1f' % detailed_seconds + ' seconds' + + ms = ms % 1000 + + if ms == 1: milliseconds_result = '1 millisecond' + else: milliseconds_result = str( ms ) + ' milliseconds' + + if hours > 0: return hours_result + ' ' + minutes_result + + if minutes > 0: return minutes_result + ' ' + seconds_result + + if seconds > 0: return detailed_seconds_result + + return milliseconds_result + +def ConvertNumericalRatingToPrettyString( lower, upper, rating, rounded_result = False, out_of = True ): + + rating_converted = ( rating * ( upper - lower ) ) + lower + + if rounded_result: s = str( '%.2f' % round( rating_converted ) ) + else: s = str( '%.2f' % rating_converted ) + + if out_of: + + if lower in ( 0, 1 ): s += '/' + str( '%.2f' % upper ) + + + return s + +def ConvertPortablePathToAbsPath( portable_path ): + + if portable_path is None: return None + + if os.path.isabs( portable_path ): abs_path = portable_path + else: abs_path = os.path.normpath( BASE_DIR + os.path.sep + portable_path ) + + if os.path.exists( abs_path ): return abs_path + else: return None + +def ConvertShortcutToPrettyShortcut( modifier, key, action ): + + if modifier == wx.ACCEL_NORMAL: modifier = '' + elif modifier == wx.ACCEL_ALT: modifier = 'alt' + elif modifier == wx.ACCEL_CTRL: modifier = 'ctrl' + elif modifier == wx.ACCEL_SHIFT: modifier = 'shift' + + if key in range( 65, 91 ): key = chr( key + 32 ) # + 32 for converting ascii A -> a + elif key in range( 97, 123 ): key = chr( key ) + else: key = wxk_code_string_lookup[ key ] + + return ( modifier, key, action ) + +def ConvertTimestampToPrettyAge( timestamp ): + + if timestamp == 0 or timestamp is None: return 'unknown age' + + age = int( 
time.time() ) - timestamp + + seconds = age % 60 + if seconds == 1: s = '1 second' + else: s = str( seconds ) + ' seconds' + + age = age / 60 + minutes = age % 60 + if minutes == 1: m = '1 minute' + else: m = str( minutes ) + ' minutes' + + age = age / 60 + hours = age % 24 + if hours == 1: h = '1 hour' + else: h = str( hours ) + ' hours' + + age = age / 24 + days = age % 30 + if days == 1: d = '1 day' + else: d = str( days ) + ' days' + + age = age / 30 + months = age % 12 + if months == 1: mo = '1 month' + else: mo = str( months ) + ' months' + + years = age / 12 + if years == 1: y = '1 year' + else: y = str( years ) + ' years' + + if years > 0: return ' '.join( ( y, mo ) ) + ' old' + elif months > 0: return ' '.join( ( mo, d ) ) + ' old' + elif days > 0: return ' '.join( ( d, h ) ) + ' old' + elif hours > 0: return ' '.join( ( h, m ) ) + ' old' + else: return ' '.join( ( m, s ) ) + ' old' + +def ConvertTimestampToPrettyAgo( timestamp ): + + if timestamp == 0: return 'unknown when' + + age = int( time.time() ) - timestamp + + seconds = age % 60 + if seconds == 1: s = '1 second' + else: s = str( seconds ) + ' seconds' + + age = age / 60 + minutes = age % 60 + if minutes == 1: m = '1 minute' + else: m = str( minutes ) + ' minutes' + + age = age / 60 + hours = age % 24 + if hours == 1: h = '1 hour' + else: h = str( hours ) + ' hours' + + age = age / 24 + days = age % 30 + if days == 1: d = '1 day' + else: d = str( days ) + ' days' + + age = age / 30 + months = age % 12 + if months == 1: mo = '1 month' + else: mo = str( months ) + ' months' + + years = age / 12 + if years == 1: y = '1 year' + else: y = str( years ) + ' years' + + if years > 0: return ' '.join( ( y, mo ) ) + ' ago' + elif months > 0: return ' '.join( ( mo, d ) ) + ' ago' + elif days > 0: return ' '.join( ( d, h ) ) + ' ago' + elif hours > 0: return ' '.join( ( h, m ) ) + ' ago' + else: return ' '.join( ( m, s ) ) + ' ago' + +def ConvertTimestampToPrettyExpires( timestamp ): + + if timestamp is None: return 'does not expire' + if timestamp == 0: return 'unknown expiry' + + expires = int( time.time() ) - timestamp + + if expires >= 0: already_happend = True + else: + + expires *= -1 + + already_happend = False + + + seconds = expires % 60 + if seconds == 1: s = '1 second' + else: s = str( seconds ) + ' seconds' + + expires = expires / 60 + minutes = expires % 60 + if minutes == 1: m = '1 minute' + else: m = str( minutes ) + ' minutes' + + expires = expires / 60 + hours = expires % 24 + if hours == 1: h = '1 hour' + else: h = str( hours ) + ' hours' + + expires = expires / 24 + days = expires % 30 + if days == 1: d = '1 day' + else: d = str( days ) + ' days' + + expires = expires / 30 + months = expires % 12 + if months == 1: mo = '1 month' + else: mo = str( months ) + ' months' + + years = expires / 12 + if years == 1: y = '1 year' + else: y = str( years ) + ' years' + + if already_happend: + + if years > 0: return 'expired ' + ' '.join( ( y, mo ) ) + ' ago' + elif months > 0: return 'expired ' + ' '.join( ( mo, d ) ) + ' ago' + elif days > 0: return 'expired ' + ' '.join( ( d, h ) ) + ' ago' + elif hours > 0: return 'expired ' + ' '.join( ( h, m ) ) + ' ago' + else: return 'expired ' + ' '.join( ( m, s ) ) + ' ago' + + else: + + if years > 0: return 'expires in ' + ' '.join( ( y, mo ) ) + elif months > 0: return 'expires in ' + ' '.join( ( mo, d ) ) + elif days > 0: return 'expires in ' + ' '.join( ( d, h ) ) + elif hours > 0: return 'expires in ' + ' '.join( ( h, m ) ) + else: return 'expires in ' + ' '.join( ( m, s ) ) + + 
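+# a rough sanity check on the conversions above ( months are counted as 30 days and years as 12 such months ),
+# using the 'ago' variant as the example:
+#
+#     ConvertTimestampToPrettyAgo( int( time.time() ) - 100000 )    # '1 day 3 hours ago'
+#     ConvertTimestampToPrettyAgo( int( time.time() ) - 40000000 )  # '1 year 3 months ago'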
+def ConvertTimestampToPrettyPending( timestamp ): + + if timestamp is None: return '' + if timestamp == 0: return 'imminent' + + pending = int( time.time() ) - timestamp + + if pending >= 0: return 'imminent' + else: pending *= -1 + + seconds = pending % 60 + if seconds == 1: s = '1 second' + else: s = str( seconds ) + ' seconds' + + pending = pending / 60 + minutes = pending % 60 + if minutes == 1: m = '1 minute' + else: m = str( minutes ) + ' minutes' + + pending = pending / 60 + hours = pending % 24 + if hours == 1: h = '1 hour' + else: h = str( hours ) + ' hours' + + pending = pending / 24 + days = pending % 30 + if days == 1: d = '1 day' + else: d = str( days ) + ' days' + + pending = pending / 30 + months = pending % 12 + if months == 1: mo = '1 month' + else: mo = str( months ) + ' months' + + years = pending / 12 + if years == 1: y = '1 year' + else: y = str( years ) + ' years' + + if years > 0: return 'in ' + ' '.join( ( y, mo ) ) + elif months > 0: return 'in ' + ' '.join( ( mo, d ) ) + elif days > 0: return 'in ' + ' '.join( ( d, h ) ) + elif hours > 0: return 'in ' + ' '.join( ( h, m ) ) + else: return 'in ' + ' '.join( ( m, s ) ) + +def ConvertTimestampToPrettySync( timestamp ): + + if timestamp == 0: return 'not updated' + + age = int( time.time() ) - timestamp + + seconds = age % 60 + if seconds == 1: s = '1 second' + else: s = str( seconds ) + ' seconds' + + age = age / 60 + minutes = age % 60 + if minutes == 1: m = '1 minute' + else: m = str( minutes ) + ' minutes' + + age = age / 60 + hours = age % 24 + if hours == 1: h = '1 hour' + else: h = str( hours ) + ' hours' + + age = age / 24 + days = age % 30 + if days == 1: d = '1 day' + else: d = str( days ) + ' days' + + age = age / 30 + months = age % 12 + if months == 1: mo = '1 month' + else: mo = str( months ) + ' months' + + years = age / 12 + if years == 1: y = '1 year' + else: y = str( years ) + ' years' + + if years > 0: return 'updated to ' + ' '.join( ( y, mo ) ) + ' ago' + elif months > 0: return 'updated to ' + ' '.join( ( mo, d ) ) + ' ago' + elif days > 0: return 'updated to ' + ' '.join( ( d, h ) ) + ' ago' + elif hours > 0: return 'updated to ' + ' '.join( ( h, m ) ) + ' ago' + else: return 'updated to ' + ' '.join( ( m, s ) ) + ' ago' + +def ConvertTimestampToPrettyTime( timestamp ): return time.strftime( '%Y/%m/%d %H:%M:%S', time.localtime( timestamp ) ) + +def ConvertTimestampToHumanPrettyTime( timestamp ): + + now = int( time.time() ) + + difference = now - timestamp + + if difference < 60: return 'just now' + elif difference < 86400 * 7: return ConvertTimestampToPrettyAgo( timestamp ) + else: return ConvertTimestampToPrettyTime( timestamp ) + +def ConvertTimeToPrettyTime( secs ): + + return time.strftime( '%H:%M:%S', time.gmtime( secs ) ) + +def ConvertZoomToPercentage( zoom ): + + zoom = zoom * 100.0 + + pretty_zoom = '%.0f' % zoom + '%' + + return pretty_zoom + +def GetMimeFromPath( filename ): + + f = open( filename, 'rb' ) + + return GetMimeFromFilePointer( f ) + +def GetMimeFromFilePointer( f ): + + try: + + f.seek( 0 ) + + header = f.read( 256 ) + + return GetMimeFromString( header ) + + except: raise Exception( 'I could not identify the mime of the file' ) + +def GetMimeFromString( file ): + + for ( header, mime ) in header_and_mime: + + if file.startswith( header ): + + return mime + + + + return APPLICATION_OCTET_STREAM + +def GetShortcutFromEvent( event ): + + modifier = wx.ACCEL_NORMAL + + if event.AltDown(): modifier = wx.ACCEL_ALT + elif event.ControlDown(): modifier = wx.ACCEL_CTRL + elif 
event.ShiftDown(): modifier = wx.ACCEL_SHIFT + + key = event.KeyCode + + return ( modifier, key ) + +def IntelligentMassUnion( iterables_to_reduce ): + + # while I might usually go |= here (for style), it is quicker to use the ugly .update when we want to be quick + # set.update also converts the second argument to a set, if appropriate! + + #return reduce( set.union, iterables_to_reduce, set() ) + + # also: reduce is slower, I think, cause of union rather than update! + + answer = set() + + for i in iterables_to_reduce: answer.update( i ) + + return answer + +def IntelligentMassIntersect( sets_to_reduce ): + + answer = None + + sets_to_reduce = list( sets_to_reduce ) + + sets_to_reduce.sort( cmp = lambda x, y: cmp( len( x ), len( y ) ) ) + + for set_to_reduce in sets_to_reduce: + + if len( set_to_reduce ) == 0: return set() + + if answer is None: answer = set( set_to_reduce ) + else: + + # same thing as union; I could go &= here, but I want to be quick, so use the function call + if len( answer ) == 0: return set() + else: answer.intersection_update( set_to_reduce ) + + + + if answer is None: return set() + else: return answer + +def IsCollection( mime ): return mime in ( APPLICATION_HYDRUS_CLIENT_COLLECTION, ) # this is a little convoluted, but I want to keep it similar to IsImage, IsText, IsAudio, IsX + +def IsImage( mime ): return mime in ( IMAGE_JPEG, IMAGE_GIF, IMAGE_PNG, IMAGE_BMP ) + +def SearchEntryMatchesTag( search_entry, tag ): + + # note that at no point is the namespace checked against the search_entry! + + if ':' in tag: + + ( n, t ) = tag.split( ':', 1 ) + + return t.startswith( search_entry ) + + else: return tag.startswith( search_entry ) + + +def SplayListForDB( xs ): return '(' + ','.join( [ '"' + str( x ) + '"' for x in xs ] ) + ')' + +def SplayTupleListForDB( first_column_name, second_column_name, xys ): return ' OR '.join( [ '( ' + first_column_name + '=' + str( x ) + ' AND ' + second_column_name + ' IN ' + SplayListForDB( ys ) + ' )' for ( x, ys ) in xys ] ) + +def ThumbnailResolution( original_resolution, target_resolution ): + + ( original_width, original_height ) = original_resolution + ( target_width, target_height ) = target_resolution + + if original_width > target_width: + + original_height = max( original_height * target_width / float( original_width ), 1 ) + original_width = target_width + + + if round( original_height ) > target_height: + + original_width = max( original_width * target_height / float( original_height ), 1 ) + original_height = target_height + + + return ( int( round( original_width ) ), int( round( original_height ) ) ) + +class HydrusYAMLBase( yaml.YAMLObject ): + + yaml_loader = yaml.SafeLoader + yaml_dumper = yaml.SafeDumper + +class Account( HydrusYAMLBase ): + + yaml_tag = u'!Account' + + def __init__( self, account_id, account_type, created, expires, used_data, banned_info = None ): + + HydrusYAMLBase.__init__( self ) + + self._account_id = account_id + self._account_type = account_type + self._created = created + self._expires = expires + self._used_data = used_data + self._banned_info = banned_info + + self._object_instantiation_timestamp = int( time.time() ) + + + def __repr__( self ): return self.ConvertToString() + + def __str__( self ): return self.ConvertToString() + + def _IsBanned( self ): + + if self._banned_info is None: return False + else: + + ( reason, created, expires ) = self._banned_info + + if expires is None: return True + else: return int( time.time() ) > expires + + + + def _IsExpired( self ): + + if 
self._expires is None: return False + else: return int( time.time() ) > self._expires + + + def CheckPermissions( self, permissions ): + + if type( permissions ) == int: permissions = ( permissions, ) + + if self._IsBanned(): raise PermissionException( 'This account is banned!' ) + + if self._IsExpired(): raise PermissionException( 'This account is expired.' ) + + ( max_num_bytes, max_num_requests ) = self._account_type.GetMaxMonthlyData() + + ( used_bytes, used_requests ) = self._used_data + + if max_num_bytes is not None and used_bytes > max_num_bytes: raise PermissionException( 'You have hit your data transfer limit (' + ConvertIntToBytes( max_num_bytes ) + '), and cannot download any more for the month.' ) + + if max_num_requests is not None and used_requests > max_num_requests: raise PermissionException( 'You have hit your requests limit (' + ConvertIntToPrettyString( max_num_requests ) + '), and cannot download any more for the month.' ) + + if len( permissions ) > 0 and True not in [ self._account_type.HasPermission( permission ) for permission in permissions ]: raise PermissionException( 'You do not have permission to do that.' ) + + + def ConvertToString( self ): return ConvertTimestampToPrettyAge( self._created ) + os.linesep + self._account_type.ConvertToString( self._used_data ) + os.linesep + 'which '+ ConvertTimestampToPrettyExpires( self._expires ) + + def GetAccountIdentifier( self ): return AccountIdentifier( account_id = account_id ) + + def GetAccountType( self ): return self._account_type + + def GetBannedInfo( self ): return self._banned_info + + def GetCreated( self ): return self._created + + def GetExpires( self ): return self._expires + + def GetExpiresString( self ): + + if self._IsBanned(): + + ( reason, created, expires ) = self._banned_info + + return 'banned ' + ConvertTimestampToPrettyAge( created ) + ', ' + ConvertTimestampToPrettyExpires( expires ) + ' because: ' + reason + + else: return ConvertTimestampToPrettyAge( self._created ) + ' and ' + ConvertTimestampToPrettyExpires( self._expires ) + + + def GetAccountId( self ): return self._account_id + + def GetUsedBytesString( self ): + + ( max_num_bytes, max_num_requests ) = self._account_type.GetMaxMonthlyData() + ( used_bytes, used_requests ) = self._used_data + + if max_num_bytes is None: return ConvertIntToBytes( used_bytes ) + ' used this month' + else: return ConvertIntToBytes( used_bytes ) + '/' + ConvertIntToBytes( max_num_bytes ) + ' used this month' + + + def GetUsedRequestsString( self ): + + ( max_num_bytes, max_num_requests ) = self._account_type.GetMaxMonthlyData() + ( used_bytes, used_requests ) = self._used_data + + if max_num_requests is None: return ConvertIntToPrettyString( used_requests ) + ' requests used this month' + else: return ConvertIntToPrettyString( used_requests ) + '/' + ConvertIntToPrettyString( max_num_requests ) + ' requests used this month' + + + def GetUsedData( self ): return self._used_data + + def HasPermission( self, permission ): + + if self._IsExpired(): return False + + ( max_num_bytes, max_num_requests ) = self._account_type.GetMaxMonthlyData() + + ( used_bytes, used_requests ) = self._used_data + + if max_num_bytes is not None and used_bytes >= max_num_bytes: return False + if max_num_requests is not None and used_requests >= max_num_requests: return False + + return self._account_type.HasPermission( permission ) + + + def IsAdmin( self ): return True in [ self.HasPermissions( permission ) for permission in ADMIN_PERMISSIONS ] + + def IsBanned( self ): return 
self._IsBanned() + + def IsStale( self ): return self._object_instantiation_timestamp + UPDATE_DURATION * 5 < int( time.time() ) + + def MakeFresh( self ): self._object_instantiation_timestamp = int( time.time() ) + + def MakeStale( self ): self._object_instantiation_timestamp = 0 + + def RequestMade( self, num_bytes ): + + ( used_bytes, used_requests ) = self._used_data + + used_bytes += num_bytes + used_requests += 1 + + self._used_data = ( used_bytes, used_requests ) + + +class AccountIdentifier( HydrusYAMLBase ): + + yaml_tag = u'!AccountIdentifier' + + def __init__( self, access_key = None, hash = None, tag = None, account_id = None ): + + HydrusYAMLBase.__init__( self ) + + self._access_key = access_key + self._hash = hash + self._tag = tag + self._account_id = account_id + + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __hash__( self ): return ( self._hash, self._tag, self._account_id ).__hash__() + + def __ne__( self, other ): return self.__hash__() != other.__hash__() + + def GetAccessKey( self ): return self._access_key + + def GetAccountId( self ): return self._account_id + + def GetHash( self ): return self._hash + + def GetMapping( self ): return ( self._tag, self._hash ) + + def HasAccessKey( self ): return self._access_key is not None + + def HasHash( self ): return self._hash is not None and self._tag is None + + def HasAccountId( self ): return self._account_id is not None + + def HasMapping( self ): return self._tag is not None and self._hash is not None + +class AccountType( HydrusYAMLBase ): + + yaml_tag = u'!AccountType' + + def __init__( self, title, permissions, max_monthly_data ): + + HydrusYAMLBase.__init__( self ) + + self._title = title + self._permissions = permissions + self._max_monthly_data = max_monthly_data + + + def __repr__( self ): return self.ConvertToString() + + def GetPermissions( self ): return self._permissions + + def GetTitle( self ): return self._title + + def GetMaxMonthlyData( self ): return self._max_monthly_data + + def GetMaxMonthlyDataString( self ): + + ( max_num_bytes, max_num_requests ) = self._max_monthly_data + + if max_num_bytes is None: max_num_bytes_string = 'No limit' + else: max_num_bytes_string = ConvertIntToBytes( max_num_bytes ) + + if max_num_requests is None: max_num_requests_string = 'No limit' + else: max_num_requests_string = ConvertIntToPrettyString( max_num_requests ) + + return ( max_num_bytes_string, max_num_requests_string ) + + + def ConvertToString( self, data_usage = None ): + + result_string = self._title + ' with ' + + if len( self._permissions ) == 0: result_string += 'no permissions' + else: result_string += ', '.join( [ permissions_string_lookup[ permission ] for permission in self._permissions ] ) + ' permissions' + + return result_string + + + def HasPermission( self, permission ): return permission in self._permissions + +class ClientFilePetitionDenial( HydrusYAMLBase ): + + yaml_tag = u'!ClientFilePetitionDenial' + + def __init__( self, hashes ): + + HydrusYAMLBase.__init__( self ) + + self._hashes = hashes + + + def GetInfo( self ): return self._hashes + +class ClientFilePetitions( HydrusYAMLBase ): + + yaml_tag = u'!ClientFilePetitions' + + def __init__( self, petitions ): + + HydrusYAMLBase.__init__( self ) + + self._petitions = petitions + + + def __iter__( self ): return ( petition for petition in self._petitions ) + + def __len__( self ): return len( self._petitions ) + + def GetHashes( self ): return IntelligentMassUnion( [ hashes for ( reason, hashes ) in 
self._petitions ] ) + +class ClientMappingPetitionDenial( HydrusYAMLBase ): + + yaml_tag = u'!ClientMappingPetitionDenial' + + def __init__( self, tag, hashes ): + + HydrusYAMLBase.__init__( self ) + + self._tag = tag + self._hashes = hashes + + + def GetInfo( self ): return ( self._tag, self._hashes ) + +class ClientMappingPetitions( HydrusYAMLBase ): + + yaml_tag = u'!ClientMappingPetitions' + + def __init__( self, petitions, hash_ids_to_hashes ): + + HydrusYAMLBase.__init__( self ) + + self._petitions = petitions + + self._hash_ids_to_hashes = hash_ids_to_hashes + + + def __iter__( self ): return ( ( reason, tag, [ self._hash_ids_to_hashes[ hash_id ] for hash_id in hash_ids ] ) for ( reason, tag, hash_ids ) in self._petitions ) + + def __len__( self ): return len( self._petitions ) + + def GetHashes( self ): return self._hash_ids_to_hashes.values() + + def GetTags( self ): return [ tag for ( reason, tag, hash_ids ) in self._petitions ] + +class ClientMappings( HydrusYAMLBase ): + + yaml_tag = u'!ClientMappings' + + def __init__( self, mappings, hash_ids_to_hashes ): + + HydrusYAMLBase.__init__( self ) + + self._mappings = mappings + + self._hash_ids_to_hashes = hash_ids_to_hashes + + + def __iter__( self ): return ( ( tag, [ self._hash_ids_to_hashes[ hash_id ] for hash_id in hash_ids ] ) for ( tag, hash_ids ) in self._mappings ) + + def __len__( self ): return len( self._mappings ) + + def GetHashes( self ): return self._hash_ids_to_hashes.values() + + def GetTags( self ): return [ tag for ( tag, hash_ids ) in self._mappings ] + +class ClientServiceIdentifier( HydrusYAMLBase ): + + yaml_tag = u'!ClientServiceIdentifier' + + def __init__( self, service_key, type, name ): + + HydrusYAMLBase.__init__( self ) + + self._service_key = service_key + self._type = type + self._name = name + + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __hash__( self ): return self._service_key.__hash__() + + def __ne__( self, other ): return self.__hash__() != other.__hash__() + + def GetName( self ): return self._name + + def GetServiceKey( self ): return self._service_key + + def GetType( self ): return self._type + +class DAEMON( threading.Thread ): + + def __init__( self, name, callable, period = 1200 ): + + threading.Thread.__init__( self, name = name ) + + self._callable = callable + self._period = period + + self._event = threading.Event() + + pubsub.sub( self, 'shutdown', 'shutdown' ) + + + def shutdown( self ): self._event.set() + +class DAEMONQueue( DAEMON ): + + def __init__( self, name, callable, queue_topic, period = 10 ): + + DAEMON.__init__( self, name, callable, period ) + + self._queue = Queue.Queue() + self._queue_topic = queue_topic + + self.start() + + pubsub.sub( self, 'put', queue_topic ) + + + def put( self, data ): self._queue.put( data ) + + def run( self ): + + time.sleep( 3 ) + + while True: + + while self._queue.qsize() == 0: + + if shutdown: return + + self._event.wait( self._period ) + + self._event.clear() + + + items = [] + + while self._queue.qsize() > 0: items.append( self._queue.get() ) + + try: self._callable( items ) + except: print( traceback.format_exc() ) + + + +class DAEMONWorker( DAEMON ): + + def __init__( self, name, callable, topics = [], period = 1200 ): + + DAEMON.__init__( self, name, callable, period ) + + self._topics = topics + + self.start() + + for topic in topics: pubsub.sub( self, 'set', topic ) + + + def run( self ): + + time.sleep( 3 ) + + while True: + + if shutdown: return + + try: self._callable() + except: print( 
traceback.format_exc() ) + + if shutdown: return + + self._event.wait( self._period ) + + self._event.clear() + + + + def set( self, *args, **kwargs ): self._event.set() + +class HydrusUpdate( HydrusYAMLBase ): + + yaml_tag = u'!HydrusUpdate' + + def __init__( self, news, begin, end ): + + HydrusYAMLBase.__init__( self ) + + self._news = news + + self._begin = begin + self._end = end + + + def GetBegin( self ): return self._begin + + def GetEnd( self ): return self._end + + def GetNextBegin( self ): return self._end + 1 + + def GetNews( self ): return [ ( news, timestamp ) for ( news, timestamp ) in self._news ] + + def SplitIntoSubUpdates( self ): return [ self ] + +class HydrusUpdateFileRepository( HydrusUpdate ): + + yaml_tag = u'!HydrusUpdateFileRepository' + + def __init__( self, files, deleted_hashes, news, begin, end ): + + HydrusUpdate.__init__( self, news, begin, end ) + + self._files = files + self._deleted_hashes = deleted_hashes + + + def GetDeletedHashes( self ): return self._deleted_hashes + + def GetFiles( self ): return self._files + + def GetHashes( self ): return [ hash for ( hash, size, mime, timestamp, width, height, duration, num_frames, num_words ) in self._files ] + +class HydrusUpdateTagRepository( HydrusUpdate ): + + yaml_tag = u'!HydrusUpdateTagRepository' + + def __init__( self, mappings, deleted_mappings, hash_ids_to_hashes, news, begin, end ): + + HydrusUpdate.__init__( self, news, begin, end ) + + self._hash_ids_to_hashes = hash_ids_to_hashes + + self._mappings = mappings + self._deleted_mappings = deleted_mappings + + + def GetDeletedMappings( self ): return ( ( tag, [ self._hash_ids_to_hashes[ hash_id ] for hash_id in hash_ids ] ) for ( tag, hash_ids ) in self._deleted_mappings ) + + def GetMappings( self ): return ( ( tag, [ self._hash_ids_to_hashes[ hash_id ] for hash_id in hash_ids ] ) for ( tag, hash_ids ) in self._mappings ) + + def GetTags( self ): return [ tag for ( tag, hash_ids ) in self._mappings ] + + def SplitIntoSubUpdates( self ): + + updates = [ HydrusUpdateTagRepository( [], [], {}, self._news, self._begin, None ) ] + + total_mappings = sum( [ len( hashes ) for ( tag, hashes ) in self._mappings ] ) + + total_tags = len( self._mappings ) + + number_updates_to_make = total_mappings / 500 + + if number_updates_to_make == 0: number_updates_to_make = 1 + + int_to_split_by = total_tags / number_updates_to_make + + if int_to_split_by == 0: int_to_split_by = 1 + + for i in range( 0, len( self._mappings ), int_to_split_by ): + + mappings_subset = self._mappings[ i : i + int_to_split_by ] + + updates.append( HydrusUpdateTagRepository( mappings_subset, [], self._hash_ids_to_hashes, [], self._begin, None ) ) + + + for i in range( 0, len( self._deleted_mappings ), int_to_split_by ): + + deleted_mappings_subset = self._deleted_mappings[ i : i + int_to_split_by ] + + updates.append( HydrusUpdateTagRepository( [], deleted_mappings_subset, self._hash_ids_to_hashes, [], self._begin, None ) ) + + + updates.append( HydrusUpdateTagRepository( [], [], {}, [], self._begin, self._end ) ) + + return updates + + +class JobInternal(): + + yaml_tag = u'!JobInternal' + + def __init__( self, action, type, *args, **kwargs ): + + self._action = action + self._type = type + self._args = args + self._kwargs = kwargs + + self._result = None + self._result_ready = threading.Event() + + + def GetAction( self ): return self._action + + def GetArgs( self ): return self._args + + def GetKWArgs( self ): return self._kwargs + + def GetResult( self ): + + while True: + + if 
self._result_ready.wait( 5 ) == True: break + elif shutdown: raise Exception( 'Application quit before db could serve result!' ) + + + if issubclass( type( self._result ), Exception ): raise self._result + else: return self._result + + + def GetType( self ): return self._type + + def PutResult( self, result ): + + self._result = result + + self._result_ready.set() + + +class JobServer(): + + yaml_tag = u'!JobServer' + + def __init__( self, service_identifier, account_identifier, ip, request_type, request, request_args, request_length ): + + self._service_identifier = service_identifier + self._account_identifier = account_identifier + self._ip = ip + self._request_type = request_type + self._request = request + self._request_args = request_args + self._request_length = request_length + + self._result = None + self._result_ready = threading.Event() + + + def GetInfo( self ): return ( self._service_identifier, self._account_identifier, self._ip, self._request_type, self._request, self._request_args, self._request_length ) + + def GetResult( self ): + + while True: + + if self._result_ready.wait( 5 ) == True: break + elif shutdown: raise Exception( 'Application quit before db could serve result!' ) + + + if issubclass( type( self._result ), Exception ): raise self._result + else: return self._result + + + def PutResult( self, result ): + + self._result = result + + self._result_ready.set() + + +class ResponseContext(): + + def __init__( self, status_code, mime = None, body = '', filename = None ): + + self._status_code = status_code + self._mime = mime + self._body = body + self._filename = filename + + + def GetFilename( self ): return self._filename + + def GetLength( self ): return len( self._body ) + + def GetMimeBody( self ): return ( self._mime, self._body ) + + def GetStatusCode( self ): return self._status_code + + def HasBody( self ): return self._body != '' + + def HasFilename( self ): return self._filename is not None + +class ServerPetition( HydrusYAMLBase ): + + yaml_tag = u'!ServerPetition' + + def __init__( self, petitioner_identifier ): + + HydrusYAMLBase.__init__( self ) + + self._petitioner_identifier = petitioner_identifier + + + def GetPetitionerIdentifier( self ): return self._petitioner_identifier + +class ServerFilePetition( ServerPetition ): + + yaml_tag = u'!ServerFilePetition' + + def __init__( self, petitioner_identifier, reason, hashes ): + + ServerPetition.__init__( self, petitioner_identifier ) + + self._reason = reason + self._hashes = hashes + + + def GetClientPetition( self ): return ClientFilePetitions( [ ( self._reason, self._hashes ) ] ) + + def GetClientPetitionDenial( self ): return ClientFilePetitionDenial( self._hashes ) + + def GetPetitionHashes( self ): return self._hashes + + def GetPetitionInfo( self ): return ( self._reason, self._hashes ) + + def GetPetitionInfoDenial( self ): return ( self._hashes, ) + + def GetPetitionString( self ): return 'For ' + ConvertIntToPrettyString( len( self._hashes ) ) + ' files:' + os.linesep + os.linesep + self._reason + +class ServerMappingPetition( ServerPetition ): + + yaml_tag = u'!ServerMappingPetition' + + def __init__( self, petitioner_identifier, reason, tag, hashes ): + + ServerPetition.__init__( self, petitioner_identifier ) + + self._reason = reason + self._tag = tag + self._hashes = hashes + + + def GetClientPetition( self ): + + hash_ids_to_hashes = { enum : hash for ( enum, hash ) in enumerate( self._hashes ) } + + hash_ids = hash_ids_to_hashes.keys() + + return ClientMappingPetitions( [ ( 
self._reason, self._tag, hash_ids ) ], hash_ids_to_hashes ) + + + def GetClientPetitionDenial( self ): return ClientMappingPetitionDenial( self._tag, self._hashes ) + + def GetPetitionHashes( self ): return self._hashes + + def GetPetitionInfo( self ): return ( self._reason, self._tag, self._hashes ) + + def GetPetitionString( self ): return 'Tag: ' + self._tag + ' for ' + ConvertIntToPrettyString( len( self._hashes ) ) + ' files.' + os.linesep + os.linesep + 'Reason: ' + self._reason + +class ServerServiceIdentifier( HydrusYAMLBase ): + + yaml_tag = u'!ServerServiceIdentifier' + + def __init__( self, type, port ): + + HydrusYAMLBase.__init__( self ) + + self._type = type + self._port = port + + + def __eq__( self, other ): return self.__hash__() == other.__hash__() + + def __hash__( self ): return ( self._type, self._port ).__hash__() + + def __ne__( self, other ): return self.__hash__() != other.__hash__() + + def GetPort( self ): return self._port + + def GetType( self ): return self._type + +# sqlite mod + +sqlite3.register_adapter( dict, yaml.safe_dump ) +sqlite3.register_adapter( Account, yaml.safe_dump ) +sqlite3.register_adapter( AccountType, yaml.safe_dump ) +sqlite3.register_converter( 'TEXT_YAML', yaml.safe_load ) + +sqlite3.register_converter( 'BLOB_BYTES', str ) + +# for some reason, sqlite doesn't parse to int before this, despite the column affinity +# it gives the register_converter function a bytestring :/ +def integer_boolean_to_bool( integer_boolean ): return bool( int( integer_boolean ) ) + +sqlite3.register_adapter( bool, int ) +sqlite3.register_converter( 'INTEGER_BOOLEAN', integer_boolean_to_bool ) + +# no converters in this case, since we always want to send the dumped string, not the object, to the network +sqlite3.register_adapter( HydrusUpdateFileRepository, yaml.safe_dump ) +sqlite3.register_adapter( HydrusUpdateTagRepository, yaml.safe_dump ) + +# Custom Exceptions + +class NetworkVersionException( Exception ): pass +class NoContentException( Exception ): pass +class NotFoundException( Exception ): pass +class NotModifiedException( Exception ): pass +class ForbiddenException( Exception ): pass +class PermissionException( Exception ): pass +class ShutdownException( Exception ): pass +class WrongServiceTypeException( Exception ): pass diff --git a/include/HydrusFlashHandling.py b/include/HydrusFlashHandling.py new file mode 100755 index 00000000..d8f5011d --- /dev/null +++ b/include/HydrusFlashHandling.py @@ -0,0 +1,22 @@ +import cStringIO +import hexagonitswfheader +import traceback + +# to all out there who write libraries: +# hexagonit.swfheader is a perfect library. it is how you are supposed to do it. +def GetFlashProperties( file ): + + f = cStringIO.StringIO( file ) + + metadata = hexagonitswfheader.parse( f ) + + width = metadata[ 'width' ] + height = metadata[ 'height' ] + + num_frames = metadata[ 'frames' ] + fps = metadata[ 'fps' ] + + duration = ( 1000 * num_frames ) / fps + + return ( ( width, height ), duration, num_frames ) + \ No newline at end of file diff --git a/include/HydrusImageHandling.py b/include/HydrusImageHandling.py new file mode 100755 index 00000000..b6703554 --- /dev/null +++ b/include/HydrusImageHandling.py @@ -0,0 +1,459 @@ +import cStringIO +import numpy.core.multiarray # important this comes before cv! 
+import cv +import HydrusConstants as HC +from PIL import Image as PILImage +import struct +import threading +import time +import traceback +import wx + +#LINEAR_SCALE_PALETTE = [ 0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12, 13, 13, 13, 14, 14, 14, 15, 15, 15, 16, 16, 16, 17, 17, 17, 18, 18, 18, 19, 19, 19, 20, 20, 20, 21, 21, 21, 22, 22, 22, 23, 23, 23, 24, 24, 24, 25, 25, 25, 26, 26, 26, 27, 27, 27, 28, 28, 28, 29, 29, 29, 30, 30, 30, 31, 31, 31, 32, 32, 32, 33, 33, 33, 34, 34, 34, 35, 35, 35, 36, 36, 36, 37, 37, 37, 38, 38, 38, 39, 39, 39, 40, 40, 40, 41, 41, 41, 42, 42, 42, 43, 43, 43, 44, 44, 44, 45, 45, 45, 46, 46, 46, 47, 47, 47, 48, 48, 48, 49, 49, 49, 50, 50, 50, 51, 51, 51, 52, 52, 52, 53, 53, 53, 54, 54, 54, 55, 55, 55, 56, 56, 56, 57, 57, 57, 58, 58, 58, 59, 59, 59, 60, 60, 60, 61, 61, 61, 62, 62, 62, 63, 63, 63, 64, 64, 64, 65, 65, 65, 66, 66, 66, 67, 67, 67, 68, 68, 68, 69, 69, 69, 70, 70, 70, 71, 71, 71, 72, 72, 72, 73, 73, 73, 74, 74, 74, 75, 75, 75, 76, 76, 76, 77, 77, 77, 78, 78, 78, 79, 79, 79, 80, 80, 80, 81, 81, 81, 82, 82, 82, 83, 83, 83, 84, 84, 84, 85, 85, 85, 86, 86, 86, 87, 87, 87, 88, 88, 88, 89, 89, 89, 90, 90, 90, 91, 91, 91, 92, 92, 92, 93, 93, 93, 94, 94, 94, 95, 95, 95, 96, 96, 96, 97, 97, 97, 98, 98, 98, 99, 99, 99, 100, 100, 100, 101, 101, 101, 102, 102, 102, 103, 103, 103, 104, 104, 104, 105, 105, 105, 106, 106, 106, 107, 107, 107, 108, 108, 108, 109, 109, 109, 110, 110, 110, 111, 111, 111, 112, 112, 112, 113, 113, 113, 114, 114, 114, 115, 115, 115, 116, 116, 116, 117, 117, 117, 118, 118, 118, 119, 119, 119, 120, 120, 120, 121, 121, 121, 122, 122, 122, 123, 123, 123, 124, 124, 124, 125, 125, 125, 126, 126, 126, 127, 127, 127, 128, 128, 128, 129, 129, 129, 130, 130, 130, 131, 131, 131, 132, 132, 132, 133, 133, 133, 134, 134, 134, 135, 135, 135, 136, 136, 136, 137, 137, 137, 138, 138, 138, 139, 139, 139, 140, 140, 140, 141, 141, 141, 142, 142, 142, 143, 143, 143, 144, 144, 144, 145, 145, 145, 146, 146, 146, 147, 147, 147, 148, 148, 148, 149, 149, 149, 150, 150, 150, 151, 151, 151, 152, 152, 152, 153, 153, 153, 154, 154, 154, 155, 155, 155, 156, 156, 156, 157, 157, 157, 158, 158, 158, 159, 159, 159, 160, 160, 160, 161, 161, 161, 162, 162, 162, 163, 163, 163, 164, 164, 164, 165, 165, 165, 166, 166, 166, 167, 167, 167, 168, 168, 168, 169, 169, 169, 170, 170, 170, 171, 171, 171, 172, 172, 172, 173, 173, 173, 174, 174, 174, 175, 175, 175, 176, 176, 176, 177, 177, 177, 178, 178, 178, 179, 179, 179, 180, 180, 180, 181, 181, 181, 182, 182, 182, 183, 183, 183, 184, 184, 184, 185, 185, 185, 186, 186, 186, 187, 187, 187, 188, 188, 188, 189, 189, 189, 190, 190, 190, 191, 191, 191, 192, 192, 192, 193, 193, 193, 194, 194, 194, 195, 195, 195, 196, 196, 196, 197, 197, 197, 198, 198, 198, 199, 199, 199, 200, 200, 200, 201, 201, 201, 202, 202, 202, 203, 203, 203, 204, 204, 204, 205, 205, 205, 206, 206, 206, 207, 207, 207, 208, 208, 208, 209, 209, 209, 210, 210, 210, 211, 211, 211, 212, 212, 212, 213, 213, 213, 214, 214, 214, 215, 215, 215, 216, 216, 216, 217, 217, 217, 218, 218, 218, 219, 219, 219, 220, 220, 220, 221, 221, 221, 222, 222, 222, 223, 223, 223, 224, 224, 224, 225, 225, 225, 226, 226, 226, 227, 227, 227, 228, 228, 228, 229, 229, 229, 230, 230, 230, 231, 231, 231, 232, 232, 232, 233, 233, 233, 234, 234, 234, 235, 235, 235, 236, 236, 236, 237, 237, 237, 238, 238, 238, 239, 239, 239, 240, 240, 240, 241, 241, 241, 242, 242, 242, 243, 243, 243, 244, 244, 244, 245, 245, 245, 246, 246, 246, 
247, 247, 247, 248, 248, 248, 249, 249, 249, 250, 250, 250, 251, 251, 251, 252, 252, 252, 253, 253, 253, 254, 254, 254, 255, 255, 255 ] + +def ConvertToPngIfBmp( file ): + + if HC.GetMimeFromString( file[:256] ) == HC.IMAGE_BMP: + + pil_image = GeneratePILImageFromFile( file ) + + f = cStringIO.StringIO() + + pil_image = pil_image.convert( 'P' ) + + pil_image.save( f, 'PNG' ) + + f.seek( 0 ) + + file = f.read() + + f.close() + + + return file + +def EfficientlyResizeImage( pil_image, ( x, y ) ): + + ( im_x, im_y ) = pil_image.size + + if x >= im_x and y >= im_y: return pil_image + + if pil_image.mode == 'RGB': # low quality resize screws up alpha channel! + + if im_x > 2 * x and im_y > 2 * y: pil_image.thumbnail( ( 2 * x, 2 * y ), PILImage.NEAREST ) + + + return pil_image.resize( ( x, y ), PILImage.ANTIALIAS ) + +def EfficientlyThumbnailImage( pil_image, ( x, y ) ): + + ( im_x, im_y ) = pil_image.size + + if pil_image.mode == 'RGB': # low quality resize screws up alpha channel! + + if im_x > 2 * x or im_y > 2 * y: pil_image.thumbnail( ( 2 * x, 2 * y ), PILImage.NEAREST ) + + + pil_image.thumbnail( ( x, y ), PILImage.ANTIALIAS ) + +def GenerateAnimatedFrame( pil_image, target_resolution, canvas ): + + if 'duration' not in pil_image.info: duration = 40 # 25 fps default when duration is missing or too funky to extract. most stuff looks ok at this. + else: + + duration = pil_image.info[ 'duration' ] + + if duration == 0: duration = 40 + + + current_frame = EfficientlyResizeImage( pil_image, target_resolution ) + + if pil_image.mode == 'P' and 'transparency' in pil_image.info: + + current_frame = current_frame.convert( 'RGBA' ) + + if canvas is None: canvas = current_frame + else: canvas.paste( current_frame, None, current_frame ) # yeah, use the rgba image as its own mask, wut. 
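+        # ( as I understand PIL, when the mask is an RGBA image its alpha band is what gets used, so fully
+        # transparent pixels in the new frame leave the previous canvas showing through - this is how
+        # partial gif frames composite over what came before )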
+ + else: canvas = current_frame + + return ( canvas, duration ) + +def GenerateHydrusBitmapFromFile( file ): + + pil_image = GeneratePILImageFromFile( file ) + + return GenerateHydrusBitmapFromPILImage( pil_image ) + +def GenerateHydrusBitmapFromPILImage( pil_image ): + + ( image_width, image_height ) = pil_image.size + + if pil_image.mode == 'RGBA' or ( pil_image.mode == 'P' and pil_image.info.has_key( 'transparency' ) ): + + if pil_image.mode == 'P': pil_image = pil_image.convert( 'RGBA' ) + + return HydrusBitmap( pil_image.tostring(), wx.BitmapBufferFormat_RGBA, pil_image.size ) + + else: + + if pil_image.mode in ( 'P', 'L', 'LA' ): pil_image = pil_image.convert( 'RGB' ) + + return HydrusBitmap( pil_image.tostring(), wx.BitmapBufferFormat_RGB, pil_image.size ) + + +def GeneratePerceptualHash( thumbnail_file ): + + thumbnail = GeneratePILImageFromFile( thumbnail_file ) + + # convert to 32 x 32 greyscale + + if thumbnail.mode == 'RGBA': + + # this is some code i picked up somewhere + # another great example of PIL failing; it turns all alpha to pure black on a RGBA->RGB + + thumbnail.load() + + canvas = PILImage.new( 'RGB', thumbnail.size, ( 255, 255, 255 ) ) + + canvas.paste( thumbnail, mask = thumbnail.split()[3] ) + + thumbnail = canvas + + + thumbnail = thumbnail.convert( 'L' ) + + thumbnail = thumbnail.resize( ( 32, 32 ), PILImage.ANTIALIAS ) + + # convert to mat + + cv_thumbnail_8 = cv.CreateMatHeader( 32, 32, cv.CV_8UC1 ) + + cv.SetData( cv_thumbnail_8, thumbnail.tostring() ) + + cv_thumbnail_32 = cv.CreateMat( 32, 32, cv.CV_32FC1 ) + + cv.Convert( cv_thumbnail_8, cv_thumbnail_32 ) + + # compute dct + + dct = cv.CreateMat( 32, 32, cv.CV_32FC1 ) + + cv.DCT( cv_thumbnail_32, dct, cv.CV_DXT_FORWARD ) + + # take top left 8x8 of dct + + dct = cv.GetSubRect( dct, ( 0, 0, 8, 8 ) ) + + # get mean of dct, excluding [0,0] + + mask = cv.CreateMat( 8, 8, cv.CV_8U ) + + cv.Set( mask, 1 ) + + mask[0,0] = 0 + + channel_averages = cv.Avg( dct, mask ) + + average = channel_averages[0] + + # make a monochromatic, 64-bit hash of whether the entry is above or below the mean + + bytes = [] + + for i in range( 8 ): + + byte = 0 + + for j in range( 8 ): + + byte <<= 1 # shift byte one left + + value = dct[i,j] + + if value > average: byte |= 1 + + + bytes.append( byte ) + + + answer = str( bytearray( bytes ) ) + + # we good + + return answer + +def GeneratePILImageFromFile( file ): return PILImage.open( cStringIO.StringIO( file ) ) + +def GenerateResolutionAndNumFramesFromFile( file ): + + pil_image = GeneratePILImageFromFile( file ) + + ( x, y ) = pil_image.size + + try: + + pil_image.seek( 1 ) + pil_image.seek( 0 ) + + num_frames = 1 + + while True: + + try: + + pil_image.seek( pil_image.tell() + 1 ) + num_frames += 1 + + except: break + + + except: num_frames = 1 + + return ( ( x, y ), num_frames ) + +def GenerateThumbnailFileFromFile( file, dimensions ): + + pil_image = GeneratePILImageFromFile( file ) + + return GenerateThumbnailFileFromImage( pil_image, dimensions ) + +def GenerateThumbnailFileFromImage( pil_image, dimensions = None ): + + if dimensions is None: dimensions = HC.UNSCALED_THUMBNAIL_DIMENSIONS + + EfficientlyThumbnailImage( pil_image, dimensions ) + + f_t = cStringIO.StringIO() + + if pil_image.mode == 'P' and pil_image.info.has_key( 'transparency' ): + + pil_image.save( f_t, 'PNG', transparency = pil_image.info[ 'transparency' ] ) + + elif pil_image.mode == 'RGBA': pil_image.save( f_t, 'PNG' ) + else: + + pil_image = pil_image.convert( 'RGB' ) + + pil_image.save( f_t, 'JPEG', 
quality=92 ) + + + f_t.seek( 0 ) + + thumbnail = f_t.read() + + f_t.close() + + return thumbnail + +def GetHammingDistance( phash1, phash2 ): + + distance = 0 + + phash1 = bytearray( phash1 ) + phash2 = bytearray( phash2 ) + + for i in range( len( phash1 ) ): + + xor = phash1[i] ^ phash2[i] + + while xor > 0: + + distance += 1 + xor &= xor - 1 + + + + return distance + +def RenderImageFrames( image_container, file, target_resolution, synchronous ): + + if image_container.IsAnimated(): renderer = AnimatedFrameRenderer( image_container, file, target_resolution ) + else: renderer = StaticFrameRenderer( image_container, file, target_resolution ) + + if synchronous: renderer.Render() + else: threading.Thread( target = renderer.RenderCallAfter ).start() + +def RenderImageFromFile( file, hash, target_resolution = None, synchronous = True ): + + try: + + ( original_resolution, num_frames ) = GenerateResolutionAndNumFramesFromFile( file ) + + if target_resolution is None: target_resolution = original_resolution + + image_container = RenderedImageContainer( hash, original_resolution, target_resolution, num_frames ) + + RenderImageFrames( image_container, file, target_resolution, synchronous ) + + return image_container + + except: raise Exception( 'Attempted to render the image, but it was either formatted slightly incorrectly or PIL could not handle it; look up PIL in the hydrus help for more info.' ) + +class FrameRenderer(): + + def __init__( self, image_container, file, target_resolution ): + + self._image_container = image_container + self._pil_image = GeneratePILImageFromFile( file ) + self._target_resolution = target_resolution + + +class AnimatedFrameRenderer( FrameRenderer ): + + def GetFrames( self ): + + canvas = None + + global_palette = self._pil_image.palette + + dirty = self._pil_image.palette.dirty + mode = self._pil_image.palette.mode + rawmode = self._pil_image.palette.rawmode + + # believe it or not, doing this actually fixed a couple of gifs! + self._pil_image.seek( 1 ) + self._pil_image.seek( 0 ) + + while True: + + ( canvas, duration ) = GenerateAnimatedFrame( self._pil_image, self._target_resolution, canvas ) + + yield ( GenerateHydrusBitmapFromPILImage( canvas ), duration ) + + try: + + self._pil_image.seek( self._pil_image.tell() + 1 ) + + if self._pil_image.palette == global_palette: # for some reason, when we fall back to global palette (no local-frame palette), we reset bunch of important variables! 
+ + self._pil_image.palette.dirty = dirty + self._pil_image.palette.mode = mode + self._pil_image.palette.rawmode = rawmode + + + except: break + + + + def Render( self ): + + for ( frame, duration ) in self.GetFrames(): self._image_container.AddFrame( frame, duration ) + + + def RenderCallAfter( self ): + + time.sleep( 0 ) # thread yield + + for ( frame, duration ) in self.GetFrames(): wx.CallAfter( self._image_container.AddFrame, frame, duration ) + + HC.pubsub.pub( 'finished_rendering', self._image_container.GetKey() ) + + +class StaticFrameRenderer( FrameRenderer ): + + def GetFrame( self ): return GenerateHydrusBitmapFromPILImage( EfficientlyResizeImage( self._pil_image, self._target_resolution ) ) + + def Render( self ): self._image_container.AddFrame( self.GetFrame() ) + + def RenderCallAfter( self ): + + time.sleep( 0 ) # thread yield + + wx.CallAfter( self._image_container.AddFrame, self.GetFrame() ) + + HC.pubsub.pub( 'finished_rendering', self._image_container.GetKey() ) + + +class HydrusBitmap(): + + def __init__( self, data, format, size ): + + self._data = data + self._format = format + self._size = size + + + def CreateWxBmp( self ): + + ( width, height ) = self._size + + if self._format == wx.BitmapBufferFormat_RGB: return wx.BitmapFromBuffer( width, height, self._data ) + else: return wx.BitmapFromBufferRGBA( width, height, self._data ) + + + def GetEstimatedMemoryFootprint( self ): return len( self._data ) + + def GetSize( self ): return self._size + +class RenderedImageContainer(): + + def __init__( self, hash, original_resolution, my_resolution, num_frames ): + + self._hash = hash + self._original_resolution = original_resolution + self._my_resolution = my_resolution + self._num_frames = num_frames + + ( original_width, original_height ) = original_resolution + + ( my_width, my_height ) = my_resolution + + width_zoom = my_width / float( original_width ) + height_zoom = my_height / float( original_height ) + + self._zoom = min( ( width_zoom, height_zoom ) ) + + if self._zoom > 1.0: self._zoom = 1.0 + + self._frames = [] + self._durations = [] + + self._finished_rendering = False + + + def AddFrame( self, frame, duration = None ): + + self._frames.append( frame ) + + if duration is not None: self._durations.append( duration ) + + + def GetDuration( self, index ): return self._durations[ index ] + + def GetEstimatedMemoryFootprint( self ): return sum( [ frame.GetEstimatedMemoryFootprint() for frame in self._frames ] ) + + def GetFrame( self, index = None ): + + if index is None: return self._frames[ 0 ] + else: return self._frames[ index ] + + + def GetHash( self ): return self._hash + + def GetKey( self ): return ( self._hash, self._my_resolution ) + + def GetNumFrames( self ): return self._num_frames + + def GetResolution( self ): return self._original_resolution + + def GetSize( self ): return self._my_resolution + + def GetTotalDuration( self ): return sum( self._durations ) + + def GetZoom( self ): return self._zoom + + def HasFrame( self, index = None ): + + if index is None: index = 0 + + return len( self._frames ) > index + + + def IsAnimated( self ): return self._num_frames > 1 + + def IsFinishedRendering( self ): return len( self._frames ) == self._num_frames + + def IsScaled( self ): return self._zoom != 1.0 + \ No newline at end of file diff --git a/include/HydrusMessageHandling.py b/include/HydrusMessageHandling.py new file mode 100755 index 00000000..1d021acb --- /dev/null +++ b/include/HydrusMessageHandling.py @@ -0,0 +1,243 @@ +import Crypto.Cipher.AES 
+import Crypto.Cipher.PKCS1_OAEP +import Crypto.Hash.SHA256 +import Crypto.Signature.PKCS1_v1_5 +import Crypto.PublicKey.RSA +import hashlib +import HydrusConstants as HC +import os +import time +import traceback +import wx +import yaml +import zlib + +def GenerateFilteredRandomBytes( byte_to_exclude, num_bytes ): + + bytes = [] + + while len( bytes ) < num_bytes: + + new_byte = os.urandom( 1 ) + + if new_byte != byte_to_exclude: bytes.append( new_byte ) + + + return ''.join( bytes ) + +def GenerateNewPrivateKey(): return Crypto.PublicKey.RSA.generate( 2048 ).exportKey() + +def GetPublicKey( private_key_text ): + + private_key = TextToKey( private_key_text ) + + public_key = private_key.publickey() + + return public_key.exportKey() + +def TextToKey( text ): return Crypto.PublicKey.RSA.importKey( text ) + +def PadAESMessage( message ): + + block_size = 16 + + # get last byte + # add random gumpf (except for last byte), then add last byte again + + last_byte = message[-1] + + num_bytes_to_add = block_size - ( len( message ) % block_size ) + + pad = GenerateFilteredRandomBytes( last_byte, num_bytes_to_add - 1 ) + last_byte + + return message + pad + +def UnpadAESMessage( message ): + + block_size = 16 + + # check last byte, jump back to previous instance of that byte + + last_byte = message[-1] + + i = 2 + + while True: + + if message[-i] == last_byte: break + + i += 1 + + + index_of_correct_end = len( message ) - i + + return message[:index_of_correct_end + 1] + +def PackageStatusForDelivery( status, public_key_text ): + + public_key = TextToKey( public_key_text ) + + yamled = yaml.safe_dump( status ) + + gzipped = zlib.compress( yamled ) + + rsa_cipher = Crypto.Cipher.PKCS1_OAEP.new( public_key ) + + # my understanding is that I don't have to manually pad this, cause OAEP does it for me. + # if that is wrong, then lol + encrypted_gzipped = rsa_cipher.encrypt( gzipped ) + + return encrypted_gzipped + +def UnpackageDeliveredStatus( encrypted_gzipped, private_key_text ): + + private_key = TextToKey( private_key_text ) + + rsa_cipher = Crypto.Cipher.PKCS1_OAEP.new( private_key ) + + gzipped = rsa_cipher.decrypt( encrypted_gzipped ) + + yamled = zlib.decompress( gzipped ) + + status = yaml.safe_load( yamled ) + + return status + +def PackageMessageForDelivery( message_object, public_key_text ): + + public_key = TextToKey( public_key_text ) + + yamled = yaml.safe_dump( message_object ) + + gzipped = zlib.compress( yamled ) + + padded = PadAESMessage( gzipped ) + + aes_key = os.urandom( 32 ) + iv = os.urandom( 16 ) # initialisation vector, aes block_size is 16 + + aes_cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, iv ) + + encrypted_message = aes_cipher.encrypt( padded ) + + rsa_cipher = Crypto.Cipher.PKCS1_OAEP.new( public_key ) + + # my understanding is that I don't have to manually pad this, cause OAEP does it for me. 
+ # if that is wrong, then lol + encrypted_aes_key = rsa_cipher.encrypt( aes_key + iv ) + + whole_encrypted_message = encrypted_aes_key + encrypted_message + + return whole_encrypted_message + +def UnpackageDeliveredMessage( whole_encrypted_message, private_key_text ): + + private_key = TextToKey( private_key_text ) + + encrypted_aes_key = whole_encrypted_message[:256] + + rsa_cipher = Crypto.Cipher.PKCS1_OAEP.new( private_key ) + + aes_key_and_iv = rsa_cipher.decrypt( encrypted_aes_key ) + + aes_key = aes_key_and_iv[:32] + + iv = aes_key_and_iv[32:] + + encrypted_message = whole_encrypted_message[256:] + + aes_cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, iv ) + + padded = aes_cipher.decrypt( encrypted_message ) + + gzipped = UnpadAESMessage( padded ) + + yamled = zlib.decompress( gzipped ) + + message = yaml.safe_load( yamled ) + + return message + +class Message( HC.HydrusYAMLBase ): + + yaml_tag = u'!Message' + + def __init__( self, conversation_key, contact_from, contacts_to, subject, body, timestamp, files = [], private_key = None ): + + if contact_from is not None and contact_from.GetName() == 'Anonymous': contact_from = None + + self._contact_from = contact_from + self._contacts_to = contacts_to + self._subject = subject + self._body = body + self._files = files + + self._timestamp = timestamp + + self._conversation_key = conversation_key + + hash_object = self._GetHashObject() + + if private_key is None: self._signature = None + else: + + private_key_object = TextToKey( private_key ) + + signer = Crypto.Signature.PKCS1_v1_5.new( private_key_object ) + + self._signature = signer.sign( hash_object ) + + + self._message_key = hash_object.digest() + + + def _GetHashObject( self ): + + message = '' + + if self._contact_from is not None: message += yaml.safe_dump( self._contact_from.GetPublicKey() ) + + contact_to_public_keys = [ contact_to.GetPublicKey() for contact_to in self._contacts_to ] + + contact_to_public_keys.sort() + + if type( self._subject ) == unicode: subject_text = self._subject.encode( 'utf-8' ) + else: subject_text = self._subject + + if type( self._body ) == unicode: body_text = self._body.encode( 'utf-8' ) + else: body_text = self._body + + message += ''.join( [ yaml.safe_dump( public_key ) for public_key in contact_to_public_keys ] ) + subject_text + body_text + ''.join( self._files ) + str( self._conversation_key ) + str( self._timestamp ) + + hash_object = Crypto.Hash.SHA256.new( message ) + + return hash_object + + + def GetContactFrom( self ): return self._contact_from + + def GetContactsTo( self ): return self._contacts_to + + def GetInfo( self ): + + if self._conversation_key is None: conversation_key = self._message_key + else: conversation_key = self._conversation_key + + return ( self._contact_from, self._contacts_to, self._message_key, conversation_key, self._timestamp, self._subject, self._body, self._files ) + + + def GetMessageKey( self ): return self._message_key + + def VerifyIsFromCorrectPerson( self, public_key_text ): + + public_key = TextToKey( public_key_text ) + + hash_object = self._GetHashObject() + + self._message_key = hash_object.digest() + + verifier = Crypto.Signature.PKCS1_v1_5.new( public_key ) + + return verifier.verify( hash_object, self._signature ) + + \ No newline at end of file diff --git a/include/HydrusPubSub.py b/include/HydrusPubSub.py new file mode 100755 index 00000000..b959e1fd --- /dev/null +++ b/include/HydrusPubSub.py @@ -0,0 +1,103 @@ +import Queue +import threading +import traceback +import 
weakref +import wx +import wx.lib.newevent + +( PubSubEvent, EVT_PUBSUB ) = wx.lib.newevent.NewEvent() + +class HydrusPubSub(): + + def __init__( self ): + + self._pubsubs = Queue.Queue() + + self._lock = threading.Lock() + + self._topics_to_objects = {} + self._topics_to_method_names = {} + + + def GetQueue( self ): return self._pubsubs + + def pub( self, topic, *args, **kwargs ): + + with self._lock: + + if topic in self._topics_to_objects: + + try: + + objects = self._topics_to_objects[ topic ] + + for object in objects: + + method_names = self._topics_to_method_names[ topic ] + + for method_name in method_names: + + if hasattr( object, method_name ): + + try: + + self._pubsubs.put( ( getattr( object, method_name ), args, kwargs ) ) + + wx.PostEvent( wx.GetApp(), PubSubEvent() ) + + except wx.PyDeadObjectError: pass + except: print( topic + ' for ' + str( object ) + ' bound to ' + method_name + os.linesep + traceback.format_exc() ) + + + + + except: pass + + + + + def pubimmediate( self, topic, *args, **kwargs ): + + with self._lock: + + if topic in self._topics_to_objects: + + try: + + objects = self._topics_to_objects[ topic ] + + for object in objects: + + method_names = self._topics_to_method_names[ topic ] + + for method_name in method_names: + + if hasattr( object, method_name ): + + try: getattr( object, method_name )( *args, **kwargs ) + except wx.PyDeadObjectError: pass + except: print( topic + ' for ' + str( object ) + ' bound to ' + method_name + os.linesep + traceback.format_exc() ) + + + + + except RuntimeError: pass # sometimes the set changes size during iteration, which is a bug I haven't tracked down + except wx.PyDeadObjectError: pass + except TypeError: pass + except: print( traceback.format_exc() ) + + + + + def sub( self, object, method_name, topic ): + + with self._lock: + + if topic not in self._topics_to_objects: self._topics_to_objects[ topic ] = weakref.WeakSet() + if topic not in self._topics_to_method_names: self._topics_to_method_names[ topic ] = set() + + self._topics_to_objects[ topic ].add( object ) + self._topics_to_method_names[ topic ].add( method_name ) + + + \ No newline at end of file diff --git a/include/HydrusServer.py b/include/HydrusServer.py new file mode 100755 index 00000000..e8a63f6a --- /dev/null +++ b/include/HydrusServer.py @@ -0,0 +1,578 @@ +import BaseHTTPServer +import hashlib +import httplib +import HydrusConstants as HC +import HydrusFlashHandling +import HydrusImageHandling +import HydrusVideoHandling +import os +import SocketServer +import traceback +import urllib +import wx +import yaml + +eris = '''hydrus
+                         8888  8888888
+                  888888888888888888888888
+               8888:::8888888888888888888888888
+             8888::::::8888888888888888888888888888
+            88::::::::888:::8888888888888888888888888
+          88888888::::8:::::::::::88888888888888888888
+        888 8::888888::::::::::::::::::88888888888   888
+           88::::88888888::::m::::::::::88888888888    8
+         888888888888888888:M:::::::::::8888888888888
+        88888888888888888888::::::::::::M88888888888888
+        8888888888888888888888:::::::::M8888888888888888
+         8888888888888888888888:::::::M888888888888888888
+        8888888888888888::88888::::::M88888888888888888888
+      88888888888888888:::88888:::::M888888888888888   8888
+     88888888888888888:::88888::::M::;o*M*o;888888888    88
+    88888888888888888:::8888:::::M:::::::::::88888888    8
+   88888888888888888::::88::::::M:;:::::::::::888888888
+  8888888888888888888:::8::::::M::aAa::::::::M8888888888       8
+  88   8888888888::88::::8::::M:::::::::::::888888888888888 8888
+ 88  88888888888:::8:::::::::M::::::::::;::88:88888888888888888
+ 8  8888888888888:::::::::::M::"@@@@@@@"::::8w8888888888888888
+  88888888888:888::::::::::M:::::"@a@":::::M8i888888888888888
+ 8888888888::::88:::::::::M88:::::::::::::M88z88888888888888888
+8888888888:::::8:::::::::M88888:::::::::MM888!888888888888888888
+888888888:::::8:::::::::M8888888MAmmmAMVMM888*88888888   88888888
+888888 M:::::::::::::::M888888888:::::::MM88888888888888   8888888
+8888   M::::::::::::::M88888888888::::::MM888888888888888    88888
+ 888   M:::::::::::::M8888888888888M:::::mM888888888888888    8888
+  888  M::::::::::::M8888:888888888888::::m::Mm88888 888888   8888
+   88  M::::::::::::8888:88888888888888888::::::Mm8   88888   888
+   88  M::::::::::8888M::88888::888888888888:::::::Mm88888    88
+   8   MM::::::::8888M:::8888:::::888888888888::::::::Mm8     4
+       8M:::::::8888M:::::888:::::::88:::8888888::::::::Mm    2
+      88MM:::::8888M:::::::88::::::::8:::::888888:::M:::::M
+     8888M:::::888MM::::::::8:::::::::::M::::8888::::M::::M
+    88888M:::::88:M::::::::::8:::::::::::M:::8888::::::M::M
+   88 888MM:::888:M:::::::::::::::::::::::M:8888:::::::::M:
+   8 88888M:::88::M:::::::::::::::::::::::MM:88::::::::::::M
+     88888M:::88::M::::::::::*88*::::::::::M:88::::::::::::::M
+    888888M:::88::M:::::::::88@@88:::::::::M::88::::::::::::::M
+    888888MM::88::MM::::::::88@@88:::::::::M:::8::::::::::::::*8
+    88888  M:::8::MM:::::::::*88*::::::::::M:::::::::::::::::88@@
+    8888   MM::::::MM:::::::::::::::::::::MM:::::::::::::::::88@@
+     888    M:::::::MM:::::::::::::::::::MM::M::::::::::::::::*8
+     888    MM:::::::MMM::::::::::::::::MM:::MM:::::::::::::::M
+      88     M::::::::MMMM:::::::::::MMMM:::::MM::::::::::::MM
+       88    MM:::::::::MMMMMMMMMMMMMMM::::::::MMM::::::::MMM
+        88    MM::::::::::::MMMMMMM::::::::::::::MMMMMMMMMM
+         88   8MM::::::::::::::::::::::::::::::::::MMMMMM
+          8   88MM::::::::::::::::::::::M:::M::::::::MM
+              888MM::::::::::::::::::MM::::::MM::::::MM
+             88888MM:::::::::::::::MMM:::::::mM:::::MM
+             888888MM:::::::::::::MMM:::::::::MMM:::M
+            88888888MM:::::::::::MMM:::::::::::MM:::M
+           88 8888888M:::::::::MMM::::::::::::::M:::M
+           8  888888 M:::::::MM:::::::::::::::::M:::M:
+              888888 M::::::M:::::::::::::::::::M:::MM
+             888888  M:::::M::::::::::::::::::::::::M:M
+             888888  M:::::M:::::::::@::::::::::::::M::M
+             88888   M::::::::::::::@@:::::::::::::::M::M
+            88888   M::::::::::::::@@@::::::::::::::::M::M
+           88888   M:::::::::::::::@@::::::::::::::::::M::M
+          88888   M:::::m::::::::::@::::::::::Mm:::::::M:::M
+          8888   M:::::M:::::::::::::::::::::::MM:::::::M:::M
+         8888   M:::::M:::::::::::::::::::::::MMM::::::::M:::M
+        888    M:::::Mm::::::::::::::::::::::MMM:::::::::M::::M
+      8888    MM::::Mm:::::::::::::::::::::MMMM:::::::::m::m:::M
+     888      M:::::M::::::::::::::::::::MMM::::::::::::M::mm:::M
+  8888       MM:::::::::::::::::::::::::MM:::::::::::::mM::MM:::M:
+             M:::::::::::::::::::::::::M:::::::::::::::mM::MM:::Mm
+            MM::::::m:::::::::::::::::::::::::::::::::::M::MM:::MM
+            M::::::::M:::::::::::::::::::::::::::::::::::M::M:::MM
+           MM:::::::::M:::::::::::::M:::::::::::::::::::::M:M:::MM
+           M:::::::::::M88:::::::::M:::::::::::::::::::::::MM::MMM 
+           M::::::::::::8888888888M::::::::::::::::::::::::MM::MM 
+           M:::::::::::::88888888M:::::::::::::::::::::::::M::MM
+           M::::::::::::::888888M:::::::::::::::::::::::::M::MM
+           M:::::::::::::::88888M:::::::::::::::::::::::::M:MM
+           M:::::::::::::::::88M::::::::::::::::::::::::::MMM
+           M:::::::::::::::::::M::::::::::::::::::::::::::MMM
+           MM:::::::::::::::::M::::::::::::::::::::::::::MMM
+            M:::::::::::::::::M::::::::::::::::::::::::::MMM
+            MM:::::::::::::::M::::::::::::::::::::::::::MMM
+             M:::::::::::::::M:::::::::::::::::::::::::MMM
+             MM:::::::::::::M:::::::::::::::::::::::::MMM
+              M:::::::::::::M::::::::::::::::::::::::MMM
+              MM:::::::::::M::::::::::::::::::::::::MMM
+               M:::::::::::M:::::::::::::::::::::::MMM
+               MM:::::::::M:::::::::::::::::::::::MMM
+                M:::::::::M::::::::::::::::::::::MMM
+                MM:::::::M::::::::::::::::::::::MMM
+                 MM::::::M:::::::::::::::::::::MMM
+                 MM:::::M:::::::::::::::::::::MMM
+                  MM::::M::::::::::::::::::::MMM 
+                  MM:::M::::::::::::::::::::MMM
+                   MM::M:::::::::::::::::::MMM              THIS IS THE HYDRUS SERVER ADMIN SERVICE, VERSION ''' + str( HC.SOFTWARE_VERSION ) + '''
+                   MM:M:::::::::::::::::::MMM
+                    MMM::::::::::::::::::MMM
+                    MM::::::::::::::::::MMM
+                     M:::::::::::::::::MMM
+                    MM::::::::::::::::MMM
+                    MM:::::::::::::::MMM
+                    MM::::M:::::::::MMM:
+                    mMM::::MM:::::::MMMM
+                     MMM:::::::::::MMM:M
+                     mMM:::M:::::::M:M:M
+                      MM::MMMM:::::::M:M
+                      MM::MMM::::::::M:M
+                      mMM::MM::::::::M:M
+                       MM::MM:::::::::M:M
+                       MM::MM::::::::::M:m
+                       MM:::M:::::::::::MM
+                       MMM:::::::::::::::M:
+                       MMM:::::::::::::::M:
+                       MMM::::::::::::::::M
+                       MMM::::::::::::::::M
+                       MMM::::::::::::::::Mm
+                        MM::::::::::::::::MM
+                        MMM:::::::::::::::MM
+                        MMM:::::::::::::::MM
+                        MMM:::::::::::::::MM
+                        MMM:::::::::::::::MM
+                         MM::::::::::::::MMM
+                         MMM:::::::::::::MM
+                         MMM:::::::::::::MM
+                         MMM::::::::::::MM
+                          MM::::::::::::MM
+                          MM::::::::::::MM
+                          MM:::::::::::MM
+                          MMM::::::::::MM
+                          MMM::::::::::MM
+                           MM:::::::::MM
+                           MMM::::::::MM
+                           MMM::::::::MM
+                            MM::::::::MM
+                            MMM::::::MM
+                            MMM::::::MM
+                             MM::::::MM
+                             MM::::::MM
+                              MM:::::MM
+                              MM:::::MM:
+                              MM:::::M:M
+                              MM:::::M:M
+                              :M::::::M:
+                             M:M:::::::M
+                            M:::M::::::M
+                           M::::M::::::M
+                          M:::::M:::::::M
+                         M::::::MM:::::::M
+                         M:::::::M::::::::M
+                         M;:;::::M:::::::::M
+                         M:m:;:::M::::::::::M
+                         MM:m:m::M::::::::;:M
+                          MM:m::MM:::::::;:;M
+                           MM::MMM::::::;:m:M
+                            MMMM MM::::m:m:MM
+                                  MM::::m:MM
+                                   MM::::MM
+                                    MM::MM
+                                     MMMM
+
''' + +silly_responses = [] +silly_responses.append( u'\ufeff\u25d5 \u203f\u203f \u25d5' ) +silly_responses.append( u"The disco. We go to disco. My body's sweaty from the MDMA inside it. I like to dance with you. You grab my ponytail. It is greasy with Germanic juices that I put inside my hair. Disco, we are the disco. I have a mesh shirt. My leather pants show off my sausage inside it. I grind your body, then we eat ecstasy and have Special K inside of the bathroom. It's a men's bathroom, but no one cares that you come inside because they know that inside it we do lots of drugs. And I will share them if the bouncer lets me go into the bathroom with you, and then we go home. We have efficient sex. And then I realize you're not that hot anymore because I've blown a load and I don't have ecstasy inside of my bloodstream. So I make sandwich. It has hazelnuts, bread, and some jelly that I got from the supermarket. It tastes pretty good, but it probably tastes better because my taste buds have ecstasy inside them. And then I go up to the bathroom, and you're wearing one of my shirts; that isn't cool. You didn't even ask. I met you earlier the evening; you're not my girlfriend, you're just girl that I have sex with. We probably won't do this again because I realize that your hair is frazzled and it probably has extensions. It's not your real hair, and that's kind of gross 'cause who knows where it came from." ) +silly_responses.append( u'\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\n\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\n\u2591\u2588\u2591\u2591\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2588\u2591\n\u2588\u2591\u2591\u2588\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2588\n\u2588\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\n\u2588\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2588\n\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\n\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\n\u2588\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\
u2591\u2588\n\u2591\u2588\u2591\u2591\u2591\u2588\u2593\u2593\u2593\u2593\u2593\u2593\u2593\u2593\u2588\u2588\u2588\u2588\u2588\u2593\u2593\u2593\u2588\u2591\u2591\u2591\u2591\u2588\u2591\n\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2593\u2593\u2593\u2593\u2593\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2593\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2591\n\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2593\u2593\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2592\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2591\u2591\n\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2592\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2588\u2588\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591' ) +silly_responses.append( u' (\u25cb\u220b\u25cb)\n\u250f( \u7537 )\u251b\n \uff0f \u2513\nA mystery' ) +silly_responses.append( u'\u25b2\n\u25b2 \u25b2' ) +silly_responses.append( u'\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2584\u2584\u2584\u2584\u2584\u2584\u2584\u2584\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2584\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2584\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2584\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2584\u2591\u2591\u2591\n\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2584\u2591\u2591\n\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\n\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\n\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\n\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2580\u2580\u2580\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2591\u2591\n\u2591\u2591\u2580\u2588\u2588\u2588\u2588\u2588\u2580\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2580\u2588\u2588\u2588\u2588\u2588\u2591\u2591\u2591\n\u2591\u2591\u2591\u2580\u2580\u2588\u2588\u2588\u2591\u2591\u2588\u2591\u2591\u2591\u2588\u2591\u2591\u2588\u2588\u2588\u2580\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2580\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2584\u2588\u2588\u2584\u2591\u2591\u2580\u2580\u2591\u2584\u2588\u2580\u2584\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2584\u2580\u2591\u2580\u2584\u2580\u2580\u2588\u2588\u2588\u2580\u2580\u2584\u2580\u2591\u2580\u2584\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2580\u2584\u2580\u2591\u2580\u2584\u2580\u2591\u2584\u2591\u2591\u2588\u2591\u2591\u2591\u2591
\n\u2591\u2591\u2591\u2591\u2588\u2591\u2588\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2588\u2591\u2591\u2588\u2591\u2591\u2588\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2588\u2591\u2588\u2591\u2591\u2591\u2591\u2580\u2584\u2580\u2580\u2580\u2580\u2588\u2591\u2591\u2588\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2588\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2584\u2591\u2591\u2584\u2588\u2588\u2584\u2584\u2580\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2588\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2584\u2591\u2591\u2588\u2588\u2588\u2588\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2584\u2591\u2591\u2591\u2584\u2584\u2584\u2591\u2591\u2591\u2584\u2580\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2580\u2580\u2588\u2580\u2580\u2580\u2591\u2584\u2591\u2580\u2580\u2580\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2584\u2584\u2584\u2584\u2588\u2584\u2584\u2584\u2584\u2588\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2588\u2588\u2588\u2588\u2588\u2588\u2584\u2584\u2584\u2584\u2580\u2591\u2591\u2591\u2591\u2591\u2591\u2591\n\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2580\u2580\u2580\u2580\u2580\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591\u2591' ) +silly_responses.append( u'\u0ca0_\u0ca0' ) +silly_responses.append( u'(\xb4_\u309d`)' ) +silly_responses.append( u'\u3000\u3000\u3000\u3000\u3000\u3000\u3000\u3000\u3000\uff0f\u2312\u30fd\n\u3000\u3000\u3000\u2282\u4e8c\u4e8c\u4e8c\uff08\u3000\uff3e\u03c9\uff3e\uff09\u4e8c\u2283\n\u3000\u3000\u3000\u3000\u3000\u3000\u3000\u3000|\u3000\u3000\u3000 / \u3000\u3000\u3000\u3000\u3000\u3000BU-N\n\u3000\u3000\u3000\u3000 \u3000\u3000\u3000 \uff08\u3000\u30fd\u30ce\n\u3000\u3000\u3000\u3000\u3000\u3000\u3000\u3000 \u30ce>\u30ce\u3000\n\u3000\u3000\u3000\u3000 \u4e09\u3000\u3000\u30ec\u30ec' ) +silly_responses.append( u'Gen.\n\nThis is wheat.\n\nA blue bud is put out at the cruel\nwinter, it is stepped many times, it\nexpands strongly and straight, and it becomes\nwheat that bears the fruit.' ) +silly_responses.append( u'I\'m a hardcore fan with FF8, so many times I would even try to escape reality with lucid dreaming, I would be in Balamb Garden where I would often train with Zell in the training center and help Selphie with the Garden Festival, but one day as I was talking to Selphie, we went to my dormitory for a private talk. She immediately said, "You know you could live with us forever.." I gave her a confused look and she continued, "We understand that you live on earth and you REALLY wish to live here". I then said "How..How did you know?" She then giggled and said "Because we\'ve been watching you, silly!" This was a dream come true and I almost cried right there. She then said, "I talked with Headmaster Cid and he agreed that you would be PERFECT for SeeD, you just have to do...one thing". She then held my hand and looked deep into my eyes and said "...You have to kill yourself for the transfer to work correctly." I then gave her some questions, "How long do I have before the deal expires?" She then said "Cid said 3 months.." 
I added by saying "What\'s the most painless way?..." She giggled again, "Suicide will require pain buuut...if you want it quick...Get a gun and a nice shot to the head works, but make sure you put the gun in your mouth for better accuracy, If you can\'t afford a gun, jump off a very VERY tall building, I wouldn\'t recommend pills because it\'s not a guaranteed method of dying, you could just get really really sick and be hospitalized." I then agreed and she gave me a kiss on the forehead, "I know this will be tough but once it\'s over and done, you\'ll get to live here!" I then woke up and this was last week and purchased a gun (I\'ll even post the gun with receipt, if you want) But Friday I might actually kill myself because that dream just felt too fucking real to be fake and my life isn\'t doing so grand.' ) +silly_responses.append( u'Consider this: A pack of wild Niggers.\nSavage, slavering Niggers nearing your white home. Trampling your white lawn. Raping your white daughter.\nAnd you can\'t do shit since they\'re savages. The Nigger leader grabs your wife and fucks her with his shaman stick.\nThe primal Niggers finally dominate your household. They watch barbaric shows on TV and you are forced to be their slave.\nSuch is the downfall of White Man.' ) +silly_responses.append( u'\uFF37\uFF2F\uFF35\uFF2C\uFF24 \uFF39\uFF2F\uFF35 \uFF2C\uFF29\uFF2B\uFF25 \uFF34\uFF2F \uFF22\uFF25 \uFF2D\uFF21\uFF2B\uFF29\uFF2E\uFF27 \uFF26\uFF35\uFF23\uFF2B' ) +silly_responses.append( u'Israelis expect that thousands http://www.canadagooses.co.uk/kensington-parka-c-15.html of missiles might be fired at their cities by Iran\u2019s http://www.monclersole.com/moncler-jackets-womens-c-3.html clients in Lebanon and the Gaza Strip, while U.S. forces might be attacked in Afghanistan, Iraq or in the Persian http://www.monclersnorge.com Gulf. But China has proven to be a continuous http://www.monclerfactoryoutlets.com complication. On trade, Mr. Obama has repeatedly pressured China to http://www.monclerstyle2011.comallow its currency to http://www.doudounemoncleralpin.com appreciate, only to be told by Beijing that China is doing enough. On national security, China http://www.canadagooseexpedition.ca is extending its claims in the region, worrying U.S. partners and allies who both depend on China for trade but fear it http://www.imonclerdoudoune.com may exercise its power in more forceful ways. Toomey acknowledged http://www.monclerfronline.com that both sides have \u201Ca ways to go\u201D in reaching an http://www.monclersole.com/ agreement, but told Fox News: \u201CI am not giving up on getting something done. I think we http://www.monclerdoudounepascher2011.com/ still can, and http://www.monclerdoudounepascher2011.com I am going to do everything to achieve that.\u201DAlso appearing on the program http://www.canadiangoosejacket.ca was Representative James Clyburn, a South Carolina http://www.canadagooseolinestore.com Democrat, who said he remained \u201Cvery hopeful\u201D that both sides will http://www.moncler2u.co.uk reach a compromise before their deadline, now http://www.monclerbransondoudoune.fr less than two weeks away.' ) +silly_responses.append( u'i am a heron. i haev a long neck and i pick fish out of the water w/ my beak. 
if you dont repost this comment on 10 other pages i will fly into your kitchen tonight and make a mess of your pots and pans' ) +silly_responses.append( u'The maritine avains lack order and are wrought with decent among their kind .\r\n to promote forms of good tidings , appeasement and fair boon . \r\n\r\n One tender note in sequence of two or \r\n a a golden coin tender in sequence of one .\r\n\r\n Forfeiting and abandoning of these are signs of the above mentioned.' ) +silly_responses.append( u'It is the VIPPER beatdown!\r\n Kick that dokyun around!\r\n Bury him six feet under the ground!\r\n Ain\'t no QUALITY but VIP QUALITY in this town.' ) +silly_responses.append( u'IAmA heron. i ahev a long neck and i pick fish out of the water w/ my beak. if you dont repost this comment on 10 other pages i will fly into your kitchen tonight and make a mess of your pots and pans' ) + +CLIENT_ROOT_MESSAGE = ''' + + hydrus client + + +

This hydrus client uses software version ''' + str( HC.SOFTWARE_VERSION ) + ''' and network version ''' + str( HC.NETWORK_VERSION ) + '''.
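A hydrus client announces itself to a hydrus service with a 'hydrus/<network version>' token in its User-Agent header; later in this file, _ProcessRequest parses that token and answers a mismatch with a 426 response. A rough standalone sketch of that check, using a plain ValueError where the original raises HC.NetworkVersionException:

def parse_user_agent( user_agent_text, current_network_version ):
    
    hydrus_client = False
    
    if user_agent_text is not None:
        
        for user_agent in user_agent_text.split( ' ' ):
            
            if '/' not in user_agent: continue
            
            ( client, network_version ) = user_agent.split( '/', 1 )
            
            if client == 'hydrus':
                
                if int( network_version ) == current_network_version: hydrus_client = True
                else: raise ValueError( 'network version mismatch: server ' + str( current_network_version ) + ', client ' + network_version )
                
            
        
    
    return hydrus_client

assert parse_user_agent( 'hydrus/45', 45 ) == True # another hydrus instance, same network version
assert parse_user_agent( 'Mozilla/5.0 (X11; Linux x86_64)', 45 ) == False # a browser gets the html root page instead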

+

It only serves requests from 127.0.0.1.
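Further down, _ProcessRequest enforces this for the local file service by checking the connection's peer address before handling anything else. A minimal sketch of that rule; the exception class here stands in for HC.ForbiddenException:

class ForbiddenException( Exception ): pass # stand-in for HC.ForbiddenException

def check_local_only( client_address ):
    
    # client_address is the ( ip, port ) pair BaseHTTPRequestHandler stores per connection
    ( ip, port ) = client_address
    
    if ip != '127.0.0.1': raise ForbiddenException( 'Only local access allowed!' )
    

check_local_only( ( '127.0.0.1', 50000 ) ) # accepted
try: check_local_only( ( '192.168.1.10', 50000 ) )
except ForbiddenException as e: print( e ) # rejected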

+ +''' + +ROOT_MESSAGE_BEGIN = ''' + + hydrus service + + +

This hydrus service uses software version ''' + str( HC.SOFTWARE_VERSION ) + ''' and network version ''' + str( HC.NETWORK_VERSION ) + '''.

+

''' + +ROOT_MESSAGE_END = '''

+ +''' + +def ParseAccountIdentifier( authorisation_text ): + + if authorisation_text is None: access_key = '' + else: + + ( format, access_key_encoded ) = authorisation_text.split( ' ' ) + + if format != 'hydrus_network': raise HC.ForbiddenException( 'Authorisation format error!' ) + + try: access_key = access_key_encoded.decode( 'hex' ) + except: raise HC.ForbiddenException( 'Attempted to parse access key, but could not understand it.' ) + + + return HC.AccountIdentifier( access_key = access_key ) + + +def ParseFileArguments( file ): + + args = {} + + file = HydrusImageHandling.ConvertToPngIfBmp( file ) + + args[ 'file' ] = file + + size = len( file ) + + if size == 0: raise HC.ForbiddenException( 'Not interested in files of zero length' ) + + mime = HC.GetMimeFromString( file ) + + if mime not in HC.ALLOWED_MIMES: raise HC.ForbiddenException( 'Currently, only jpg, gif, bmp, png and swf are supported.' ) + + hash = hashlib.sha256( file ).digest() + + args[ 'hash' ] = hash + args[ 'size' ] = size + args[ 'mime' ] = mime + + if mime in HC.IMAGES: + + try: image_container = HydrusImageHandling.RenderImageFromFile( file, hash ) + except: raise HC.ForbiddenException( 'Could not load that file as an image.' ) + + ( width, height ) = image_container.GetSize() + + args[ 'width' ] = width + args[ 'height' ] = height + + if image_container.IsAnimated(): + + duration = image_container.GetTotalDuration() + num_frames = image_container.GetNumFrames() + + args[ 'duration' ] = duration + args[ 'num_frames' ] = num_frames + + + try: thumbnail = HydrusImageHandling.GenerateThumbnailFileFromFile( file, HC.UNSCALED_THUMBNAIL_DIMENSIONS ) + except: raise HC.ForbiddenException( 'Could not generate thumbnail from that file.' ) + + args[ 'thumbnail' ] = thumbnail + + elif mime == HC.APPLICATION_FLASH: + + ( ( width, height ), duration, num_frames ) = HydrusFlashHandling.GetFlashProperties( file ) + + args[ 'width' ] = width + args[ 'height' ] = height + args[ 'duration' ] = duration + args[ 'num_frames' ] = num_frames + + elif mime == HC.VIDEO_FLV: + + ( ( width, height ), duration, num_frames ) = HydrusVideoHandling.GetFLVProperties( file ) + + args[ 'width' ] = width + args[ 'height' ] = height + args[ 'duration' ] = duration + args[ 'num_frames' ] = num_frames + + + return args + +def ParseHTTPGETArguments( path ): + + path = urllib.unquote( path ) + + arguments = {} + + if '?' in path: + + raw_arguments = path.split( '?', 1 )[1] + + for raw_argument in raw_arguments.split( '&' ): + + if '=' in raw_argument: + + [ name, value ] = raw_argument.split( '=', 1 ) + + if name in ( 'begin', 'num', 'expiration', 'subject_account_id', 'service_type', 'service_port', 'since' ): + + try: arguments[ name ] = int( value ) + except: raise HC.ForbiddenException( 'I was expecting to parse ' + name + ' as an integer, but it failed.' ) + + elif name in ( 'access_key', 'title', 'subject_access_key', 'contact_key', 'hash', 'subject_hash', 'subject_tag', 'message_key' ): + + try: arguments[ name ] = value.decode( 'hex' ) + except: raise HC.ForbiddenException( 'I was expecting to parse ' + name + ' as a hex-encoded string, but it failed.' 
) + + + + + if 'subject_account_id' in arguments: arguments[ 'subject_identifier' ] = HC.AccountIdentifier( access_key = arguments[ 'subject_account_id' ] ) + elif 'subject_access_key' in arguments: arguments[ 'subject_identifier' ] = HC.AccountIdentifier( access_key = arguments[ 'subject_access_key' ] ) + elif 'subject_tag' in arguments and 'subject_hash' in arguments: arguments[ 'subject_identifier' ] = HC.AccountIdentifier( tag = arguments[ 'subject_tag' ], hash = arguments[ 'subject_hash' ] ) + elif 'subject_hash' in arguments: arguments[ 'subject_identifier' ] = HC.AccountIdentifier( hash = arguments[ 'subject_hash' ] ) + + + return arguments + +def ParseHTTPPOSTArguments( request, body ): + + if request == 'file': args = ParseFileArguments( body ) + else: + + if body == '': args = () + else: args = yaml.safe_load( body ) + + + return args + +def ParseHTTPRequest( path ): + + path = urllib.unquote( path ) + + if not path.startswith( '/' ): return '' + + after_slash = path.split( '/', 1 )[1] + + return after_slash.split( '?', 1 )[0] + +class HydrusHTTPServer( SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer ): + + def __init__( self, service_identifier, message = '' ): + + self.daemon_threads = True + + self._service_identifier = service_identifier + self._message = message + + port = service_identifier.GetPort() + + BaseHTTPServer.HTTPServer.__init__( self, ( '', port ), HydrusHTTPRequestHandler ) + + HC.pubsub.sub( self, 'shutdown', 'shutdown' ) + + + def GetServiceIdentifier( self ): return self._service_identifier + + def GetMessage( self ): return self._message + + def SetMessage( self, message ): self._message = message + +class HydrusHTTPRequestHandler( BaseHTTPServer.BaseHTTPRequestHandler ): + + server_version = 'hydrus/' + str( HC.NETWORK_VERSION ) + protocol_version = 'HTTP/1.1' + + with open( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', 'rb' ) as f: _favicon = f.read() + + def __init__( self, request, client_address, server ): + + self._service_identifier = server.GetServiceIdentifier() + + try: BaseHTTPServer.BaseHTTPRequestHandler.__init__( self, request, client_address, server ) + except: self.log_string( 'Connection reset by peer' ) + + + def _ProcessRequest( self, request_type ): + + try: + + try: + + hydrus_client = False + + user_agent_text = self.headers.getheader( 'User-Agent' ) + + if user_agent_text is not None: + + user_agents = user_agent_text.split( ' ' ) + + for user_agent in user_agents: + + if '/' in user_agent: + + ( client, network_version ) = user_agent.split( '/', 1 ) + + if client == 'hydrus': + + if int( network_version ) == HC.NETWORK_VERSION: hydrus_client = True + else: raise HC.NetworkVersionException( 'Network version mismatch! This repository\'s network version is ' + str( HC.NETWORK_VERSION ) + ' whereas yours is ' + network_version + '! Please download the latest release.' ) + + + + + + if hydrus_client: + + default_mime = HC.APPLICATION_YAML + default_encoding = lambda x: yaml.safe_dump( unicode( x ) ) + + else: + + default_mime = HC.TEXT_HTML + default_encoding = lambda x: unicode( x ) + + + ( ip, port ) = self.client_address + + service_type = self._service_identifier.GetType() + + if service_type == HC.LOCAL_FILE and ip != '127.0.0.1': raise HC.ForbiddenException( 'Only local access allowed!' ) + + request = ParseHTTPRequest( self.path ) + + if ( service_type, request_type, request ) not in HC.ALLOWED_REQUESTS: raise HC.ForbiddenException( 'This service does not support that request.' 
) + + if request_type == HC.GET: + + request_args = ParseHTTPGETArguments( self.path ) + request_length = 0 + + elif request_type == HC.POST: + + body = self.rfile.read( int( self.headers.getheader( 'Content-Length', default = 0 ) ) ) + + request_args = ParseHTTPPOSTArguments( request, body ) + request_length = len( body ) + + + if request == '': + + if service_type == HC.LOCAL_FILE: body = CLIENT_ROOT_MESSAGE + else: + + message = self.server.GetMessage() + + body = ROOT_MESSAGE_BEGIN + message + ROOT_MESSAGE_END + + + response_context = HC.ResponseContext( 200, mime = HC.TEXT_HTML, body = body ) + + elif request == 'favicon.ico': response_context = HC.ResponseContext( 200, mime = HC.IMAGE_ICON, body = self._favicon, filename = 'favicon.ico' ) + else: + + if service_type == HC.LOCAL_FILE: response_context = wx.GetApp().ProcessServerRequest( request_type, request, request_args ) + else: + + account_identifier = ParseAccountIdentifier( self.headers.getheader( 'Authorization' ) ) + + response_context = wx.GetApp().GetDB().AddJobServer( self._service_identifier, account_identifier, ip, request_type, request, request_args, request_length ) + + + + except: + + # wx.MessageBox( traceback.format_exc() ) + + raise + + + except KeyError: response_context = HC.ResponseContext( 403, mime = default_mime, body = default_encoding( 'It appears one or more parameters required for that request were missing.' ) ) + except HC.PermissionException as e: response_context = HC.ResponseContext( 401, mime = default_mime, body = default_encoding( e ) ) + except HC.ForbiddenException as e: response_context = HC.ResponseContext( 403, mime = default_mime, body = default_encoding( e ) ) + except HC.NotFoundException as e: response_context = HC.ResponseContext( 404, mime = default_mime, body = default_encoding( e ) ) + except HC.NetworkVersionException as e: response_context = HC.ResponseContext( 426, mime = default_mime, body = default_encoding( e ) ) + except Exception as e: + + self.log_string( traceback.format_exc() ) + + response_context = HC.ResponseContext( 500, mime = default_mime, body = default_encoding( 'The repository encountered an error it could not handle! Here is a dump of what happened, which will also be written to your client.log file. If it persists, please forward it to hydrus.admin@gmail.com:' + os.linesep + os.linesep + traceback.format_exc() ) ) + + + try: self._Respond( response_context ) + except: pass # wx.MessageBox( traceback.format_exc() ) + + + def _Respond( self, response_context ): + + status_code = response_context.GetStatusCode() + + self.send_response( status_code ) + + if response_context.HasBody(): + + ( mime, body ) = response_context.GetMimeBody() + + content_type = HC.mime_string_lookup[ mime ] + + if response_context.HasFilename(): + + filename = response_context.GetFilename() + + content_type += '; ' + filename + + + self.send_header( 'Content-Type', content_type ) + self.send_header( 'Content-Length', len( body ) ) + + self.end_headers() + + self.wfile.write( body ) + + else: + + self.send_header( 'Content-Length', '0' ) + self.end_headers() + + + + def do_GET( self ): self._ProcessRequest( HC.GET ) + + def do_OPTIONS( self ): + + service_type = self._service_identifier.GetType() + + if service_type == HC.LOCAL_FILE and ip != '127.0.0.1': raise HC.ForbiddenException( 'Only local access allowed!' 
) + + request = ParseHTTPRequest( self.path ) + + allowed = [ 'OPTIONS' ] + + if ( service_type, HC.GET, request ) in HC.ALLOWED_REQUESTS: allowed.append( 'GET' ) + if ( service_type, HC.POST, request ) in HC.ALLOWED_REQUESTS: allowed.append( 'POST' ) + + self.send_response( 200 ) + + self.send_header( 'Allow', ','.join( allowed ) ) + self.end_headers() + + + def do_POST( self ): self._ProcessRequest( HC.POST ) + + def log_message( self, format, *args ): print( "[%s] %s%s" % ( self.log_date_time_string(), format%args, os.linesep ) ) + + def log_request( self, *args ): pass # to start logging a little about every request, just delete this def. the default pushes to log_message + + def log_string( self, message ): print( message ) + + # this overrides the base method to no longer use the class variable server_version + def version_string( self ): + + service_type = self._service_identifier.GetType() + + server_version = HC.service_string_lookup[ service_type ] + '/' + str( HC.NETWORK_VERSION ) + + return server_version + ' ' + self.sys_version + diff --git a/include/HydrusVideoHandling.py b/include/HydrusVideoHandling.py new file mode 100755 index 00000000..edee1c94 --- /dev/null +++ b/include/HydrusVideoHandling.py @@ -0,0 +1,41 @@ +import cStringIO +from flvlib import tags as flv_tags +import traceback + +def GetFLVProperties( file ): + + file_handle = cStringIO.StringIO( file ) + + flv = flv_tags.FLV( file_handle ) + + script_tag = None + + for tag in flv.iter_tags(): + + if isinstance( tag, flv_tags.ScriptTag ): + + script_tag = tag + + break + + + + width = 853 + height = 480 + duration = 0 + num_frames = 0 + + if script_tag is not None: + + tag_dict = script_tag.variable + + # tag_dict can sometimes be a float? + # it is on the broken one I tried! 
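# Illustrative aside: per the comment above, script_tag.variable is not always a
# dict on broken files, and a membership test like 'width' in 0.0 raises TypeError,
# so a defensive reader would type-check first. A standalone sketch (helper name
# made up) with the same 853x480 fallbacks and millisecond/frame-count arithmetic
# as GetFLVProperties:
def read_flv_metadata( tag_dict ):
    
    ( width, height, duration, num_frames ) = ( 853, 480, 0, 0 )
    
    if isinstance( tag_dict, dict ):
        
        width = tag_dict.get( 'width', width )
        height = tag_dict.get( 'height', height )
        
        if 'duration' in tag_dict: duration = int( tag_dict[ 'duration' ] * 1000 ) # seconds -> milliseconds
        if 'framerate' in tag_dict: num_frames = int( ( duration / 1000.0 ) * tag_dict[ 'framerate' ] )
        
    
    return ( ( width, height ), duration, num_frames )

print( read_flv_metadata( { 'duration' : 12.5, 'framerate' : 25 } ) ) # -> ((853, 480), 12500, 312)
print( read_flv_metadata( 0.0 ) ) # broken metadata falls back to ((853, 480), 0, 0)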
+ + if 'width' in tag_dict: width = tag_dict[ 'width' ] + if 'height' in tag_dict: height = tag_dict[ 'height' ] + if 'duration' in tag_dict: duration = int( tag_dict[ 'duration' ] * 1000 ) + if 'framerate' in tag_dict: num_frames = int( ( duration / 1000.0 ) * tag_dict[ 'framerate' ] ) + + + return ( ( width, height ), duration, num_frames ) diff --git a/include/ServerController.py b/include/ServerController.py new file mode 100755 index 00000000..2fe4bc06 --- /dev/null +++ b/include/ServerController.py @@ -0,0 +1,111 @@ +import httplib +import HydrusConstants as HC +import ServerDB +import os +import random +import threading +import time +import traceback +import wx +import yaml + +class Controller( wx.App ): + + def _AlreadyRunning( self, port ): + + connection = httplib.HTTPConnection( 'localhost:' + str( port ) ) + + try: + + connection.connect() + connection.close() + + return True + + except: return False + + + def ChangePort( self, port ): + + new_server = self._server_callable( port ) + + server_daemon = threading.Thread( target=new_server.serve_forever ) + server_daemon.setDaemon( True ) + server_daemon.start() + + connection = httplib.HTTPConnection( 'localhost:' + str( port ) ) + + connection.connect() + + connection.request( 'GET', '/' ) + + response = connection.getresponse() + + data = response.read() + + if response.status != 200: raise Exception( yaml.safe_load( data ) ) + + connection.close() + + self._server.shutdown() + + self._server = new_server + + + def EventExit( self, event ): self._tbicon.Destroy() + + def EventPubSub( self, event ): + + pubsubs_queue = HC.pubsub.GetQueue() + + ( callable, args, kwargs ) = pubsubs_queue.get() + + try: callable( *args, **kwargs ) + except TypeError: pass + except Exception as e: + + print( type( e ) ) + print( traceback.format_exc() ) + + + pubsubs_queue.task_done() + + + def GetDB( self ): return self._db + + def OnInit( self ): + + try: self._db = ServerDB.DB() + except Exception as e: + + print( traceback.format_exc() ) + + return False + + + self.Bind( wx.EVT_MENU, self.EventExit, id=wx.ID_EXIT ) + + self.Bind( HC.EVT_PUBSUB, self.EventPubSub ) + + self._tbicon = TaskBarIcon() + + return True + + +class TaskBarIcon( wx.TaskBarIcon ): + + def __init__( self ): + + wx.TaskBarIcon.__init__( self ) + + icon = wx.Icon( HC.STATIC_DIR + os.path.sep + 'hydrus.ico', wx.BITMAP_TYPE_ICO ) + + self.SetIcon( icon, 'hydrus server' ) + + self._tbmenu = wx.Menu() + + self._tbmenu.Append( wx.ID_EXIT, 'exit' ) + + self.Bind( wx.EVT_TASKBAR_RIGHT_UP, lambda event: self.PopupMenu( self._tbmenu ) ) + + \ No newline at end of file diff --git a/include/ServerDB.py b/include/ServerDB.py new file mode 100755 index 00000000..313d397b --- /dev/null +++ b/include/ServerDB.py @@ -0,0 +1,2340 @@ +import collections +import dircache +import hashlib +import httplib +import HydrusConstants as HC +import HydrusServer +import os +import Queue +import shutil +import sqlite3 +import sys +import threading +import time +import traceback +import yaml +import wx + +class FileDB(): + + def _AddFile( self, c, service_id, account_id, file_dict ): + + hash = file_dict[ 'hash' ] + + hash_id = self._GetHashId( c, hash ) + + now = int( time.time() ) + + if c.execute( 'SELECT 1 FROM file_map WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() is None or c.execute( 'SELECT 1 FROM deleted_files WHERE service_id = ? 
AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() is None: + + size = file_dict[ 'size' ] + mime = file_dict[ 'mime' ] + + if 'width' in file_dict: width = file_dict[ 'width' ] + else: width = None + + if 'height' in file_dict: height = file_dict[ 'height' ] + else: height = None + + if 'duration' in file_dict: duration = file_dict[ 'duration' ] + else: duration = None + + if 'num_frames' in file_dict: num_frames = file_dict[ 'num_frames' ] + else: num_frames = None + + if 'num_words' in file_dict: num_words = file_dict[ 'num_words' ] + else: num_words = None + + options = self._GetOptions( c, service_id ) + + max_storage = options[ 'max_storage' ] + + if max_storage is not None: + + # this is wrong! no service_id in files_info. need to cross with file_map or w/e + ( current_storage, ) = c.execute( 'SELECT SUM( size ) FROM file_map, files_info USING ( hash_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if current_storage + size > max_storage: raise HC.ForbiddenException( 'The service is full! It cannot take any more files!' ) + + + dest_path = HC.SERVER_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + if not os.path.exists( dest_path ): + + file = file_dict[ 'file' ] + + with open( dest_path, 'wb' ) as f: f.write( file ) + + + if 'thumbnail' in file_dict: + + thumbnail_dest_path = HC.SERVER_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + if not os.path.exists( thumbnail_dest_path ): + + thumbnail = file_dict[ 'thumbnail' ] + + with open( thumbnail_dest_path, 'wb' ) as f: f.write( thumbnail ) + + + + if c.execute( 'SELECT 1 FROM files_info WHERE hash_id = ?;', ( hash_id, ) ).fetchone() is None: c.execute( 'INSERT OR IGNORE INTO files_info ( hash_id, size, mime, width, height, duration, num_frames, num_words ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ? );', ( hash_id, size, mime, width, height, duration, num_frames, num_words ) ) + + c.execute( 'INSERT OR IGNORE INTO file_map ( service_id, hash_id, account_id, timestamp ) VALUES ( ?, ?, ?, ? );', ( service_id, hash_id, account_id, now ) ) + + if options[ 'log_uploader_ips' ]: + + ip = file_dict[ 'ip' ] + + c.execute( 'INSERT INTO ip_addresses ( service_id, hash_id, ip, timestamp ) VALUES ( ?, ?, ?, ? );', ( service_id, hash_id, ip, now ) ) + + + + + def _AddFilePetition( self, c, service_id, account_id, reason_id, hash_ids ): + + self._ApproveOptimisedFilePetition( c, service_id, account_id, hash_ids ) + + valid_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_map WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) ] + + # this clears out any old reasons, if the user wants to overwrite them + c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND account_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( service_id, account_id ) ) + + c.executemany( 'INSERT OR IGNORE INTO file_petitions ( service_id, account_id, reason_id, hash_id ) VALUES ( ?, ?, ?, ? );', [ ( service_id, account_id, reason_id, hash_id ) for hash_id in valid_hash_ids ] ) + + + def _ApproveFilePetition( self, c, service_id, account_id, reason_id, hash_ids ): + + self._ApproveOptimisedFilePetition( c, service_id, account_id, hash_ids ) + + valid_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_map WHERE service_id = ? 
AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) ] + + self._RewardFilePetitioners( c, service_id, valid_hash_ids, 1 ) + + self._DeleteFiles( c, service_id, account_id, reason_id, valid_hash_ids ) + + + def _ApproveOptimisedFilePetition( self, c, service_id, account_id, hash_ids ): + + ( biggest_end, ) = c.execute( 'SELECT end FROM update_cache ORDER BY end DESC LIMIT 1;' ).fetchone() + + c.execute( 'DELETE FROM file_map WHERE service_id = ? AND account_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' AND timestamp > ?;', ( service_id, account_id, biggest_end ) ) + + + def _DeleteFiles( self, c, service_id, admin_account_id, reason_id, hash_ids ): + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + affected_timestamps = [ timestamp for ( timestamp, ) in c.execute( 'SELECT DISTINCT timestamp FROM file_map WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) ] + + c.execute( 'INSERT OR IGNORE INTO deleted_files ( service_id, hash_id, reason_id, account_id, admin_account_id, timestamp ) SELECT service_id, hash_id, ?, account_id, ?, ? FROM file_map WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( reason_id, admin_account_id, int( time.time() ), service_id ) ) + + c.execute( 'DELETE FROM file_map WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ) + + self._RefreshUpdateCache( c, service_id, affected_timestamps ) + + + def _DenyFilePetition( self, c, service_id, hash_ids ): + + self._RewardFilePetitioners( c, service_id, hash_ids, -1 ) + + c.execute( 'DELETE FROM file_petitions WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ) + + + def _GenerateFileUpdate( self, c, service_id, begin, end ): + + files_info = [ ( hash, size, mime, timestamp, width, height, duration, num_frames, num_words ) for ( hash, size, mime, timestamp, width, height, duration, num_frames, num_words ) in c.execute( 'SELECT hash, size, mime, timestamp, width, height, duration, num_frames, num_words FROM file_map, ( files_info, hashes USING ( hash_id ) ) USING ( hash_id ) WHERE service_id = ? AND timestamp BETWEEN ? AND ?;', ( service_id, begin, end ) ) ] + + deleted_hashes = [ hash for ( hash, ) in c.execute( 'SELECT hash FROM deleted_files, hashes USING ( hash_id ) WHERE service_id = ? AND timestamp BETWEEN ? AND ?;', ( service_id, begin, end ) ) ] + + news = c.execute( 'SELECT news, timestamp FROM news WHERE service_id = ? AND timestamp BETWEEN ? AND ?;', ( service_id, begin, end ) ).fetchall() + + return HC.HydrusUpdateFileRepository( files_info, deleted_hashes, news, begin, end ) + + + def _GenerateHashIdsEfficiently( self, c, hashes ): + + hashes_not_in_db = set( hashes ) + + for i in range( 0, len( hashes ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + hashes_subset = hashes[ i : i + 250 ] + + hashes_not_in_db.difference_update( [ hash for ( hash, ) in c.execute( 'SELECT hash FROM hashes WHERE hash IN (' + ','.join( '?' * len( hashes_subset ) ) + ');', [ sqlite3.Binary( hash ) for hash in hashes_subset ] ) ] ) + + + if len( hashes_not_in_db ) > 0: c.executemany( 'INSERT INTO hashes ( hash ) VALUES( ? 
);', [ ( sqlite3.Binary( hash ), ) for hash in hashes_not_in_db ] ) + + + def _GetFile( self, hash ): + + try: + + with open( HC.SERVER_FILES_DIR + os.path.sep + hash.encode( 'hex' ), 'rb' ) as f: file = f.read() + + except: raise HC.NotFoundException( 'Could not find that file!' ) + + return file + + + def _GetFilePetition( self, c, service_id ): + + result = c.execute( 'SELECT DISTINCT account_id, reason_id FROM file_petitions WHERE service_id = ? ORDER BY RANDOM() LIMIT 1;', ( service_id, ) ).fetchone() + + if result is None: raise HC.NotFoundException( 'No petitions!' ) + + ( account_id, reason_id ) = result + + petitioner_account_identifier = HC.AccountIdentifier( account_id = account_id ) + + reason = self._GetReason( c, reason_id ) + + hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM file_petitions WHERE service_id = ? AND account_id = ? AND reason_id = ?;', ( service_id, account_id, reason_id ) ) ] + + hashes = self._GetHashes( c, hash_ids ) + + return HC.ServerFilePetition( petitioner_account_identifier, reason, hashes ) + + + def _GetHash( self, c, hash_id ): + + result = c.execute( 'SELECT hash FROM hashes WHERE hash_id = ?;', ( hash_id, ) ).fetchone() + + if result is None: raise Exception( 'File hash error in database' ) + + ( hash, ) = result + + return hash + + + def _GetHashes( self, c, hash_ids ): return [ hash for ( hash, ) in c.execute( 'SELECT hash FROM hashes WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ) ] + + def _GetHashId( self, c, hash ): + + result = c.execute( 'SELECT hash_id FROM hashes WHERE hash = ?;', ( sqlite3.Binary( hash ), ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO hashes ( hash ) VALUES ( ? );', ( sqlite3.Binary( hash ), ) ) + + hash_id = c.lastrowid + + return hash_id + + else: + + ( hash_id, ) = result + + return hash_id + + + + def _GetHashIds( self, c, hashes ): + + hash_ids = [] + + if type( hashes ) == type( set() ): hashes = list( hashes ) + + for i in range( 0, len( hashes ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + hashes_subset = hashes[ i : i + 250 ] + + hash_ids.extend( [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM hashes WHERE hash IN (' + ','.join( '?' * len( hashes_subset ) ) + ');', [ sqlite3.Binary( hash ) for hash in hashes_subset ] ) ] ) + + + if len( hashes ) > len( hash_ids ): + + if len( set( hashes ) ) > len( hash_ids ): + + # must be some new hashes the db has not seen before, so let's generate them as appropriate + + self._GenerateHashIdsEfficiently( c, hashes ) + + hash_ids = self._GetHashIds( c, hashes ) + + + + return hash_ids + + + def _GetHashIdsToHashes( self, c, hash_ids ): return { hash_id : hash for ( hash_id, hash ) in c.execute( 'SELECT hash_id, hash FROM hashes WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';' ) } + + def _GetIPTimestamp( self, c, service_id, hash_id ): + + result = c.execute( 'SELECT ip, timestamp FROM ip_addresses WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() + + if result is None: raise HC.ForbiddenException( 'Did not find ip information for that hash.' ) + + return result + + + def _GetNumFilePetitions( self, c, service_id ): return c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id FROM file_petitions WHERE service_id = ? 
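# Illustrative aside: the 'only do a few at a time' loops above exist because
# SQLite caps the number of bound '?' parameters a single statement may carry,
# so hashes are looked up and inserted in chunks of 250. A standalone sketch of
# the same pattern against an in-memory database; the names below are not from
# the hydrus source:
import sqlite3

def ensure_hash_rows( c, hashes, chunk_size = 250 ):
    
    known = set()
    
    for i in range( 0, len( hashes ), chunk_size ):
        
        subset = hashes[ i : i + chunk_size ]
        
        placeholders = ','.join( '?' * len( subset ) )
        
        known.update( hash for ( hash, ) in c.execute( 'SELECT hash FROM hashes WHERE hash IN (' + placeholders + ');', subset ) )
        
    
    new_hashes = [ hash for hash in hashes if hash not in known ]
    
    c.executemany( 'INSERT INTO hashes ( hash ) VALUES ( ? );', [ ( hash, ) for hash in new_hashes ] )
    
    return new_hashes

db = sqlite3.connect( ':memory:' )
c = db.cursor()
c.execute( 'CREATE TABLE hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB UNIQUE );' )

print( len( ensure_hash_rows( c, [ 'hash-%d' % i for i in range( 1000 ) ] ) ) ) # 1000 new rows, queried in four SELECT chunks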
);', ( service_id, ) ).fetchone()[0] + + def _GetThumbnail( self, hash ): + + try: + + with open( HC.SERVER_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ), 'rb' ) as f: thumbnail = f.read() + + return thumbnail + + except: raise HC.NotFoundException( 'Could not find that thumbnail!' ) + + + def _RewardFilePetitioners( self, c, service_id, hash_ids, multiplier ): + + scores = [ ( account_id, count * multiplier ) for ( account_id, count ) in c.execute( 'SELECT account_id, COUNT( * ) FROM file_petitions WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY account_id;', ( service_id, ) ) ] + + self._RewardAccounts( c, service_id, HC.SCORE_PETITION, scores ) + + +class MessageDB(): + + def _AddMessage( self, c, contact_key, message ): + + try: ( service_id, account_id ) = c.execute( 'SELECT service_id, account_id FROM contacts WHERE contact_key = ?;', ( sqlite3.Binary( contact_key ), ) ).fetchone() + except: raise HC.ForbiddenException( 'Did not find that contact key for the message depot!' ) + + message_key = os.urandom( 32 ) + + c.execute( 'INSERT OR IGNORE INTO messages ( message_key, service_id, account_id, timestamp ) VALUES ( ?, ?, ?, ? );', ( sqlite3.Binary( message_key ), service_id, account_id, int( time.time() ) ) ) + + dest_path = HC.SERVER_MESSAGES_DIR + os.path.sep + message_key.encode( 'hex' ) + + with open( dest_path, 'wb' ) as f: f.write( message ) + + + def _AddStatuses( self, c, contact_key, statuses ): + + try: ( service_id, account_id ) = c.execute( 'SELECT service_id, account_id FROM contacts WHERE contact_key = ?;', ( sqlite3.Binary( contact_key ), ) ).fetchone() + except: raise HC.ForbiddenException( 'Did not find that contact key for the message depot!' ) + + now = int( time.time() ) + + c.executemany( 'INSERT OR REPLACE INTO message_statuses ( status_key, service_id, account_id, status, timestamp ) VALUES ( ?, ?, ?, ?, ? );', [ ( sqlite3.Binary( status_key ), service_id, account_id, sqlite3.Binary( status ), now ) for ( status_key, status ) in statuses ] ) + + + def _CreateContact( self, c, service_id, account_id, public_key ): + + result = c.execute( 'SELECT public_key FROM contacts WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + if result is not None: + + ( existing_public_key, ) = result + + if existing_public_key != public_key: raise HC.ForbiddenException( 'This account already has a public key!' ) + else: return + + + contact_key = hashlib.sha256( public_key ).digest() + + c.execute( 'INSERT INTO contacts ( service_id, account_id, contact_key, public_key ) VALUES ( ?, ?, ?, ? );', ( service_id, account_id, sqlite3.Binary( contact_key ), public_key ) ) + + + def _GetMessage( self, c, service_id, account_id, message_key ): + + result = c.execute( 'SELECT 1 FROM messages WHERE service_id = ? AND account_id = ? AND message_key = ?;', ( service_id, account_id, sqlite3.Binary( message_key ) ) ).fetchone() + + if result is None: raise HC.ForbiddenException( 'Could not find that message key on message depot!' ) + + try: + + with open( HC.SERVER_MESSAGES_DIR + os.path.sep + message_key.encode( 'hex' ), 'rb' ) as f: message = f.read() + + except: raise HC.NotFoundException( 'Could not find that message!' ) + + return message + + + def _GetMessageInfoSince( self, c, service_id, account_id, timestamp ): + + message_keys = [ message_key for ( message_key, ) in c.execute( 'SELECT message_key FROM messages WHERE service_id = ? AND account_id = ? AND timestamp > ? 
ORDER BY timestamp ASC;', ( service_id, account_id, timestamp ) ) ] + + statuses = [ status for ( status, ) in c.execute( 'SELECT status FROM message_statuses WHERE service_id = ? AND account_id = ? AND timestamp > ? ORDER BY timestamp ASC;', ( service_id, account_id, timestamp ) ) ] + + return ( message_keys, statuses ) + + + def _GetPublicKey( self, c, contact_key ): + + ( public_key, ) = c.execute( 'SELECT public_key FROM contacts WHERE contact_key = ?;', ( sqlite3.Binary( contact_key ), ) ).fetchone() + + return public_key + + +class TagDB(): + + def _AddMappings( self, c, service_id, account_id, tag_id, hash_ids, overwrite_deleted ): + + if overwrite_deleted: + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + affected_timestamps = [ timestamp for ( timestamp, ) in c.execute( 'SELECT DISTINCT timestamp FROM deleted_mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) ) ] + + c.execute( 'DELETE FROM deleted_mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) ) + + self._RefreshUpdateCache( c, service_id, affected_timestamps ) + + else: + + already_deleted = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM deleted_mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, tag_id ) ) ] + + hash_ids = set( hash_ids ).difference( already_deleted ) + + + now = int( time.time() ) + + c.executemany( 'INSERT OR IGNORE INTO mappings ( service_id, tag_id, hash_id, account_id, timestamp ) VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, tag_id, hash_id, account_id, now ) for hash_id in hash_ids ] ) + + + def _AddMappingPetition( self, c, service_id, account_id, reason_id, tag_id, hash_ids ): + + self._ApproveOptimisedMappingPetition( c, service_id, account_id, tag_id, hash_ids ) + + valid_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, tag_id ) ) ] + + # this clears out any old reasons, if the user wants to overwrite them + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND account_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( valid_hash_ids ) + ';', ( service_id, account_id, tag_id ) ) + + c.executemany( 'INSERT OR IGNORE INTO mapping_petitions ( service_id, account_id, reason_id, tag_id, hash_id ) VALUES ( ?, ?, ?, ?, ? );', [ ( service_id, account_id, reason_id, tag_id, hash_id ) for hash_id in valid_hash_ids ] ) + + + def _ApproveMappingPetition( self, c, service_id, account_id, reason_id, tag_id, hash_ids ): + + self._ApproveOptimisedMappingPetition( c, service_id, account_id, tag_id, hash_ids ) + + valid_hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, tag_id ) ) ] + + self._RewardMappingPetitioners( c, service_id, tag_id, valid_hash_ids, 1 ) + + self._DeleteMappings( c, service_id, account_id, reason_id, tag_id, valid_hash_ids ) + + + def _ApproveOptimisedMappingPetition( self, c, service_id, account_id, tag_id, hash_ids ): + + ( biggest_end, ) = c.execute( 'SELECT end FROM update_cache WHERE service_id = ? ORDER BY end DESC LIMIT 1;', ( service_id, ) ).fetchone() + + c.execute( 'DELETE FROM mappings WHERE service_id = ? AND account_id = ? AND tag_id = ? 
AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' AND timestamp > ?;', ( service_id, account_id, biggest_end, tag_id ) ) + + + def _DeleteMappings( self, c, service_id, admin_account_id, reason_id, tag_id, hash_ids ): + + splayed_hash_ids = HC.SplayListForDB( hash_ids ) + + affected_timestamps = [ timestamp for ( timestamp, ) in c.execute( 'SELECT DISTINCT timestamp FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) ) ] + + c.execute( 'INSERT OR IGNORE INTO deleted_mappings ( service_id, tag_id, hash_id, reason_id, account_id, admin_account_id, timestamp ) SELECT service_id, tag_id, hash_id, ?, account_id, ?, ? FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( reason_id, admin_account_id, int( time.time() ), service_id, tag_id ) ) + + c.execute( 'DELETE FROM mappings WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) ) + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, tag_id ) ) + + self._RefreshUpdateCache( c, service_id, affected_timestamps ) + + + def _DenyMappingPetition( self, c, service_id, tag_id, hash_ids ): + + self._RewardMappingPetitioners( c, service_id, tag_id, hash_ids, -1 ) + + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, tag_id ) ) + + + def _GenerateMappingUpdate( self, c, service_id, begin, end ): + + hash_ids = set() + + mappings_dict = collections.defaultdict( list ) + + for ( tag, hash_id ) in c.execute( 'SELECT tag, hash_id FROM tags, mappings USING ( tag_id ) WHERE service_id = ? AND timestamp BETWEEN ? AND ?;', ( service_id, begin, end ) ): + + mappings_dict[ tag ].append( hash_id ) + + hash_ids.add( hash_id ) + + + mappings = mappings_dict.items() + + deleted_mappings_dict = collections.defaultdict( list ) + + for ( tag, hash_id ) in c.execute( 'SELECT tag, hash_id FROM tags, deleted_mappings USING ( tag_id ) WHERE service_id = ? AND timestamp BETWEEN ? AND ?;', ( service_id, begin, end ) ): + + deleted_mappings_dict[ tag ].append( hash_id ) + + hash_ids.add( hash_id ) + + + deleted_mappings = deleted_mappings_dict.items() + + hash_ids_to_hashes = self._GetHashIdsToHashes( c, hash_ids ) + + news = c.execute( 'SELECT news, timestamp FROM news WHERE service_id = ? AND timestamp BETWEEN ? AND ?;', ( service_id, begin, end ) ).fetchall() + + return HC.HydrusUpdateTagRepository( mappings, deleted_mappings, hash_ids_to_hashes, news, begin, end ) + + + def _GenerateTagIdsEfficiently( self, c, tags ): + + tags_not_in_db = set( tags ) + + for i in range( 0, len( tags ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + tags_subset = tags[ i : i + 250 ] + + tags_not_in_db.difference_update( [ tag for ( tag, ) in c.execute( 'SELECT tag FROM tags WHERE tag IN (' + ','.join( '?' * len( tags_subset ) ) + ');', [ tag for tag in tags_subset ] ) ] ) + + + if len( tags_not_in_db ) > 0: c.executemany( 'INSERT INTO tags ( tag ) VALUES( ? );', [ ( tag, ) for tag in tags_not_in_db ] ) + + + def _GetMappingPetition( self, c, service_id ): + + result = c.execute( 'SELECT DISTINCT account_id, reason_id, tag_id FROM mapping_petitions WHERE service_id = ? ORDER BY RANDOM() LIMIT 1;', ( service_id, ) ).fetchone() + + if result is None: raise HC.NotFoundException( 'No petitions!' 
) + + ( account_id, reason_id, tag_id ) = result + + petitioner_account_identifier = HC.AccountIdentifier( account_id = account_id ) + + reason = self._GetReason( c, reason_id ) + + tag = self._GetTag( c, tag_id ) + + hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM mapping_petitions WHERE service_id = ? AND account_id = ? AND reason_id = ? AND tag_id = ?;', ( service_id, account_id, reason_id, tag_id ) ) ] + + hashes = self._GetHashes( c, hash_ids ) + + return HC.ServerMappingPetition( petitioner_account_identifier, reason, tag, hashes ) + + + def _GetNumMappingPetitions( self, c, service_id ): return c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT account_id, reason_id, tag_id FROM mapping_petitions WHERE service_id = ? );', ( service_id, ) ).fetchone()[0] + + def _GetTag( self, c, tag_id ): + + result = c.execute( 'SELECT tag FROM tags WHERE tag_id = ?;', ( tag_id, ) ).fetchone() + + if result is None: raise Exception( 'Tag error in database' ) + + ( tag, ) = result + + return tag + + + def _GetTagId( self, c, tag ): + + tag = HC.CleanTag( tag ) + + if tag == '': raise HC.ForbiddenException( 'Tag of zero length!' ) + + result = c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( tag, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO tags ( tag ) VALUES ( ? );', ( tag, ) ) + + tag_id = c.lastrowid + + return tag_id + + else: + + ( tag_id, ) = result + + return tag_id + + + + def _RewardMappingPetitioners( self, c, service_id, tag_id, hash_ids, multiplier ): + + scores = [ ( account_id, count * multiplier ) for ( account_id, count ) in c.execute( 'SELECT account_id, COUNT( * ) FROM mapping_petitions WHERE service_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY account_id;', ( service_id, tag_id ) ) ] + + self._RewardAccounts( c, service_id, HC.SCORE_PETITION, scores ) + + +class RatingDB(): + + def _GenerateRatingUpdate( self, c, service_id, begin, end ): + + ratings_info = c.execute( 'SELECT hash, hash_id, score, count, new_timestamp, current_timestamp FROM aggregate_ratings, hashes USING ( hash_id ) WHERE service_id = ? AND ( new_timestamp BETWEEN ? AND ? OR current_timestamp BETWEEN ? AND ? );', ( service_id, begin, end, begin, end ) ).fetchall() + + current_timestamps = { rating_info[5] for rating_info in ratings_info } + + hash_ids = [ rating_info[1] for rating_info in ratings_info ] + + c.execute( 'UPDATE aggregate_ratings SET current_timestamp = new_timestamp, new_timestamp = 0 WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) + + c.executemany( 'UPDATE update_cache SET dirty = ? WHERE service_id = ? AND ? BETWEEN begin AND end;', [ ( True, service_id, current_timestamp ) for current_timestamp in current_timestamps if current_timestamp != 0 ] ) + + ratings = [ ( hash, score, count ) for ( hash, hash_id, score, count, new_timestamp, current_timestamp ) in ratings_info ] + + news = c.execute( 'SELECT news, timestamp FROM news WHERE service_id = ? AND timestamp BETWEEN ? 
AND ?;', ( service_id, begin, end ) ).fetchall() + + return HC.HydrusUpdateRatingsRepository( ratings, news, begin, end ) + + + def _UpdateRatings( self, c, service_id, account_id, ratings ): + + hashes = [ rating[0] for rating in ratings ] + + hashes_to_hash_ids = self._GetHashesToHashIds( c, hashes ) + + valued_ratings = [ ( hash, rating ) for ( hash, rating ) in ratings if rating is not None ] + null_ratings = [ hash for ( hash, rating ) in ratings if rating is None ] + + c.executemany( 'REPLACE INTO ratings ( service_id, account_id, hash_id, rating ) VALUES ( ?, ?, ?, ? );', [ ( service_id, account_id, hashes_to_hash_ids[ hash ], rating ) for ( hash, rating ) in valued_ratings ] ) + c.executemany( 'DELETE FROM ratings WHERE service_id = ? AND account_id = ? AND hash_id = ?;', [ ( service_id, account_id, hashes_to_hash_ids[ hash ] ) for hash in null_ratings ] ) + + hash_ids = set( hashes_to_hash_ids.values() ) + + aggregates = c.execute( 'SELECT hash_id, SUM( rating ), COUNT( * ) FROM ratings WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY hash_id;', ( service_id, ) ).fetchall() + + missed_aggregate_hash_ids = hash_ids.difference( aggregate[0] for aggregate in aggregates ) + + aggregates.extend( [ ( hash_id, 0.0, 0 ) for hash_id in missed_aggregate_hash_ids ] ) + + hash_ids_to_new_timestamps = { hash_id : new_timestamp for ( hash_id, new_timestamp ) in c.execute( 'SELECT hash_id, new_timestamp FROM aggregate_ratings WHERE service_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) } + + now = int( time.time() ) + + for ( hash_id, total_score, count ) in aggregates: + + if count == 0: score = 0.0 + else: score = float( total_score ) / float( count ) + + if hash_id not in hash_ids_to_new_timestamps or hash_ids_to_new_timestamps[ hash_id ] == 0: + + new_timestamp = now + ( count * HC.UPDATE_DURATION / 10 ) + + else: + + new_timestamp = max( now, hash_ids_to_new_timestamps[ hash_id ] - HC.UPDATE_DURATION ) + + + if hash_id not in hash_ids_to_new_timestamps: c.execute( 'INSERT INTO aggregate_ratings ( service_id, hash_id, score, count, new_timestamp, current_timestamp ) VALUES ( ?, ?, ?, ?, ?, ? );', ( service_id, hash_id, score, count, new_timestamp, 0 ) ) + elif new_timestamp != hash_ids_to_new_timestamps[ hash_id ]: c.execute( 'UPDATE aggregate_ratings SET new_timestamp = ? WHERE service_id = ? AND hash_id = ?;', ( new_timestamp, service_id, hash_id ) ) + + + +class ServiceDB( FileDB, MessageDB, TagDB ): + + def _AccountTypeExists( self, c, service_id, title ): return c.execute( 'SELECT 1 FROM account_types, account_type_map USING ( account_type_id ) WHERE service_id = ? AND title = ?;', ( service_id, title ) ).fetchone() is not None + + def _AddNews( self, c, service_id, news ): c.execute( 'INSERT INTO news ( service_id, news, timestamp ) VALUES ( ?, ?, ? );', ( service_id, news, int( time.time() ) ) ) + + def _AddToExpires( self, c, service_id, account_ids, increase ): c.execute( 'UPDATE account_map SET expires = expires + ? WHERE service_id = ? AND account_id IN ' + HC.SplayListForDB( account_ids ) + ';', ( increase, service_id ) ) + + def _Ban( self, c, service_id, action, admin_account_id, subject_account_ids, reason_id, expiration = None ): + + splayed_subject_account_ids = HC.SplayListForDB( subject_account_ids ) + + now = int( time.time() ) + + if expiration is not None: expires = expiration + now + else: expires = None + + c.executemany( 'INSERT OR IGNORE INTO bans ( service_id, account_id, admin_account_id, reason_id, created, expires ) VALUES ( ?, ?, ?, ?, ?, ? 
);', [ ( service_id, subject_account_id, admin_account_id, reason_id, now, expires ) for subject_account_id in subject_account_ids ] ) + + c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND account_id IN ' + splayed_subject_account_ids + ';', ( service_id, ) ) + + c.execute( 'DELETE FROM mapping_petitions WHERE service_id = ? AND account_id IN ' + splayed_subject_account_ids + ';', ( service_id, ) ) + + if action == HC.SUPERBAN: + + hash_ids = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ? AND account_id IN ' + splayed_subject_account_ids + ';', ( service_id, ) ) ] + + if len( hash_ids ) > 0: self._DeleteLocalFiles( c, admin_account_id, reason_id, hash_ids ) + + mappings_dict = HC.BuildKeyToListDict( c.execute( 'SELECT tag_id, hash_id FROM mappings WHERE service_id = ? AND account_id IN ' + splayed_subject_account_ids + ';', ( service_id, ) ) ) + + if len( mappings_dict ) > 0: + + for ( tag_id, hash_ids ) in mappings_dict.items(): self._DeleteMappings( c, service_id, admin_account_id, reason_id, tag_id, hash_ids ) + + + + + def _ChangeAccountType( self, c, service_id, account_ids, account_type_id ): + + splayed_account_ids = HC.SplayListForDB( account_ids ) + + c.execute( 'UPDATE account_map SET account_type_id = ? WHERE service_id = ? AND account_id IN ' + splayed_account_ids + ';', ( account_type_id, service_id ) ) + + + def _CheckDataUsage( self, c ): + + ( version_year, version_month ) = c.execute( 'SELECT year, month FROM version;' ).fetchone() + + current_time_struct = time.gmtime() + + ( current_year, current_month ) = ( current_time_struct.tm_year, current_time_struct.tm_mon ) + + if version_year != current_year or version_month != current_month: + + c.execute( 'UPDATE version SET year = ?, month = ?;', ( current_year, current_month ) ) + + c.execute( 'UPDATE account_map SET used_bytes = ?, used_requests = ?;', ( 0, 0 ) ) + + + + def _CheckMonthlyData( self, c ): + + service_identifiers = self._GetServiceIdentifiers( c ) + + running_total = 0 + + self._services_over_monthly_data = set() + + for service_identifier in service_identifiers: + + service_id = self._GetServiceId( c, service_identifier ) + + options = self._GetOptions( c, service_id ) + + service_type = service_identifier.GetType() + + if service_type != HC.SERVER_ADMIN: + + ( total_used_bytes, ) = c.execute( 'SELECT SUM( used_bytes ) FROM account_map WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if total_used_bytes is None: total_used_bytes = 0 + + running_total += total_used_bytes + + if 'max_monthly_data' in options: + + max_monthly_data = options[ 'max_monthly_data' ] + + if max_monthly_data is not None and total_used_bytes > max_monthly_data: self._services_over_monthly_data.add( service_identifier ) + + + + + # have to do this after + + server_admin_options = self._GetOptions( c, self._server_admin_id ) + + self._over_monthly_data = False + + if 'max_monthly_data' in server_admin_options: + + max_monthly_data = server_admin_options[ 'max_monthly_data' ] + + if max_monthly_data is not None and running_total > max_monthly_data: self._over_monthly_data = True + + + + def _CleanUpdate( self, c, service_id, begin, end ): + + service_type = self._GetServiceType( c, service_id ) + + if service_type == HC.FILE_REPOSITORY: clean_update = self._GenerateFileUpdate( c, service_id, begin, end ) + elif service_type == HC.TAG_REPOSITORY: clean_update = self._GenerateMappingUpdate( c, service_id, begin, end ) + + ( update_key, ) = c.execute( 'SELECT update_key FROM update_cache 
WHERE service_id = ? AND begin = ?;', ( service_id, begin ) ).fetchone() + + with open( HC.SERVER_UPDATES_DIR + os.path.sep + update_key, 'wb' ) as f: f.write( yaml.safe_dump( clean_update ) ) + + c.execute( 'UPDATE update_cache SET dirty = ? WHERE service_id = ? AND begin = ?;', ( False, service_id, begin ) ) + + + def _ClearBans( self, c ): + + now = int( time.time() ) + + c.execute( 'DELETE FROM bans WHERE expires < ?;', ( now, ) ) + + + def _CreateUpdate( self, c, service_id, begin, end ): + + service_type = self._GetServiceType( c, service_id ) + + if service_type == HC.FILE_REPOSITORY: update = self._GenerateFileUpdate( c, service_id, begin, end ) + elif service_type == HC.TAG_REPOSITORY: update = self._GenerateMappingUpdate( c, service_id, begin, end ) + + update_key_bytes = os.urandom( 32 ) + + update_key = update_key_bytes.encode( 'hex' ) + + with open( HC.SERVER_UPDATES_DIR + os.path.sep + update_key, 'wb' ) as f: f.write( yaml.safe_dump( update ) ) + + c.execute( 'INSERT OR REPLACE INTO update_cache ( service_id, begin, end, update_key, dirty ) VALUES ( ?, ?, ?, ?, ? );', ( service_id, begin, end, update_key, False ) ) + + + def _DeleteOrphans( self, c ): + + # files + + deletees = [ hash_id for ( hash_id, ) in c.execute( 'SELECT DISTINCT hash_id FROM files_info EXCEPT SELECT DISTINCT hash_id FROM file_map;' ) ] + + if len( deletees ) > 0: + + deletee_hashes = set( self._GetHashes( c, deletees ) ) + + local_files_hashes = { hash.decode( 'hex' ) for hash in dircache.listdir( HC.SERVER_FILES_DIR ) } + thumbnails_hashes = { hash.decode( 'hex' ) for hash in dircache.listdir( HC.SERVER_THUMBNAILS_DIR ) } + + for hash in local_files_hashes & deletee_hashes: os.remove( HC.SERVER_FILES_DIR + os.path.sep + hash.encode( 'hex' ) ) + for hash in thumbnails_hashes & deletee_hashes: os.remove( HC.SERVER_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) ) + + c.execute( 'DELETE FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( deletees ) + ';' ) + + + # messages + + required_message_keys = { message_key for ( message_key, ) in c.execute( 'SELECT DISTINCT message_key FROM messages;' ) } + + existing_message_keys = { key.decode( 'hex' ) for key in dircache.listdir( HC.SERVER_MESSAGES_DIR ) } + + deletees = existing_message_keys - required_message_keys + + for message_key in deletees: os.remove( HC.SERVER_MESSAGES_DIR + os.path.sep + message_key.encode( 'hex' ) ) + + + def _GenerateAccessKeys( self, c, service_id, num, account_type_id, expiration ): + + access_keys = [ os.urandom( HC.HYDRUS_KEY_LENGTH ) for i in range( num ) ] + + c.executemany( 'INSERT INTO accounts ( access_key ) VALUES ( ? );', [ ( sqlite3.Binary( hashlib.sha256( access_key ).digest() ), ) for access_key in access_keys ] ) + + account_ids = self._GetAccountIds( c, access_keys ) + + now = int( time.time() ) + + if expiration is not None: expires = expiration + int( time.time() ) + else: expires = None + + c.executemany( 'INSERT INTO account_map ( service_id, account_id, account_type_id, created, expires, used_bytes, used_requests ) VALUES ( ?, ?, ?, ?, ?, ?, ? 
);', [ ( service_id, account_id, account_type_id, now, expires, 0, 0 ) for account_id in account_ids ] ) + + return access_keys + + + def _FlushRequestsMade( self, c, all_services_requests ): + + all_services_request_dict = HC.BuildKeyToListDict( [ ( service_identifier, ( account, num_bytes ) ) for ( service_identifier, account, num_bytes ) in all_services_requests ] ) + + for ( service_identifier, requests ) in all_services_request_dict.items(): + + service_id = self._GetServiceId( c, service_identifier ) + + requests_dict = HC.BuildKeyToListDict( requests ) + + c.executemany( 'UPDATE account_map SET used_bytes = used_bytes + ?, used_requests = used_requests + ? WHERE service_id = ? AND account_id = ?;', [ ( sum( num_bytes_list ), len( num_bytes_list ), service_id, account.GetAccountId() ) for ( account, num_bytes_list ) in requests_dict.items() ] ) + + + + def _GetAccount( self, c, service_id, account_identifier ): + + if account_identifier.HasAccessKey(): + + access_key = account_identifier.GetAccessKey() + + try: ( account_id, account_type, created, expires, used_bytes, used_requests ) = c.execute( 'SELECT account_id, account_type, created, expires, used_bytes, used_requests FROM account_types, ( accounts, account_map USING ( account_id ) ) USING ( account_type_id ) WHERE service_id = ? AND access_key = ?;', ( service_id, sqlite3.Binary( hashlib.sha256( access_key ).digest() ) ) ).fetchone() + except: raise HC.ForbiddenException( 'The service could not find that account in its database.' ) + + elif account_identifier.HasMapping(): + + try: + + ( tag, hash ) = account_identifier.GetMapping() + + tag_id = self._GetTagId( c, tag ) + hash_id = self._GetHashId( c, hash ) + + except: raise HC.ForbiddenException( 'The service could not find that mapping in its database.' ) + + try: ( account_id, account_type, created, expires, used_bytes, used_requests ) = c.execute( 'SELECT account_id, account_type, created, expires, used_bytes, used_requests FROM account_types, ( accounts, ( account_map, mappings USING ( service_id, account_id ) ) USING ( account_id ) ) USING ( account_type_id ) WHERE service_id = ? AND tag_id = ? AND hash_id = ?;', ( service_id, tag_id, hash_id ) ).fetchone() + except: raise HC.ForbiddenException( 'The service could not find that account in its database.' ) + + elif account_identifier.HasHash(): + + try: hash_id = self._GetHashId( c, account_identifier.GetHash() ) + except: raise HC.ForbiddenException( 'The service could not find that hash in its database.' ) + + try: + + result = c.execute( 'SELECT account_id, account_type, created, expires, used_bytes, used_requests FROM account_types, ( accounts, ( account_map, file_map USING ( service_id, account_id ) ) USING ( account_id ) ) USING ( account_type_id ) WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() + + if result is None: result = c.execute( 'SELECT account_id, account_type, created, expires, used_bytes, used_requests FROM account_types, ( accounts, ( account_map, deleted_files USING ( service_id, account_id ) ) USING ( account_id ) ) USING ( account_type_id ) WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone() + + ( account_id, account_type, created, expires, used_bytes, used_requests ) = result + + except: raise HC.ForbiddenException( 'The service could not find that account in its database.' 
) + + elif account_identifier.HasAccountId(): + + try: ( account_id, account_type, created, expires, used_bytes, used_requests ) = c.execute( 'SELECT account_id, account_type, created, expires, used_bytes, used_requests FROM account_types, account_map USING ( account_type_id ) WHERE service_id = ? AND account_id = ?;', ( service_id, account_identifier.GetAccountId() ) ).fetchone() + except: raise HC.ForbiddenException( 'The service could not find that account in its database.' ) + + + used_data = ( used_bytes, used_requests ) + + banned_info = c.execute( 'SELECT reason, created, expires FROM bans, reasons USING ( reason_id ) WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + return HC.Account( account_id, account_type, created, expires, used_data, banned_info = banned_info ) + + + def _GetAccountFileInfo( self, c, service_id, account_id ): + + ( num_deleted_files, ) = c.execute( 'SELECT COUNT( * ) FROM deleted_files WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + ( num_files, num_files_bytes ) = c.execute( 'SELECT COUNT( * ), SUM( size ) FROM file_map, files_info USING ( hash_id ) WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + if num_files_bytes is None: num_files_bytes = 0 + + result = c.execute( 'SELECT score FROM account_scores WHERE service_id = ? AND account_id = ? AND score_type = ?;', ( service_id, account_id, HC.SCORE_PETITION ) ).fetchone() + + if result is None: petition_score = 0 + else: ( petition_score, ) = result + + ( num_petitions, ) = c.execute( 'SELECT COUNT( DISTINCT reason_id ) FROM file_petitions WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + account_info = {} + + account_info[ 'num_deleted_files' ] = num_deleted_files + account_info[ 'num_files' ] = num_files + account_info[ 'num_files_bytes' ] = num_files_bytes + account_info[ 'petition_score' ] = petition_score + account_info[ 'num_petitions' ] = num_petitions + + return account_info + + + def _GetAccountIdFromContactKey( self, c, service_id, contact_key ): + + try: + + ( account_id, ) = c.execute( 'SELECT account_id FROM contacts WHERE service_id = ? AND contact_key = ?;', ( service_id, sqlite3.Binary( contact_key ) ) ).fetchone() + + except: raise HC.NotFoundException( 'Could not find that contact key!' ) + + return account_id + + + def _GetAccountIds( self, c, access_keys ): + + account_ids = [] + + if type( access_keys ) == set: access_keys = list( access_keys ) + + for i in range( 0, len( access_keys ), 250 ): # there is a limit on the number of parameterised variables in sqlite, so only do a few at a time + + access_keys_subset = access_keys[ i : i + 250 ] + + account_ids.extend( [ account_id for ( account_id , ) in c.execute( 'SELECT account_id FROM accounts WHERE access_key IN (' + ','.join( '?' * len( access_keys_subset ) ) + ');', [ sqlite3.Binary( hashlib.sha256( access_key ).digest() ) for access_key in access_keys_subset ] ) ] ) + + + return account_ids + + + def _GetAccountMappingInfo( self, c, service_id, account_id ): + + ( num_deleted_mappings, ) = c.execute( 'SELECT COUNT( * ) FROM deleted_mappings WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + ( num_mappings, ) = c.execute( 'SELECT COUNT( * ) FROM mappings WHERE service_id = ? AND account_id = ?;', ( service_id, account_id ) ).fetchone() + + result = c.execute( 'SELECT score FROM account_scores WHERE service_id = ? AND account_id = ? 
AND score_type = ?;', ( service_id, account_id, HC.SCORE_PETITION ) ).fetchone() + + if result is None: petition_score = 0 + else: ( petition_score, ) = result + + # crazy query here because two distinct columns + ( num_petitions, ) = c.execute( 'SELECT COUNT( * ) FROM ( SELECT DISTINCT tag_id, reason_id FROM mapping_petitions WHERE service_id = ? AND account_id = ? );', ( service_id, account_id ) ).fetchone() + + account_info = {} + + account_info[ 'num_deleted_mappings' ] = num_deleted_mappings + account_info[ 'num_mappings' ] = num_mappings + account_info[ 'petition_score' ] = petition_score + account_info[ 'num_petitions' ] = num_petitions + + return account_info + + + def _GetAccountTypeId( self, c, service_id, title ): + + result = c.execute( 'SELECT account_type_id FROM account_types, account_type_map USING ( account_type_id ) WHERE service_id = ? AND title = ?;', ( service_id, title ) ).fetchone() + + if result is None: raise HC.NotFoundException( 'Could not find account title ' + str( title ) + ' in db for this service.' ) + + ( account_type_id, ) = result + + return account_type_id + + + def _GetAccountTypes( self, c, service_id ): + + return [ account_type for ( account_type, ) in c.execute( 'SELECT account_type FROM account_type_map, account_types USING ( account_type_id ) WHERE service_id = ?;', ( service_id, ) ) ] + + + def _GetCachedUpdate( self, c, service_id, begin ): + + result = c.execute( 'SELECT update_key FROM update_cache WHERE service_id = ? AND begin = ?;', ( service_id, begin ) ).fetchone() + + if result is None: result = c.execute( 'SELECT update_key FROM update_cache WHERE service_id = ? AND ? BETWEEN begin AND end;', ( service_id, begin ) ).fetchone() + + if result is None: raise HC.NotFoundException( 'Could not find that update!' ) + + ( update_key, ) = result + + with open( HC.SERVER_UPDATES_DIR + os.path.sep + update_key, 'rb' ) as f: update = f.read() + + return update + + + def _GetDirtyUpdates( self, c ): return c.execute( 'SELECT service_id, begin, end FROM update_cache WHERE dirty = ?;', ( True, ) ).fetchall() + + def _GetOptions( self, c, service_id ): + + ( options, ) = c.execute( 'SELECT options FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone() + + return options + + + def _GetReason( self, c, reason_id ): + + result = c.execute( 'SELECT reason FROM reasons WHERE reason_id = ?;', ( reason_id, ) ).fetchone() + + if result is None: raise Exception( 'Reason error in database' ) + + ( reason, ) = result + + return reason + + + def _GetReasonId( self, c, reason ): + + result = c.execute( 'SELECT reason_id FROM reasons WHERE reason = ?;', ( reason, ) ).fetchone() + + if result is None: + + c.execute( 'INSERT INTO reasons ( reason ) VALUES ( ? );', ( reason, ) ) + + reason_id = c.lastrowid + + return reason_id + + else: + + ( reason_id, ) = result + + return reason_id + + + + def _GetRestrictedServiceStats( self, c, service_id ): + + stats = {} + + ( stats[ 'num_accounts' ], ) = c.execute( 'SELECT COUNT( * ) FROM account_map WHERE service_id = ?;', ( service_id, ) ).fetchone() + + ( stats[ 'num_banned' ], ) = c.execute( 'SELECT COUNT( * ) FROM bans WHERE service_id = ?;', ( service_id, ) ).fetchone() + + return stats + + + def _GetServiceId( self, c, service_identifier ): + + service_type = service_identifier.GetType() + port = service_identifier.GetPort() + + result = c.execute( 'SELECT service_id FROM services WHERE type = ? 
AND port = ?;', ( service_type, port ) ).fetchone() + + if result is None: raise Exception( 'Service id error in database' ) + + ( service_id, ) = result + + return service_id + + + def _GetServiceIds( self, c, limited_types = HC.ALL_SERVICES ): return [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM services WHERE type IN ' + HC.SplayListForDB( limited_types ) + ';' ) ] + + def _GetServiceIdentifiers( self, c, limited_types = HC.ALL_SERVICES ): return [ HC.ServerServiceIdentifier( service_type, port ) for ( service_type, port ) in c.execute( 'SELECT type, port FROM services WHERE type IN '+ HC.SplayListForDB( limited_types ) + ';' ) ] + + def _GetServiceType( self, c, service_id ): + + result = c.execute( 'SELECT type FROM services WHERE service_id = ?;', ( service_id, ) ).fetchone() + + if result is None: raise Exception( 'Service id error in database' ) + + ( service_type, ) = result + + return service_type + + + def _GetUpdateEnds( self, c ): + + service_ids = self._GetServiceIds( c, HC.REPOSITORIES ) + + ends = [ c.execute( 'SELECT service_id, end FROM update_cache WHERE service_id = ? ORDER BY end DESC LIMIT 1;', ( service_id, ) ).fetchone() for service_id in service_ids ] + + return ends + + + def _ModifyAccountTypes( self, c, service_id, edit_log ): + + for ( action, details ) in edit_log: + + if action == 'add': + + account_type = details + + title = account_type.GetTitle() + + if self._AccountTypeExists( c, service_id, title ): raise HC.ForbiddenException( 'Already found account type ' + str( title ) + ' in the db for this service, so could not add!' ) + + c.execute( 'INSERT OR IGNORE INTO account_types ( title, account_type ) VALUES ( ?, ? );', ( title, account_type ) ) + + account_type_id = c.lastrowid + + c.execute( 'INSERT OR IGNORE INTO account_type_map ( service_id, account_type_id ) VALUES ( ?, ? );', ( service_id, account_type_id ) ) + + elif action == 'delete': + + ( title, new_title ) = details + + account_type_id = self._GetAccountId( c, service_id, title ) + + new_account_type_id = self._GetAccountId( c, service_id, new_title ) + + c.execute( 'UPDATE account_map SET account_type_id = ? WHERE service_id = ? AND account_type_id = ?;', ( new_account_type_id, service_id, account_type_id ) ) + + c.execute( 'DELETE FROM account_types WHERE account_type_id = ?;', ( account_type_id, ) ) + + c.execute( 'DELETE FROM account_type_map WHERE service_id = ? AND account_type_id = ?;', ( service_id, account_type_id ) ) + + elif action == 'edit': + + ( old_title, account_type ) = details + + title = account_type.GetTitle() + + if old_title != title and self._AccountTypeExists( c, service_id, title ): raise HC.ForbiddenException( 'Already found account type ' + str( title ) + ' in the database, so could not rename ' + str( old_title ) + '!' ) + + account_type_id = self._GetAccountTypeId( c, service_id, old_title ) + + c.execute( 'UPDATE account_types SET title = ?, account_type = ? 
WHERE account_type_id = ?;', ( title, account_type, account_type_id ) ) + + + + + def _ModifyServices( self, c, account_id, edit_log ): + + now = int( time.time() ) + + for ( action, data ) in edit_log: + + if action == HC.ADD: + + service_identifier = data + + service_type = service_identifier.GetType() + port = service_identifier.GetPort() + + if c.execute( 'SELECT 1 FROM services WHERE port = ?;', ( port, ) ).fetchone() is not None: raise Exception( 'There is already a service hosted at port ' + str( port ) ) + + c.execute( 'INSERT INTO services ( type, port, options ) VALUES ( ?, ?, ? );', ( service_type, port, yaml.safe_dump( HC.DEFAULT_OPTIONS[ service_type ] ) ) ) + + service_id = c.lastrowid + + service_admin_account_type = HC.AccountType( 'service admin', [ HC.GET_DATA, HC.POST_DATA, HC.POST_PETITIONS, HC.RESOLVE_PETITIONS, HC.MANAGE_USERS, HC.GENERAL_ADMIN ], ( None, None ) ) + + c.execute( 'INSERT INTO account_types ( title, account_type ) VALUES ( ?, ? );', ( 'service admin', service_admin_account_type ) ) + + service_admin_account_type_id = c.lastrowid + + c.execute( 'INSERT INTO account_type_map ( service_id, account_type_id ) VALUES ( ?, ? );', ( service_id, service_admin_account_type_id ) ) + + c.execute( 'INSERT INTO account_map ( service_id, account_id, account_type_id, created, expires, used_bytes, used_requests ) VALUES ( ?, ?, ?, ?, ?, ?, ? );', ( service_id, account_id, service_admin_account_type_id, now, None, 0, 0 ) ) + + if service_type in HC.REPOSITORIES: + + begin = 0 + end = int( time.time() ) + + if service_type == HC.FILE_REPOSITORY: update = self._GenerateFileUpdate( c, service_id, begin, end ) + elif service_type == HC.TAG_REPOSITORY: update = self._GenerateMappingUpdate( c, service_id, begin, end ) + elif service_type in ( HC.RATING_LIKE_REPOSITORY, HC.RATING_NUMERICAL_REPOSITORY ): update = self._GenerateRatingUpdate( c, service_id, begin, end ) + + update_key = os.urandom( 32 ) + + with open( HC.SERVER_UPDATES_DIR + os.path.sep + update_key, 'wb' ) as f: f.write( yaml.safe_dump( update ) ) + + c.execute( 'INSERT INTO update_cache ( service_id, begin, end, update_key ) VALUES ( ?, ?, ?, ? );', ( service_id, begin, end, update_key ) ) + + + elif action == HC.EDIT: + + ( service_identifier, new_port ) = data + + service_id = self._GetServiceId( c, service_identifier ) + + if c.execute( 'SELECT 1 FROM services WHERE port = ?;', ( new_port, ) ).fetchone() is not None: raise Exception( 'There is already a service hosted at port ' + str( port ) ) + + c.execute( 'UPDATE services SET port = ? 
WHERE service_id = ?;', ( new_port, service_id ) ) + + elif action == HC.DELETE: + + service_identifier = data + + service_id = self._GetServiceId( c, service_identifier ) + + c.execute( 'DELETE FROM services WHERE service_id = ?;', ( service_id, ) ) + + + + # now we are sure the db is happy ( and a commit is like 5ms away), let's boot these servers + + desired_service_identifiers = self._GetServiceIdentifiers( c ) + + existing_servers = dict( self._servers ) + + for existing_service_identifier in existing_servers.keys(): + + if existing_service_identifier not in desired_service_identifiers: + + self._servers[ existing_service_identifier ].shutdown() + + del self._servers[ existing_service_identifier ] + + + + for desired_service_identifier in desired_service_identifiers: + + if desired_service_identifier not in existing_servers: + + service_id = self._GetServiceId( c, desired_service_identifier ) + + options = self._GetOptions( c, service_id ) + + if 'message' in options: message = options[ 'message' ] + else: message = '' + + server = HydrusServer.HydrusHTTPServer( desired_service_identifier, message ) + + self._servers[ desired_service_identifier ] = server + + threading.Thread( target=server.serve_forever ).start() + + + + + def _RefreshUpdateCache( self, c, service_id, affected_timestamps ): c.executemany( 'UPDATE update_cache SET dirty = ? WHERE service_id = ? AND ? BETWEEN begin AND end;', [ ( True, service_id, timestamp ) for timestamp in affected_timestamps ] ) + + def _RewardAccounts( self, c, service_id, score_type, scores ): + + c.executemany( 'INSERT OR IGNORE INTO account_scores ( service_id, account_id, score_type, score ) VALUES ( ?, ?, ?, ? );', [ ( service_id, account_id, score_type, 0 ) for ( account_id, score ) in scores ] ) + + c.executemany( 'UPDATE account_scores SET score = score + ? WHERE service_id = ? AND account_id = ? and score_type = ?;', [ ( score, service_id, account_id, score_type ) for ( account_id, score ) in scores ] ) + + + def _SetExpires( self, c, service_id, account_ids, expires ): c.execute( 'UPDATE account_map SET expires = ? WHERE service_id = ? AND account_id IN ' + HC.SplayListForDB( account_ids ) + ';', ( expires, service_id ) ) + + def _SetOptions( self, c, service_id, service_identifier, options ): + + c.execute( 'UPDATE services SET options = ? WHERE service_id = ?;', ( options, service_id ) ).fetchone() + + if 'message' in options: + + message = options[ 'message' ] + + self._servers[ service_identifier ].SetMessage( message ) + + + + def _UnbanKey( self, c, service_id, account_id ): c.execute( 'DELETE FROM bans WHERE service_id = ? 
AND account_id = ?;', ( account_id, ) ) + +class DB( ServiceDB ): + + def __init__( self ): + + self._db_path = HC.DB_DIR + os.path.sep + 'server.db' + + self._jobs = Queue.PriorityQueue() + self._pubsubs = [] + + self._over_monthly_data = False + self._services_over_monthly_data = set() + + self._InitDB() + + ( db, c ) = self._GetDBCursor() + + self._UpdateDB( c ) + + service_identifiers = self._GetServiceIdentifiers( c ) + + ( self._server_admin_id, ) = c.execute( 'SELECT service_id FROM services WHERE type = ?;', ( HC.SERVER_ADMIN, ) ).fetchone() + + self._servers = {} + + for service_identifier in service_identifiers: + + service_type = service_identifier.GetType() + + if service_type == HC.SERVER_ADMIN: + + port = service_identifier.GetPort() + + try: + + connection = httplib.HTTPConnection( '127.0.0.1', port ) + + connection.connect() + + connection.close() + + already_running = True + + except: + + already_running = False + + + if already_running: raise Exception( 'The server appears to be running already!' + os.linesep + 'Either that, or something else is using port ' + str( port ) + '.' ) + + + service_id = self._GetServiceId( c, service_identifier ) + + options = self._GetOptions( c, service_id ) + + if 'message' in options: message = options[ 'message' ] + else: message = '' + + self._servers[ service_identifier ] = HydrusServer.HydrusHTTPServer( service_identifier, message ) + + + for server in self._servers.values(): threading.Thread( target=server.serve_forever ).start() + + HC.DAEMONQueue( 'FlushRequestsMade', self.DAEMONFlushRequestsMade, 'request_made', period = 10 ) + + HC.DAEMONWorker( 'CheckMonthlyData', self.DAEMONCheckMonthlyData, period = 3600 ) + HC.DAEMONWorker( 'ClearBans', self.DAEMONClearBans, period = 3600 ) + HC.DAEMONWorker( 'DeleteOrphans', self.DAEMONDeleteOrphans, period = 86400 ) + HC.DAEMONWorker( 'GenerateUpdates', self.DAEMONGenerateUpdates, period = 1200 ) + HC.DAEMONWorker( 'CheckDataUsage', self.DAEMONCheckDataUsage, period = 86400 ) + + threading.Thread( target = self.MainLoop, name = 'Database Main Loop' ).start() + + + def _GetDBCursor( self ): + + db = sqlite3.connect( self._db_path, timeout=600.0, isolation_level = None, detect_types = sqlite3.PARSE_DECLTYPES ) + + c = db.cursor() + + c.execute( 'PRAGMA cache_size = 10000;' ) + c.execute( 'PRAGMA foreign_keys = ON;' ) + c.execute( 'PRAGMA recursive_triggers = ON;' ) + + return ( db, c ) + + + def _InitDB( self ): + + if not os.path.exists( self._db_path ): + + if not os.path.exists( HC.SERVER_FILES_DIR ): os.mkdir( HC.SERVER_FILES_DIR ) + if not os.path.exists( HC.SERVER_THUMBNAILS_DIR ): os.mkdir( HC.SERVER_THUMBNAILS_DIR ) + if not os.path.exists( HC.SERVER_MESSAGES_DIR ): os.mkdir( HC.SERVER_MESSAGES_DIR ) + if not os.path.exists( HC.SERVER_UPDATES_DIR ): os.mkdir( HC.SERVER_UPDATES_DIR ) + + ( db, c ) = self._GetDBCursor() + + c.execute( 'BEGIN IMMEDIATE' ) + + c.execute( 'PRAGMA auto_vacuum = 0;' ) # none + c.execute( 'PRAGMA journal_mode=WAL;' ) + + now = int( time.time() ) + + c.execute( 'CREATE TABLE services ( service_id INTEGER PRIMARY KEY, type INTEGER, port INTEGER, options TEXT_YAML );' ) + + c.execute( 'CREATE TABLE accounts ( account_id INTEGER PRIMARY KEY, access_key BLOB_BYTES );' ) + c.execute( 'CREATE UNIQUE INDEX accounts_access_key_index ON accounts ( access_key );' ) + + c.execute( 'CREATE TABLE account_map ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, account_type_id INTEGER, created INTEGER, expires INTEGER, used_bytes INTEGER, 
used_requests INTEGER, PRIMARY KEY( service_id, account_id ) );' ) + c.execute( 'CREATE INDEX account_map_service_id_account_type_id_index ON account_map ( service_id, account_type_id );' ) + + c.execute( 'CREATE TABLE account_scores ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, score_type INTEGER, score INTEGER, PRIMARY KEY( service_id, account_id, score_type ) );' ) + + c.execute( 'CREATE TABLE account_type_map ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_type_id, PRIMARY KEY ( service_id, account_type_id ) );' ) + + c.execute( 'CREATE TABLE account_types ( account_type_id INTEGER PRIMARY KEY, title TEXT, account_type TEXT_YAML );' ) + + c.execute( 'CREATE TABLE bans ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, admin_account_id INTEGER, reason_id INTEGER, created INTEGER, expires INTEGER, PRIMARY KEY( service_id, account_id ) );' ) + c.execute( 'CREATE INDEX bans_expires ON bans ( expires );' ) + + c.execute( 'CREATE TABLE contacts ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, contact_key BLOB, public_key TEXT, PRIMARY KEY( service_id, account_id ) );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' ) + + c.execute( 'CREATE TABLE deleted_files ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, reason_id INTEGER, account_id INTEGER, admin_account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX deleted_files_service_id_account_id_index ON deleted_files ( service_id, account_id );' ) + c.execute( 'CREATE INDEX deleted_files_service_id_timestamp_index ON deleted_files ( service_id, timestamp );' ) + + c.execute( 'CREATE TABLE deleted_mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, account_id INTEGER, admin_account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX deleted_mappings_service_id_account_id_index ON deleted_mappings ( service_id, account_id );' ) + c.execute( 'CREATE INDEX deleted_mappings_service_id_timestamp_index ON deleted_mappings ( service_id, timestamp );' ) + + c.execute( 'CREATE TABLE files_info ( hash_id INTEGER PRIMARY KEY, size INTEGER, mime INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER );' ) + + c.execute( 'CREATE TABLE file_map ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, hash_id, account_id ) );' ) + c.execute( 'CREATE INDEX file_map_service_id_account_id_index ON file_map ( service_id, account_id );' ) + c.execute( 'CREATE INDEX file_map_service_id_timestamp_index ON file_map ( service_id, timestamp );' ) + + c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, account_id, hash_id ) );' ) + c.execute( 'CREATE INDEX file_petitions_service_id_account_id_reason_id_index ON file_petitions ( service_id, account_id, reason_id );' ) + c.execute( 'CREATE INDEX file_petitions_service_id_hash_id_index ON file_petitions ( service_id, hash_id );' ) + + c.execute( 'CREATE TABLE hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES );' ) + c.execute( 'CREATE UNIQUE INDEX hashes_hash_index ON hashes ( hash );' ) + + c.execute( 'CREATE TABLE 
ip_addresses ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, ip TEXT, timestamp INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + + c.execute( 'CREATE TABLE mapping_petitions ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, account_id, tag_id, hash_id ) );' ) + c.execute( 'CREATE INDEX mapping_petitions_service_id_account_id_reason_id_tag_id_index ON mapping_petitions ( service_id, account_id, reason_id, tag_id );' ) + c.execute( 'CREATE INDEX mapping_petitions_service_id_tag_id_hash_id_index ON mapping_petitions ( service_id, tag_id, hash_id );' ) + + c.execute( 'CREATE TABLE mappings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, tag_id INTEGER, hash_id INTEGER, account_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, tag_id, hash_id ) );' ) + + c.execute( 'CREATE TABLE messages ( message_key BLOB_BYTES PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX messages_service_id_account_id_index ON messages ( service_id, account_id );' ) + c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' ) + + c.execute( 'CREATE TABLE message_statuses ( status_key BLOB_BYTES PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, status BLOB_BYTES, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX message_statuses_service_id_account_id_index ON message_statuses ( service_id, account_id );' ) + c.execute( 'CREATE INDEX message_statuses_timestamp_index ON message_statuses ( timestamp );' ) + + c.execute( 'CREATE TABLE news ( service_id INTEGER REFERENCES services ON DELETE CASCADE, news TEXT, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX news_timestamp_index ON news ( timestamp );' ) + + c.execute( 'CREATE TABLE reasons ( reason_id INTEGER PRIMARY KEY, reason TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX reasons_reason_index ON reasons ( reason );' ) + + c.execute( 'CREATE TABLE tags ( tag_id INTEGER PRIMARY KEY, tag TEXT );' ) + c.execute( 'CREATE UNIQUE INDEX tags_tag_index ON tags ( tag );' ) + + c.execute( 'CREATE TABLE update_cache ( service_id INTEGER REFERENCES services ON DELETE CASCADE, begin INTEGER, end INTEGER, update_key TEXT, dirty INTEGER_BOOLEAN, PRIMARY KEY( service_id, begin ) );' ) + c.execute( 'CREATE UNIQUE INDEX update_cache_service_id_end_index ON update_cache ( service_id, end );' ) + c.execute( 'CREATE INDEX update_cache_service_id_dirty_index ON update_cache ( service_id, dirty );' ) + + c.execute( 'CREATE TABLE version ( version INTEGER, year INTEGER, month INTEGER );' ) + + current_time_struct = time.gmtime() + + ( current_year, current_month ) = ( current_time_struct.tm_year, current_time_struct.tm_mon ) + + c.execute( 'INSERT INTO version ( version, year, month ) VALUES ( ?, ?, ? );', ( HC.SOFTWARE_VERSION, current_year, current_month ) ) + + # set up server admin + + c.execute( 'INSERT INTO services ( type, port, options ) VALUES ( ?, ?, ? );', ( HC.SERVER_ADMIN, HC.DEFAULT_SERVER_ADMIN_PORT, yaml.safe_dump( HC.DEFAULT_OPTIONS[ HC.SERVER_ADMIN ] ) ) ) + + server_admin_service_id = c.lastrowid + + server_admin_account_type = HC.AccountType( 'server admin', [ HC.MANAGE_USERS, HC.GENERAL_ADMIN, HC.EDIT_SERVICES ], ( None, None ) ) + + c.execute( 'INSERT INTO account_types ( title, account_type ) VALUES ( ?, ? 
);', ( 'server admin', server_admin_account_type ) ) + + server_admin_account_type_id = c.lastrowid + + c.execute( 'INSERT INTO account_type_map ( service_id, account_type_id ) VALUES ( ?, ? );', ( server_admin_service_id, server_admin_account_type_id ) ) + + c.execute( 'COMMIT' ) + + + + def _MakeBackup( self, c ): + + c.execute( 'COMMIT' ) + + c.execute( 'VACUUM' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + shutil.copy( self._db_path, self._db_path + '.backup' ) + if os.path.exists( self._db_path + '-wal' ): shutil.copy( self._db_path + '-wal', self._db_path + '-wal.backup' ) + + shutil.rmtree( HC.SERVER_FILES_DIR + '_backup', ignore_errors = True ) + shutil.rmtree( HC.SERVER_THUMBNAILS_DIR + '_backup', ignore_errors = True ) + shutil.rmtree( HC.SERVER_MESSAGES_DIR + '_backup', ignore_errors = True ) + shutil.rmtree( HC.SERVER_UPDATES_DIR + '_backup', ignore_errors = True ) + + shutil.copytree( HC.SERVER_FILES_DIR, HC.SERVER_FILES_DIR + '_backup' ) + shutil.copytree( HC.SERVER_THUMBNAILS_DIR, HC.SERVER_THUMBNAILS_DIR + '_backup' ) + shutil.copytree( HC.SERVER_MESSAGES_DIR, HC.SERVER_MESSAGES_DIR + '_backup' ) + shutil.copytree( HC.SERVER_UPDATES_DIR, HC.SERVER_UPDATES_DIR + '_backup' ) + + + def _UpdateDB( self, c ): + + ( version, ) = c.execute( 'SELECT version FROM version;' ).fetchone() + + if version != HC.SOFTWARE_VERSION: + + c.execute( 'BEGIN IMMEDIATE' ) + + try: + + self._UpdateDBOld( c, version ) + + if version < 37: + + os.mkdir( HC.SERVER_MESSAGES_DIR ) + + c.execute( 'CREATE TABLE contacts ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, contact_key BLOB, public_key TEXT, PRIMARY KEY( service_id, account_id ) );' ) + c.execute( 'CREATE UNIQUE INDEX contacts_contact_key_index ON contacts ( contact_key );' ) + + c.execute( 'CREATE TABLE messages ( message_key BLOB PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX messages_service_id_account_id_index ON messages ( service_id, account_id );' ) + c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' ) + + + if version < 38: + + c.execute( 'COMMIT' ) + c.execute( 'PRAGMA journal_mode=WAL;' ) # possibly didn't work last time, cause of sqlite dll issue + c.execute( 'BEGIN IMMEDIATE' ) + + c.execute( 'DROP TABLE messages;' ) # blob instead of blob_bytes! + + c.execute( 'CREATE TABLE messages ( message_key BLOB_BYTES PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX messages_service_id_account_id_index ON messages ( service_id, account_id );' ) + c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' ) + + c.execute( 'CREATE TABLE message_statuses ( status_key BLOB_BYTES PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, status BLOB_BYTES, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX message_statuses_service_id_account_id_index ON message_statuses ( service_id, account_id );' ) + c.execute( 'CREATE INDEX message_statuses_timestamp_index ON message_statuses ( timestamp );' ) + + + if version < 40: + + try: c.execute( 'SELECT 1 FROM message_statuses;' ).fetchone() # didn't update dbinit on 38 + except: + + c.execute( 'DROP TABLE messages;' ) # blob instead of blob_bytes! 
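+ # rebuild the messages and message_statuses tables with BLOB_BYTES keys, mirroring the version 38 block above, for databases where that update did not apply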
+ + c.execute( 'CREATE TABLE messages ( message_key BLOB_BYTES PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX messages_service_id_account_id_index ON messages ( service_id, account_id );' ) + c.execute( 'CREATE INDEX messages_timestamp_index ON messages ( timestamp );' ) + + c.execute( 'CREATE TABLE message_statuses ( status_key BLOB_BYTES PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, status BLOB_BYTES, timestamp INTEGER );' ) + c.execute( 'CREATE INDEX message_statuses_service_id_account_id_index ON message_statuses ( service_id, account_id );' ) + c.execute( 'CREATE INDEX message_statuses_timestamp_index ON message_statuses ( timestamp );' ) + + + + if version < 50: + + c.execute( 'CREATE TABLE ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, account_id INTEGER, hash_id INTEGER, rating REAL, PRIMARY KEY( service_id, account_id, hash_id ) );' ) + c.execute( 'CREATE INDEX ratings_hash_id ON ratings ( hash_id );' ) + + c.execute( 'CREATE TABLE ratings_aggregates ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, score INTEGER, count INTEGER, new_timestamp INTEGER, current_timestamp INTEGER, PRIMARY KEY( service_id, hash_id ) );' ) + c.execute( 'CREATE INDEX ratings_aggregates_new_timestamp ON ratings_aggregates ( new_timestamp );' ) + c.execute( 'CREATE INDEX ratings_aggregates_current_timestamp ON ratings_aggregates ( current_timestamp );' ) + + + if version < 51: + + if not os.path.exists( HC.SERVER_UPDATES_DIR ): os.mkdir( HC.SERVER_UPDATES_DIR ) + + all_update_data = c.execute( 'SELECT * FROM update_cache;' ).fetchall() + + c.execute( 'DROP TABLE update_cache;' ) + + c.execute( 'CREATE TABLE update_cache ( service_id INTEGER REFERENCES services ON DELETE CASCADE, begin INTEGER, end INTEGER, update_key TEXT, dirty INTEGER_BOOLEAN, PRIMARY KEY( service_id, begin ) );' ) + c.execute( 'CREATE UNIQUE INDEX update_cache_service_id_end_index ON update_cache ( service_id, end );' ) + c.execute( 'CREATE INDEX update_cache_service_id_dirty_index ON update_cache ( service_id, dirty );' ) + + for ( service_id, begin, end, update, dirty ) in all_update_data: + + update_key_bytes = os.urandom( 32 ) + + update_key = update_key_bytes.encode( 'hex' ) + + with open( HC.SERVER_UPDATES_DIR + os.path.sep + update_key, 'wb' ) as f: f.write( update ) + + c.execute( 'INSERT INTO update_cache ( service_id, begin, end, update_key, dirty ) VALUES ( ?, ?, ?, ?, ? );', ( service_id, begin, end, update_key, dirty ) ) + + + + if version < 56: + + c.execute( 'DROP INDEX mappings_service_id_account_id_index;' ) + c.execute( 'DROP INDEX mappings_service_id_timestamp_index;' ) + + c.execute( 'CREATE INDEX mappings_account_id_index ON mappings ( account_id );' ) + c.execute( 'CREATE INDEX mappings_timestamp_index ON mappings ( timestamp );' ) + + + c.execute( 'UPDATE version SET version = ?;', ( HC.SOFTWARE_VERSION, ) ) + + c.execute( 'COMMIT' ) + + wx.MessageBox( 'The server has updated successfully!' 
) + + except: + + c.execute( 'ROLLBACK' ) + + print( traceback.format_exc() ) + + raise Exception( 'Tried to update the server db, but something went wrong:' + os.linesep + traceback.format_exc() ) + + + + self._UpdateDBOldPost( c, version ) + + + def _UpdateDBOld( self, c, version ): + + if version < 29: + + files_db_path = HC.DB_DIR + os.path.sep + 'server_files.db' + + c.execute( 'COMMIT' ) + + # can't do it inside a transaction + c.execute( 'ATTACH database "' + files_db_path + '" as files_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + os.mkdir( HC.SERVER_FILES_DIR ) + + all_local_files = [ hash_id for ( hash_id, ) in c.execute( 'SELECT hash_id FROM files;' ) ] + + for i in range( 0, len( all_local_files ), 100 ): + + local_files_subset = all_local_files[ i : i + 100 ] + + for hash_id in local_files_subset: + + hash = self._GetHash( c, hash_id ) + + ( file, ) = c.execute( 'SELECT file FROM files WHERE hash_id = ?', ( hash_id, ) ).fetchone() + + path_to = HC.SERVER_FILES_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path_to, 'wb' ) as f: f.write( file ) + + + c.execute( 'DELETE FROM files WHERE hash_id IN ' + HC.SplayListForDB( local_files_subset ) + ';' ) + + c.execute( 'COMMIT' ) + + # slow truncate happens here! + + c.execute( 'BEGIN IMMEDIATE' ) + + + c.execute( 'COMMIT' ) + + # can't do it inside a transaction + c.execute( 'DETACH DATABASE files_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + os.remove( files_db_path ) + + + if version < 30: + + thumbnails_db_path = HC.DB_DIR + os.path.sep + 'server_thumbnails.db' + + c.execute( 'COMMIT' ) + + # can't do it inside a transaction + c.execute( 'ATTACH database "' + thumbnails_db_path + '" as thumbnails_db;' ) + + os.mkdir( HC.SERVER_THUMBNAILS_DIR ) + + all_thumbnails = c.execute( 'SELECT DISTINCT hash_id, hash FROM thumbnails, hashes USING ( hash_id );' ).fetchall() + + for i in range( 0, len( all_thumbnails ), 500 ): + + thumbnails_subset = all_thumbnails[ i : i + 500 ] + + for ( hash_id, hash ) in thumbnails_subset: + + ( thumbnail, ) = c.execute( 'SELECT thumbnail FROM thumbnails WHERE hash_id = ?', ( hash_id, ) ).fetchone() + + path_to = HC.SERVER_THUMBNAILS_DIR + os.path.sep + hash.encode( 'hex' ) + + with open( path_to, 'wb' ) as f: f.write( thumbnail ) + + + + # can't do it inside a transaction + c.execute( 'DETACH DATABASE thumbnails_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + os.remove( thumbnails_db_path ) + + + + def _UpdateDBOldPost( self, c, version ): + + if version == 34: # == is important here + + try: + + main_db_path = HC.DB_DIR + os.path.sep + 'server_main.db' + files_info_db_path = HC.DB_DIR + os.path.sep + 'server_files_info.db' + mappings_db_path = HC.DB_DIR + os.path.sep + 'server_mappings.db' + updates_db_path = HC.DB_DIR + os.path.sep + 'server_updates.db' + + if os.path.exists( main_db_path ): + + # can't do it inside transaction + + c.execute( 'ATTACH database "' + main_db_path + '" as main_db;' ) + c.execute( 'ATTACH database "' + files_info_db_path + '" as files_info_db;' ) + c.execute( 'ATTACH database "' + mappings_db_path + '" as mappings_db;' ) + c.execute( 'ATTACH database "' + updates_db_path + '" as updates_db;' ) + + c.execute( 'BEGIN IMMEDIATE' ) + + c.execute( 'REPLACE INTO main.services SELECT * FROM main_db.services;' ) + + all_service_ids = [ service_id for ( service_id, ) in c.execute( 'SELECT service_id FROM main.services;' ) ] + + c.execute( 'DELETE FROM main_db.account_map WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM 
main_db.account_scores WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.account_type_map WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.bans WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM main_db.news WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'DELETE FROM mappings_db.deleted_mappings WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM mappings_db.mappings WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM mappings_db.mapping_petitions WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'DELETE FROM files_info_db.deleted_files WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM files_info_db.file_map WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM files_info_db.ip_addresses WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + c.execute( 'DELETE FROM files_info_db.file_petitions WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'DELETE FROM updates_db.update_cache WHERE service_id NOT IN ' + HC.SplayListForDB( all_service_ids ) + ';' ) + + c.execute( 'REPLACE INTO main.accounts SELECT * FROM main_db.accounts;' ) + c.execute( 'REPLACE INTO main.account_map SELECT * FROM main_db.account_map;' ) + c.execute( 'REPLACE INTO main.account_scores SELECT * FROM main_db.account_scores;' ) + c.execute( 'REPLACE INTO main.account_type_map SELECT * FROM main_db.account_type_map;' ) + c.execute( 'REPLACE INTO main.account_types SELECT * FROM main_db.account_types;' ) + c.execute( 'REPLACE INTO main.bans SELECT * FROM main_db.bans;' ) + c.execute( 'REPLACE INTO main.hashes SELECT * FROM main_db.hashes;' ) + c.execute( 'REPLACE INTO main.news SELECT * FROM main_db.news;' ) + c.execute( 'REPLACE INTO main.reasons SELECT * FROM main_db.reasons;' ) + c.execute( 'REPLACE INTO main.tags SELECT * FROM main_db.tags;' ) + # don't do version, lol + + c.execute( 'REPLACE INTO main.deleted_mappings SELECT * FROM mappings_db.deleted_mappings;' ) + c.execute( 'REPLACE INTO main.mappings SELECT * FROM mappings_db.mappings;' ) + c.execute( 'REPLACE INTO main.mapping_petitions SELECT * FROM mappings_db.mapping_petitions;' ) + + c.execute( 'REPLACE INTO main.deleted_files SELECT * FROM files_info_db.deleted_files;' ) + c.execute( 'REPLACE INTO main.files_info SELECT * FROM files_info_db.files_info;' ) + c.execute( 'REPLACE INTO main.file_map SELECT * FROM files_info_db.file_map;' ) + c.execute( 'REPLACE INTO main.file_petitions SELECT * FROM files_info_db.file_petitions;' ) + c.execute( 'REPLACE INTO main.ip_addresses SELECT * FROM files_info_db.ip_addresses;' ) + + c.execute( 'REPLACE INTO main.update_cache SELECT * FROM updates_db.update_cache;' ) + + c.execute( 'COMMIT' ) + + c.execute( 'DETACH database main_db;' ) + c.execute( 'DETACH database files_info_db;' ) + c.execute( 'DETACH database mappings_db;' ) + c.execute( 'DETACH database updates_db;' ) + + os.remove( main_db_path ) + os.remove( mappings_db_path ) + os.remove( files_info_db_path ) + os.remove( updates_db_path ) + + + except: + + print( traceback.format_exc() ) + + try: c.execute( 'ROLLBACK' ) + except: pass + + raise Exception( 'Tried to update the server db, 
but something went wrong:' + os.linesep + traceback.format_exc() ) + + + + + def AddJobServer( self, service_identifier, account_identifier, ip, request_type, request, request_args, request_length ): + + priority = HC.HIGH_PRIORITY + + job = HC.JobServer( service_identifier, account_identifier, ip, request_type, request, request_args, request_length ) + + self._jobs.put( ( priority, job ) ) + + if not HC.shutdown: return job.GetResult() + + raise Exception( 'Application quit before db could serve result!' ) + + + def DAEMONCheckDataUsage( self ): self.Write( 'check_data_usage', HC.LOW_PRIORITY ) + + def DAEMONCheckMonthlyData( self ): self.Write( 'check_monthly_data', HC.LOW_PRIORITY ) + + def DAEMONClearBans( self ): self.Write( 'clear_bans', HC.LOW_PRIORITY ) + + def DAEMONDeleteOrphans( self ): self.Write( 'delete_orphans', HC.LOW_PRIORITY ) + + def DAEMONFlushRequestsMade( self, all_services_requests ): self.Write( 'flush_requests_made', HC.LOW_PRIORITY, all_services_requests ) + + def DAEMONGenerateUpdates( self ): + + dirty_updates = self.Read( 'dirty_updates', HC.LOW_PRIORITY ) + + for ( service_id, begin, end ) in dirty_updates: self.Write( 'clean_update', HC.LOW_PRIORITY, service_id, begin, end ) + + update_ends = self.Read( 'update_ends', HC.LOW_PRIORITY ) + + for ( service_id, biggest_end ) in update_ends: + + now = int( time.time() ) + + next_begin = biggest_end + 1 + next_end = biggest_end + HC.UPDATE_DURATION + + while next_end < now: + + self.Write( 'create_update', HC.LOW_PRIORITY, service_id, next_begin, next_end ) + + biggest_end = next_end + + now = int( time.time() ) + + next_begin = biggest_end + 1 + next_end = biggest_end + HC.UPDATE_DURATION + + + + + def _MainLoop_JobInternal( self, c, job ): + + job_type = job.GetType() + + if job_type in ( 'read', 'read_write' ): + + if job_type == 'read': c.execute( 'BEGIN DEFERRED' ) + else: c.execute( 'BEGIN IMMEDIATE' ) + + try: + + action = job.GetAction() + + result = self._MainLoop_Read( c, action ) + + c.execute( 'COMMIT' ) + + for ( topic, args, kwargs ) in self._pubsubs: HC.pubsub.pub( topic, *args, **kwargs ) + + job.PutResult( result ) + + except Exception as e: + + c.execute( 'ROLLBACK' ) + + print( 'while attempting a read on the database, the hydrus client encountered the following problem:' ) + print( traceback.format_exc() ) + + ( exception_type, value, tb ) = sys.exc_info() + + new_e = type( e )( os.linesep.join( traceback.format_exception( exception_type, value, tb ) ) ) + + job.PutResult( new_e ) + + + else: + + if job_type == 'write': c.execute( 'BEGIN IMMEDIATE' ) + + try: + + action = job.GetAction() + + args = job.GetArgs() + + self._MainLoop_Write( c, action, args ) + + if job_type == 'write': c.execute( 'COMMIT' ) + + for ( topic, args, kwargs ) in self._pubsubs: HC.pubsub.pub( topic, *args, **kwargs ) + + except Exception as e: + + if job_type == 'write': c.execute( 'ROLLBACK' ) + + print( 'while attempting a write on the database, the hydrus client encountered the following problem:' ) + print( traceback.format_exc() ) + + + + + def _MainLoop_JobServer( self, c, job ): + + ( service_identifier, account_identifier, ip, request_type, request, request_args, request_length ) = job.GetInfo() + + service_type = service_identifier.GetType() + + if ( service_type, request_type, request ) in HC.BANDWIDTH_CONSUMING_REQUESTS and ( self._over_monthly_data or service_identifier in self._services_over_monthly_data ): job.PutResult( HC.PermissionException( 'This service has exceeded its monthly data allowance, please 
check back on the 1st.' ) ) + else: + + if request_type == HC.GET and request != 'accesskeys': c.execute( 'BEGIN DEFERRED' ) + else: c.execute( 'BEGIN IMMEDIATE' ) + + try: + + service_id = self._GetServiceId( c, service_identifier ) + + permissions = HC.REQUESTS_TO_PERMISSIONS[ ( service_type, request_type, request ) ] + + if permissions is not None or request == 'account': + + account = self._GetAccount( c, service_id, account_identifier ) + + if permissions is not None: account.CheckPermissions( permissions ) + + else: account = None + + if request_type == HC.GET: response_context = self._MainLoop_Get( c, request, service_type, service_id, account, request_args ) + else: + + self._MainLoop_Post( c, request, service_type, service_id, account, ip, request_args ) + + response_context = HC.ResponseContext( 200 ) + + + c.execute( 'COMMIT' ) + + for ( topic, args, kwargs ) in self._pubsubs: HC.pubsub.pub( topic, *args, **kwargs ) + + if ( service_type, request_type, request ) in HC.BANDWIDTH_CONSUMING_REQUESTS: + + if request_type == HC.GET: HC.pubsub.pub( 'request_made', ( service_identifier, account, response_context.GetLength() ) ) + elif request_type == HC.POST: HC.pubsub.pub( 'request_made', ( service_identifier, account, request_length ) ) + + + job.PutResult( response_context ) + + except Exception as e: + + c.execute( 'ROLLBACK' ) + + ( exception_type, value, tb ) = sys.exc_info() + + new_e = type( e )( os.linesep.join( traceback.format_exception( exception_type, value, tb ) ) ) + + job.PutResult( new_e ) + + + + + def _MainLoop_Get( self, c, request, service_type, service_id, account, request_args ): + + try: account_id = account.GetAccountId() + except: pass + + if request == 'accesskeys': + + num = request_args[ 'num' ] + title = request_args[ 'title' ] + expiration = request_args[ 'expiration' ] + + account_type_id = self._GetAccountTypeId( c, service_id, title ) + + access_keys = self._GenerateAccessKeys( c, service_id, num, account_type_id, expiration ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( access_keys ) ) + + elif request == 'account': response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( account ) ) + elif request == 'accountinfo': + + subject_identifier = request_args[ 'subject_identifier' ] + + subject = self._GetAccount( c, service_id, subject_identifier ) + + subject_account_id = subject.GetAccountId() + + if service_type == HC.FILE_REPOSITORY: subject_info = self._GetAccountFileInfo( c, service_id, subject_account_id ) + elif service_type == HC.TAG_REPOSITORY: subject_info = self._GetAccountMappingInfo( c, service_id, subject_account_id ) + else: subject_info = {} + + subject_info[ 'account' ] = subject + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( subject_info ) ) + + elif request == 'accounttypes': + + account_types = self._GetAccountTypes( c, service_id ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( account_types ) ) + + elif request == 'file': + + hash = request_args[ 'hash' ] + + file = self._GetFile( hash ) + + mime = HC.GetMimeFromString( file ) + + response_context = HC.ResponseContext( 200, mime = mime, body = file, filename = hash.encode( 'hex' ) + HC.mime_ext_lookup[ mime ] ) + + elif request == 'init': + + if c.execute( 'SELECT 1 FROM account_map WHERE service_id = ?;', ( service_id, ) ).fetchone() is not None: raise HC.ForbiddenException( 'This server is 
already initialised!' ) + + account_type_id = self._GetAccountTypeId( c, service_id, 'server admin' ) + + ( access_key, ) = self._GenerateAccessKeys( c, service_id, 1, account_type_id, None ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( access_key ) ) + + elif request == 'ip': + + hash = request_args[ 'hash' ] + + hash_id = self._GetHashId( c, hash ) + + ( ip, timestamp ) = self._GetIPTimestamp( c, service_id, hash_id ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( ( ip, timestamp ) ) ) + + elif request == 'message': + + message_key = request_args[ 'message_key' ] + + message = self._GetMessage( c, service_id, account_id, message_key ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_OCTET_STREAM, body = message ) + + elif request == 'messageinfosince': + + timestamp = request_args[ 'since' ] + + ( message_keys, statuses ) = self._GetMessageInfoSince( c, service_id, account_id, timestamp ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( ( message_keys, statuses ) ) ) + + elif request == 'numpetitions': + + if service_type == HC.FILE_REPOSITORY: num_petitions = self._GetNumFilePetitions( c, service_id ) + elif service_type == HC.TAG_REPOSITORY: num_petitions = self._GetNumMappingPetitions( c, service_id ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( num_petitions ) ) + + elif request == 'options': + + options = self._GetOptions( c, service_id ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( options ) ) + + elif request == 'petition': + + if service_type == HC.FILE_REPOSITORY: petition = self._GetFilePetition( c, service_id ) + if service_type == HC.TAG_REPOSITORY: petition = self._GetMappingPetition( c, service_id ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( petition ) ) + + elif request == 'publickey': + + contact_key = request_args[ 'contact_key' ] + + public_key = self._GetPublicKey( c, contact_key ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( public_key ) ) + + elif request == 'services': + + service_identifiers = self._GetServiceIdentifiers( c ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( service_identifiers ) ) + + elif request == 'stats': + + stats = self._GetRestrictedServiceStats( c, service_id ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( stats ) ) + + elif request == 'thumbnail': + + hash = request_args[ 'hash' ] + + thumbnail = self._GetThumbnail( hash ) + + mime = HC.GetMimeFromString( thumbnail ) + + response_context = HC.ResponseContext( 200, mime = mime, body = thumbnail, filename = hash.encode( 'hex' ) + '_thumbnail' + HC.mime_ext_lookup[ mime ] ) + + elif request == 'update': + + begin = request_args[ 'begin' ] + + update = self._GetCachedUpdate( c, service_id, begin ) + + response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = update ) # note that we don't yaml.safe_dump, because it already is! 
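# Illustrative sketch (assumed shape, not taken from the original handler): every GET
# branch above packages its result in an HC.ResponseContext. Most branches serialise the
# payload with yaml.safe_dump; the 'update' branch returns the cached blob verbatim
# because, as the comment above notes, it was already dumped to YAML when the update was
# built. A hypothetical branch following the same pattern would look like:
#
#     payload = { 'example_key': 'example_value' } # hypothetical data, for illustration only
#     response_context = HC.ResponseContext( 200, mime = HC.APPLICATION_YAML, body = yaml.safe_dump( payload ) )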
+ + + return response_context + + + def _MainLoop_Post( self, c, request, service_type, service_id, account, ip, request_args ): + + try: account_id = account.GetAccountId() + except: pass + + if request == 'accountmodification': + + action = request_args[ 'action' ] + subject_identifiers = request_args[ 'subject_identifiers' ] + + admin_account_id = account.GetAccountId() + + subjects = [ self._GetAccount( c, service_id, subject_identifier ) for subject_identifier in subject_identifiers ] + + subject_account_ids = [ subject.GetAccountId() for subject in subjects ] + + if action in ( HC.BAN, HC.SUPERBAN ): + + reason = request_args[ 'reason' ] + + reason_id = self._GetReasonId( c, reason ) + + if expiration in request_args: expiration = request_args[ 'expiration' ] + else: expiration = None + + self._Ban( c, service_id, action, admin_account_id, subject_account_ids, reason_id, expiration ) # fold ban and superban together, yo + + else: + + account.CheckPermissions( HC.GENERAL_ADMIN ) # special case, don't let manage_users people do these: + + if action == HC.CHANGE_ACCOUNT_TYPE: + + title = request_args[ 'title' ] + + account_type_id = self._GetAccountTypeId( c, service_id, title ) + + self._ChangeAccountType( c, service_id, subject_account_ids, account_type_id ) + + elif action == HC.ADD_TO_EXPIRES: + + expiration = request_args[ 'expiration' ] + + self._AddToExpires( c, service_id, subject_account_ids, expiration ) + + elif action == HC.SET_EXPIRES: + + expires = request_args[ 'expiry' ] + + self._SetExpires( c, service_id, subject_account_ids, expires ) + + + + elif request == 'accounttypesmodification': + + edit_log = request_args[ 'edit_log' ] + + self._ModifyAccountTypes( c, service_id, edit_log ) + + elif request == 'backup': self._MakeBackup( c ) + elif request == 'contact': + + public_key = request_args[ 'public_key' ] + + self._CreateContact( c, service_id, account_id, public_key ) + + elif request == 'file': + + request_args[ 'ip' ] = ip + + self._AddFile( c, service_id, account_id, request_args ) + + elif request == 'mappings': + + mappings = request_args[ 'mappings' ] + + tags = mappings.GetTags() + + hashes = mappings.GetHashes() + + self._GenerateTagIdsEfficiently( c, tags ) + + self._GenerateHashIdsEfficiently( c, hashes ) + + overwrite_deleted = account.HasPermission( HC.RESOLVE_PETITIONS ) + + for ( tag, hashes ) in mappings: + + tag_id = self._GetTagId( c, tag ) + + hash_ids = self._GetHashIds( c, hashes ) + + self._AddMappings( c, service_id, account_id, tag_id, hash_ids, overwrite_deleted ) + + + elif request == 'message': + + contact_key = request_args[ 'contact_key' ] + message = request_args[ 'message' ] + + self._AddMessage( c, contact_key, message ) + + elif request == 'message_statuses': + + contact_key = request_args[ 'contact_key' ] + statuses = request_args[ 'statuses' ] + + self._AddStatuses( c, contact_key, statuses ) + + elif request == 'news': + + news = request_args[ 'news' ] + + self._AddNews( c, service_id, news ) + + elif request == 'options': + + options = request_args[ 'options' ] + + self._SetOptions( c, service_id, service_identifier, options ) + + elif request == 'petitiondenial': + + petition_denial = request_args[ 'petition_denial' ] + + if service_type == HC.FILE_REPOSITORY: + + hashes = petition_denial.GetInfo() + + hash_ids = self._GetHashIds( c, hashes ) + + self._DenyFilePetition( c, service_id, hash_ids ) + + elif service_type == HC.TAG_REPOSITORY: + + ( tag, hashes ) = petition_denial.GetInfo() + + tag_id = self._GetTagId( c, tag ) + + 
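# Hedged note on the step below: for a tag repository, a petition denial carries a
# ( tag, hashes ) pair; the tag and the hashes are first resolved to their integer ids
# (the same _GetTagId / _GetHashIds pattern used throughout these handlers) and then
# passed to _DenyMappingPetition, which presumably drops the matching pending petition
# records without applying them; the denial internals are not shown in this part of the diff.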
hash_ids = self._GetHashIds( c, hashes ) + + self._DenyMappingPetition( c, service_id, tag_id, hash_ids ) + + + elif request == 'petitions': + + petitions = request_args[ 'petitions' ] + + if service_type == HC.FILE_REPOSITORY: + + if account.HasPermission( HC.RESOLVE_PETITIONS ): petition_method = self._ApproveFilePetition + elif account.HasPermission( HC.POST_PETITIONS ): petition_method = self._AddFilePetition + + for ( reason, hashes ) in petitions: + + reason_id = self._GetReasonId( c, reason ) + + hash_ids = self._GetHashIds( c, hashes ) + + petition_method( c, service_id, account_id, reason_id, hash_ids ) + + + elif service_type == HC.TAG_REPOSITORY: + + if account.HasPermission( HC.RESOLVE_PETITIONS ): petition_method = self._ApproveMappingPetition + elif account.HasPermission( HC.POST_PETITIONS ): petition_method = self._AddMappingPetition + + for ( reason, tag, hashes ) in petitions: + + reason_id = self._GetReasonId( c, reason ) + + tag_id = self._GetTagId( c, tag ) + + hash_ids = self._GetHashIds( c, hashes ) + + petition_method( c, service_id, account_id, reason_id, tag_id, hash_ids ) + + + + elif request == 'servicesmodification': + + edit_log = request_args[ 'edit_log' ] + + self._ModifyServices( c, account_id, edit_log ) + + + + def _MainLoop_Read( self, c, action ): + + if action == 'dirty_updates': return self._GetDirtyUpdates( c ) + elif action == 'update_ends': return self._GetUpdateEnds( c ) + else: raise Exception( 'db received an unknown read command: ' + action ) + + + def _MainLoop_Write( self, c, action, args ): + + if action == 'check_data_usage': self._CheckDataUsage( c ) + elif action == 'check_monthly_data': self._CheckMonthlyData( c ) + elif action == 'clear_bans': self._ClearBans( c ) + elif action == 'delete_orphans': self._DeleteOrphans( c ) + elif action == 'flush_requests_made': self._FlushRequestsMade( c, *args ) + elif action == 'clean_update': self._CleanUpdate( c, *args ) + elif action == 'create_update': self._CreateUpdate( c, *args ) + else: raise Exception( 'db received an unknown write command: ' + action ) + + + def MainLoop( self ): + + ( db, c ) = self._GetDBCursor() + + while not HC.shutdown or not self._jobs.empty(): + + try: + + ( priority, job ) = self._jobs.get( timeout = 1 ) + + self._pubsubs = [] + + try: + + if isinstance( job, HC.JobServer ): self._MainLoop_JobServer( c, job ) + else: self._MainLoop_JobInternal( c, job ) + + except: + + self._jobs.put( ( priority, job ) ) # couldn't lock db; put job back on queue + + time.sleep( 5 ) + + + except: pass # no jobs this second; let's see if we should shutdown + + + + def Read( self, action, priority, *args, **kwargs ): + + job_type = 'read' + + job = HC.JobInternal( action, job_type, *args, **kwargs ) + + self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first + + if action != 'do_query': return job.GetResult() + + + def Write( self, action, priority, *args, **kwargs ): + + job_type = 'write' + + job = HC.JobInternal( action, job_type, *args, **kwargs ) + + self._jobs.put( ( priority, job ) ) + + \ No newline at end of file diff --git a/include/__init__.py b/include/__init__.py new file mode 100755 index 00000000..e69de29b diff --git a/include/hexagonitswfheader.py b/include/hexagonitswfheader.py new file mode 100755 index 00000000..438e64a1 --- /dev/null +++ b/include/hexagonitswfheader.py @@ -0,0 +1,106 @@ +import struct +import zlib + + +def parse(input): + """Parses the header information from an SWF file.""" + if hasattr(input, 'read'): + 
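# Descriptive note: parse() accepts either an already-open file-like object (it is
# rewound with seek(0) below) or a path string, which is opened in binary mode; the
# rest of the function then reads the SWF header fields from that handle.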
input.seek(0) + else: + input = open(input, 'rb') + + def read_ui8(c): + return struct.unpack('> 3 + + current_byte, buffer = read_ui8(buffer[0]), buffer[1:] + bit_cursor = 5 + + for item in 'xmin', 'xmax', 'ymin', 'ymax': + value = 0 + for value_bit in range(nbits - 1, -1, -1): # == reversed(range(nbits)) + if (current_byte << bit_cursor) & 0x80: + value |= 1 << value_bit + # Advance the bit cursor to the next bit + bit_cursor += 1 + + if bit_cursor > 7: + # We've exhausted the current byte, consume the next one + # from the buffer. + current_byte, buffer = read_ui8(buffer[0]), buffer[1:] + bit_cursor = 0 + + # Convert value from TWIPS to a pixel value + header[item] = value / 20 + + header['width'] = header['xmax'] - header['xmin'] + header['height'] = header['ymax'] - header['ymin'] + + # Appendix A of the SWF specification states the following about the FPS + # field: + # + # [FPS] is supposed to be stored as a 16-bit integer, but the first + # byte (or last depending on how you look at it) is completely ignored. + # + # We handle this by parsing the value as UI16 and taking only the first + # byte as the value. + header['fps'] = read_ui16(buffer[0:2]) >> 8 + header['frames'] = read_ui16(buffer[2:4]) + + input.close() + return header + + +def main(): + import sys + + if len(sys.argv) < 2: + print 'Usage: %s [SWF file]' % sys.argv[0] + sys.exit(1) + + header = parse(sys.argv[1]) + print 'SWF header' + print '----------' + print 'Version: %s' % header['version'] + print 'Compression: %s' % header['compressed'] + print 'Dimensions: %s x %s' % (header['width'], header['height']) + print 'Bounding box: (%s, %s, %s, %s)' % (header['xmin'], header['xmax'], header['ymin'], header['ymax']) + print 'Frames: %s' % header['frames'] + print 'FPS: %s' % header['fps'] + + +if __name__ == '__main__': + main() diff --git a/include/multipart.py b/include/multipart.py new file mode 100755 index 00000000..c7d9c50f --- /dev/null +++ b/include/multipart.py @@ -0,0 +1,147 @@ +''' +Classes for using multipart form data from Python, which does not (at the +time of writing) support this directly. + +To use this, make an instance of Multipart and add parts to it via the factory +methods field and file. When you are done, get the content via the get method. + +@author: Stacy Prowell (http://stacyprowell.com) +''' + +import mimetypes + +class Part(object): + ''' + Class holding a single part of the form. You should never need to use + this class directly; instead, use the factory methods in Multipart: + field and file. + ''' + + # The boundary to use. This is shamelessly taken from the standard. + BOUNDARY = '----------AaB03x' + CRLF = '\r\n' + # Common headers. + CONTENT_TYPE = 'Content-Type' + CONTENT_DISPOSITION = 'Content-Disposition' + # The default content type for parts. + DEFAULT_CONTENT_TYPE = 'application/octet-stream' + + def __init__(self, name, filename, body, headers): + ''' + Make a new part. The part will have the given headers added initially. + + @param name: The part name. + @type name: str + @param filename: If this is a file, the name of the file. Otherwise + None. + @type filename: str + @param body: The body of the part. + @type body: str + @param headers: Additional headers, or overrides, for this part. + You can override Content-Type here. + @type headers: dict + ''' + self._headers = headers.copy() + self._name = name + self._filename = filename + self._body = body + # We respect any content type passed in, but otherwise set it here. 
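# Descriptive note with an illustrative example: for a plain field the branch below emits
#
#     Content-Disposition: form-data; name="..."
#
# and lets Content-Type default to application/octet-stream, while a file part also gets
# the filename in its disposition, e.g.
#
#     Content-Disposition: form-data; name="upfile"; filename="example.jpg"
#
# with the type guessed from the filename via mimetypes.guess_type; the values "upfile"
# and "example.jpg" are hypothetical, used only to show the shape of the headers.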
+ # We set the content disposition now, overwriting any prior value. + if self._filename == None: + self._headers[Part.CONTENT_DISPOSITION] = \ + ('form-data; name="%s"' % self._name) + self._headers.setdefault(Part.CONTENT_TYPE, + Part.DEFAULT_CONTENT_TYPE) + else: + self._headers[Part.CONTENT_DISPOSITION] = \ + ('form-data; name="%s"; filename="%s"' % + (self._name, self._filename)) + self._headers.setdefault(Part.CONTENT_TYPE, + mimetypes.guess_type(filename)[0] + or Part.DEFAULT_CONTENT_TYPE) + return + + def get(self): + ''' + Convert the part into a list of lines for output. This includes + the boundary lines, part header lines, and the part itself. A + blank line is included between the header and the body. + + @return: Lines of this part. + @rtype: list + ''' + lines = [] + lines.append('--' + Part.BOUNDARY) + for (key, val) in self._headers.items(): + lines.append('%s: %s' % (key, val)) + lines.append('') + lines.append(self._body) + return lines + +class Multipart(object): + ''' + Encapsulate multipart form data. To use this, make an instance and then + add parts to it via the two methods (field and file). When done, you can + get the result via the get method. + + See http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2 for + details on multipart/form-data. + + Watch http://bugs.python.org/issue3244 to see if this is fixed in the + Python libraries. + + @return: content type, body + @rtype: tuple + ''' + + def __init__(self): + self.parts = [] + return + + def field(self, name, value, headers={}): + ''' + Create and append a field part. This kind of part has a field name + and value. + + @param name: The field name. + @type name: str + @param value: The field value. + @type value: str + @param headers: Headers to set in addition to disposition. + @type headers: dict + ''' + self.parts.append(Part(name, None, value, headers)) + return + + def file(self, name, filename, value, headers={}): + ''' + Create and append a file part. THis kind of part has a field name, + a filename, and a value. + + @param name: The field name. + @type name: str + @param value: The field value. + @type value: str + @param headers: Headers to set in addition to disposition. + @type headers: dict + ''' + self.parts.append(Part(name, filename, value, headers)) + return + + def get(self): + ''' + Get the multipart form data. This returns the content type, which + specifies the boundary marker, and also returns the body containing + all parts and bondary markers. + + @return: content type, body + @rtype: tuple + ''' + all = [] + for part in self.parts: + all += part.get() + all.append('--' + Part.BOUNDARY + '--') + all.append('') + # We have to return the content type, since it specifies the boundary. + content_type = 'multipart/form-data; boundary=%s' % Part.BOUNDARY + return content_type, Part.CRLF.join(all) \ No newline at end of file diff --git a/include/pyconfig.h b/include/pyconfig.h new file mode 100755 index 00000000..dd11699c --- /dev/null +++ b/include/pyconfig.h @@ -0,0 +1,759 @@ +#ifndef Py_CONFIG_H +#define Py_CONFIG_H + +/* pyconfig.h. NOT Generated automatically by configure. + +This is a manually maintained version used for the Watcom, +Borland and Microsoft Visual C++ compilers. It is a +standard part of the Python distribution. 
+ +WINDOWS DEFINES: +The code specific to Windows should be wrapped around one of +the following #defines + +MS_WIN64 - Code specific to the MS Win64 API +MS_WIN32 - Code specific to the MS Win32 (and Win64) API (obsolete, this covers all supported APIs) +MS_WINDOWS - Code specific to Windows, but all versions. +MS_WINCE - Code specific to Windows CE +Py_ENABLE_SHARED - Code if the Python core is built as a DLL. + +Also note that neither "_M_IX86" or "_MSC_VER" should be used for +any purpose other than "Windows Intel x86 specific" and "Microsoft +compiler specific". Therefore, these should be very rare. + + +NOTE: The following symbols are deprecated: +NT, USE_DL_EXPORT, USE_DL_IMPORT, DL_EXPORT, DL_IMPORT +MS_CORE_DLL. + +WIN32 is still required for the locale module. + +*/ + +#ifdef _WIN32_WCE +#define MS_WINCE +#endif + +/* Deprecated USE_DL_EXPORT macro - please use Py_BUILD_CORE */ +#ifdef USE_DL_EXPORT +# define Py_BUILD_CORE +#endif /* USE_DL_EXPORT */ + +/* Visual Studio 2005 introduces deprecation warnings for + "insecure" and POSIX functions. The insecure functions should + be replaced by *_s versions (according to Microsoft); the + POSIX functions by _* versions (which, according to Microsoft, + would be ISO C conforming). Neither renaming is feasible, so + we just silence the warnings. */ + +#ifndef _CRT_SECURE_NO_DEPRECATE +#define _CRT_SECURE_NO_DEPRECATE 1 +#endif +#ifndef _CRT_NONSTDC_NO_DEPRECATE +#define _CRT_NONSTDC_NO_DEPRECATE 1 +#endif + +/* Windows CE does not have these */ +#ifndef MS_WINCE +#define HAVE_IO_H +#define HAVE_SYS_UTIME_H +#define HAVE_TEMPNAM +#define HAVE_TMPFILE +#define HAVE_TMPNAM +#define HAVE_CLOCK +#define HAVE_STRERROR +#endif + +#ifdef HAVE_IO_H +#include +#endif + +#define HAVE_HYPOT +#define HAVE_STRFTIME +#define DONT_HAVE_SIG_ALARM +#define DONT_HAVE_SIG_PAUSE +#define LONG_BIT 32 +#define WORD_BIT 32 +#define PREFIX "" +#define EXEC_PREFIX "" + +#define MS_WIN32 /* only support win32 and greater. */ +#define MS_WINDOWS +#ifndef PYTHONPATH +# define PYTHONPATH ".\\DLLs;.\\lib;.\\lib\\plat-win;.\\lib\\lib-tk" +#endif +#define NT_THREADS +#define WITH_THREAD +#ifndef NETSCAPE_PI +#define USE_SOCKET +#endif + +/* CE6 doesn't have strdup() but _strdup(). Assume the same for earlier versions. */ +#if defined(MS_WINCE) +# include +# define strdup _strdup +#endif + +#ifdef MS_WINCE +/* Windows CE does not support environment variables */ +#define getenv(v) (NULL) +#define environ (NULL) +#endif + +/* Compiler specific defines */ + +/* ------------------------------------------------------------------------*/ +/* Microsoft C defines _MSC_VER */ +#ifdef _MSC_VER + +/* We want COMPILER to expand to a string containing _MSC_VER's *value*. + * This is horridly tricky, because the stringization operator only works + * on macro arguments, and doesn't evaluate macros passed *as* arguments. + * Attempts simpler than the following appear doomed to produce "_MSC_VER" + * literally in the string. + */ +#define _Py_PASTE_VERSION(SUFFIX) \ + ("[MSC v." 
_Py_STRINGIZE(_MSC_VER) " " SUFFIX "]") +/* e.g., this produces, after compile-time string catenation, + * ("[MSC v.1200 32 bit (Intel)]") + * + * _Py_STRINGIZE(_MSC_VER) expands to + * _Py_STRINGIZE1((_MSC_VER)) expands to + * _Py_STRINGIZE2(_MSC_VER) but as this call is the result of token-pasting + * it's scanned again for macros and so further expands to (under MSVC 6) + * _Py_STRINGIZE2(1200) which then expands to + * "1200" + */ +#define _Py_STRINGIZE(X) _Py_STRINGIZE1((X)) +#define _Py_STRINGIZE1(X) _Py_STRINGIZE2 ## X +#define _Py_STRINGIZE2(X) #X + +/* MSVC defines _WINxx to differentiate the windows platform types + + Note that for compatibility reasons _WIN32 is defined on Win32 + *and* on Win64. For the same reasons, in Python, MS_WIN32 is + defined on Win32 *and* Win64. Win32 only code must therefore be + guarded as follows: + #if defined(MS_WIN32) && !defined(MS_WIN64) + Some modules are disabled on Itanium processors, therefore we + have MS_WINI64 set for those targets, otherwise MS_WINX64 +*/ +#ifdef _WIN64 +#define MS_WIN64 +#endif + +/* set the COMPILER */ +#ifdef MS_WIN64 +#if defined(_M_IA64) +#define COMPILER _Py_PASTE_VERSION("64 bit (Itanium)") +#define MS_WINI64 +#elif defined(_M_X64) || defined(_M_AMD64) +#define COMPILER _Py_PASTE_VERSION("64 bit (AMD64)") +#define MS_WINX64 +#else +#define COMPILER _Py_PASTE_VERSION("64 bit (Unknown)") +#endif +#endif /* MS_WIN64 */ + +/* set the version macros for the windows headers */ +#ifdef MS_WINX64 +/* 64 bit only runs on XP or greater */ +#define Py_WINVER _WIN32_WINNT_WINXP +#define Py_NTDDI NTDDI_WINXP +#else +/* Python 2.6+ requires Windows 2000 or greater */ +#ifdef _WIN32_WINNT_WIN2K +#define Py_WINVER _WIN32_WINNT_WIN2K +#else +#define Py_WINVER 0x0500 +#endif +#define Py_NTDDI NTDDI_WIN2KSP4 +#endif + +/* We only set these values when building Python - we don't want to force + these values on extensions, as that will affect the prototypes and + structures exposed in the Windows headers. Even when building Python, we + allow a single source file to override this - they may need access to + structures etc so it can optionally use new Windows features if it + determines at runtime they are available. +*/ +#if defined(Py_BUILD_CORE) || defined(Py_BUILD_CORE_MODULE) +#ifndef NTDDI_VERSION +#define NTDDI_VERSION Py_NTDDI +#endif +#ifndef WINVER +#define WINVER Py_WINVER +#endif +#ifndef _WIN32_WINNT +#define _WIN32_WINNT Py_WINVER +#endif +#endif + +/* _W64 is not defined for VC6 or eVC4 */ +#ifndef _W64 +#define _W64 +#endif + +/* Define like size_t, omitting the "unsigned" */ +#ifdef MS_WIN64 +typedef __int64 ssize_t; +#else +typedef _W64 int ssize_t; +#endif +#define HAVE_SSIZE_T 1 + +#if defined(MS_WIN32) && !defined(MS_WIN64) +#ifdef _M_IX86 +#define COMPILER _Py_PASTE_VERSION("32 bit (Intel)") +#else +#define COMPILER _Py_PASTE_VERSION("32 bit (Unknown)") +#endif +#endif /* MS_WIN32 && !MS_WIN64 */ + +typedef int pid_t; + +#include +#define Py_IS_NAN _isnan +#define Py_IS_INFINITY(X) (!_finite(X) && !_isnan(X)) +#define Py_IS_FINITE(X) _finite(X) +#define copysign _copysign +#define hypot _hypot + +#endif /* _MSC_VER */ + +/* define some ANSI types that are not defined in earlier Win headers */ +#if defined(_MSC_VER) && _MSC_VER >= 1200 +/* This file only exists in VC 6.0 or higher */ +#include +#endif + +/* ------------------------------------------------------------------------*/ +/* The Borland compiler defines __BORLANDC__ */ +/* XXX These defines are likely incomplete, but should be easy to fix. 
*/ +#ifdef __BORLANDC__ +#define COMPILER "[Borland]" + +#ifdef _WIN32 +/* tested with BCC 5.5 (__BORLANDC__ >= 0x0550) + */ + +typedef int pid_t; +/* BCC55 seems to understand __declspec(dllimport), it is used in its + own header files (winnt.h, ...) - so we can do nothing and get the default*/ + +#undef HAVE_SYS_UTIME_H +#define HAVE_UTIME_H +#define HAVE_DIRENT_H + +/* rename a few functions for the Borland compiler */ +#include +#define _chsize chsize +#define _setmode setmode + +#else /* !_WIN32 */ +#error "Only Win32 and later are supported" +#endif /* !_WIN32 */ + +#endif /* BORLANDC */ + +/* ------------------------------------------------------------------------*/ +/* egcs/gnu-win32 defines __GNUC__ and _WIN32 */ +#if defined(__GNUC__) && defined(_WIN32) +/* XXX These defines are likely incomplete, but should be easy to fix. + They should be complete enough to build extension modules. */ +/* Suggested by Rene Liebscher to avoid a GCC 2.91.* + bug that requires structure imports. More recent versions of the + compiler don't exhibit this bug. +*/ +#if (__GNUC__==2) && (__GNUC_MINOR__<=91) +#warning "Please use an up-to-date version of gcc! (>2.91 recommended)" +#endif + +#define COMPILER "[gcc]" +#define hypot _hypot +#define PY_LONG_LONG long long +#define PY_LLONG_MIN LLONG_MIN +#define PY_LLONG_MAX LLONG_MAX +#define PY_ULLONG_MAX ULLONG_MAX +#endif /* GNUC */ + +/* ------------------------------------------------------------------------*/ +/* lcc-win32 defines __LCC__ */ +#if defined(__LCC__) +/* XXX These defines are likely incomplete, but should be easy to fix. + They should be complete enough to build extension modules. */ + +#define COMPILER "[lcc-win32]" +typedef int pid_t; +/* __declspec() is supported here too - do nothing to get the defaults */ + +#endif /* LCC */ + +/* ------------------------------------------------------------------------*/ +/* End of compilers - finish up */ + +#ifndef NO_STDIO_H +# include +#endif + +/* 64 bit ints are usually spelt __int64 unless compiler has overridden */ +#define HAVE_LONG_LONG 1 +#ifndef PY_LONG_LONG +# define PY_LONG_LONG __int64 +# define PY_LLONG_MAX _I64_MAX +# define PY_LLONG_MIN _I64_MIN +# define PY_ULLONG_MAX _UI64_MAX +#endif + +/* For Windows the Python core is in a DLL by default. Test +Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ +#if !defined(MS_NO_COREDLL) && !defined(Py_NO_ENABLE_SHARED) +# define Py_ENABLE_SHARED 1 /* standard symbol for shared library */ +# define MS_COREDLL /* deprecated old symbol */ +#endif /* !MS_NO_COREDLL && ... */ + +/* All windows compilers that use this header support __declspec */ +#define HAVE_DECLSPEC_DLL + +/* For an MSVC DLL, we can nominate the .lib files used by extensions */ +#ifdef MS_COREDLL +# ifndef Py_BUILD_CORE /* not building the core - must be an ext */ +# if defined(_MSC_VER) + /* So MSVC users need not specify the .lib file in + their Makefile (other compilers are generally + taken care of by distutils.) 
*/ +# ifdef _DEBUG +# pragma comment(lib,"python27_d.lib") +# else +# pragma comment(lib,"python27.lib") +# endif /* _DEBUG */ +# endif /* _MSC_VER */ +# endif /* Py_BUILD_CORE */ +#endif /* MS_COREDLL */ + +#if defined(MS_WIN64) +/* maintain "win32" sys.platform for backward compatibility of Python code, + the Win64 API should be close enough to the Win32 API to make this + preferable */ +# define PLATFORM "win32" +# define SIZEOF_VOID_P 8 +# define SIZEOF_TIME_T 8 +# define SIZEOF_OFF_T 4 +# define SIZEOF_FPOS_T 8 +# define SIZEOF_HKEY 8 +# define SIZEOF_SIZE_T 8 +/* configure.in defines HAVE_LARGEFILE_SUPPORT iff HAVE_LONG_LONG, + sizeof(off_t) > sizeof(long), and sizeof(PY_LONG_LONG) >= sizeof(off_t). + On Win64 the second condition is not true, but if fpos_t replaces off_t + then this is true. The uses of HAVE_LARGEFILE_SUPPORT imply that Win64 + should define this. */ +# define HAVE_LARGEFILE_SUPPORT +#elif defined(MS_WIN32) +# define PLATFORM "win32" +# define HAVE_LARGEFILE_SUPPORT +# define SIZEOF_VOID_P 4 +# define SIZEOF_OFF_T 4 +# define SIZEOF_FPOS_T 8 +# define SIZEOF_HKEY 4 +# define SIZEOF_SIZE_T 4 + /* MS VS2005 changes time_t to an 64-bit type on all platforms */ +# if defined(_MSC_VER) && _MSC_VER >= 1400 +# define SIZEOF_TIME_T 8 +# else +# define SIZEOF_TIME_T 4 +# endif +#endif + +#ifdef _DEBUG +# define Py_DEBUG +#endif + + +#ifdef MS_WIN32 + +#define SIZEOF_SHORT 2 +#define SIZEOF_INT 4 +#define SIZEOF_LONG 4 +#define SIZEOF_LONG_LONG 8 +#define SIZEOF_DOUBLE 8 +#define SIZEOF_FLOAT 4 + +/* VC 7.1 has them and VC 6.0 does not. VC 6.0 has a version number of 1200. + Microsoft eMbedded Visual C++ 4.0 has a version number of 1201 and doesn't + define these. + If some compiler does not provide them, modify the #if appropriately. */ +#if defined(_MSC_VER) +#if _MSC_VER > 1300 +#define HAVE_UINTPTR_T 1 +#define HAVE_INTPTR_T 1 +#else +/* VC6, VS 2002 and eVC4 don't support the C99 LL suffix for 64-bit integer literals */ +#define Py_LL(x) x##I64 +#endif /* _MSC_VER > 1200 */ +#endif /* _MSC_VER */ + +#endif + +/* define signed and unsigned exact-width 32-bit and 64-bit types, used in the + implementation of Python long integers. */ +#ifndef PY_UINT32_T +#if SIZEOF_INT == 4 +#define HAVE_UINT32_T 1 +#define PY_UINT32_T unsigned int +#elif SIZEOF_LONG == 4 +#define HAVE_UINT32_T 1 +#define PY_UINT32_T unsigned long +#endif +#endif + +#ifndef PY_UINT64_T +#if SIZEOF_LONG_LONG == 8 +#define HAVE_UINT64_T 1 +#define PY_UINT64_T unsigned PY_LONG_LONG +#endif +#endif + +#ifndef PY_INT32_T +#if SIZEOF_INT == 4 +#define HAVE_INT32_T 1 +#define PY_INT32_T int +#elif SIZEOF_LONG == 4 +#define HAVE_INT32_T 1 +#define PY_INT32_T long +#endif +#endif + +#ifndef PY_INT64_T +#if SIZEOF_LONG_LONG == 8 +#define HAVE_INT64_T 1 +#define PY_INT64_T PY_LONG_LONG +#endif +#endif + +/* Fairly standard from here! */ + +/* Define to 1 if you have the `copysign' function. */ +#define HAVE_COPYSIGN 1 + +/* Define to 1 if you have the `isinf' macro. */ +#define HAVE_DECL_ISINF 1 + +/* Define to 1 if you have the `isnan' function. */ +#define HAVE_DECL_ISNAN 1 + +/* Define if on AIX 3. + System headers sometimes define this. + We just want to avoid a redefinition error message. */ +#ifndef _ALL_SOURCE +/* #undef _ALL_SOURCE */ +#endif + +/* Define to empty if the keyword does not work. */ +/* #define const */ + +/* Define to 1 if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_CONIO_H 1 +#endif + +/* Define to 1 if you have the header file. 
*/ +#ifndef MS_WINCE +#define HAVE_DIRECT_H 1 +#endif + +/* Define if you have dirent.h. */ +/* #define DIRENT 1 */ + +/* Define to the type of elements in the array set by `getgroups'. + Usually this is either `int' or `gid_t'. */ +/* #undef GETGROUPS_T */ + +/* Define to `int' if doesn't define. */ +/* #undef gid_t */ + +/* Define if your struct tm has tm_zone. */ +/* #undef HAVE_TM_ZONE */ + +/* Define if you don't have tm_zone but do have the external array + tzname. */ +#define HAVE_TZNAME + +/* Define to `int' if doesn't define. */ +/* #undef mode_t */ + +/* Define if you don't have dirent.h, but have ndir.h. */ +/* #undef NDIR */ + +/* Define to `long' if doesn't define. */ +/* #undef off_t */ + +/* Define to `int' if doesn't define. */ +/* #undef pid_t */ + +/* Define if the system does not provide POSIX.1 features except + with this defined. */ +/* #undef _POSIX_1_SOURCE */ + +/* Define if you need to in order for stat and other things to work. */ +/* #undef _POSIX_SOURCE */ + +/* Define as the return type of signal handlers (int or void). */ +#define RETSIGTYPE void + +/* Define to `unsigned' if doesn't define. */ +/* #undef size_t */ + +/* Define if you have the ANSI C header files. */ +#define STDC_HEADERS 1 + +/* Define if you don't have dirent.h, but have sys/dir.h. */ +/* #undef SYSDIR */ + +/* Define if you don't have dirent.h, but have sys/ndir.h. */ +/* #undef SYSNDIR */ + +/* Define if you can safely include both and . */ +/* #undef TIME_WITH_SYS_TIME */ + +/* Define if your declares struct tm. */ +/* #define TM_IN_SYS_TIME 1 */ + +/* Define to `int' if doesn't define. */ +/* #undef uid_t */ + +/* Define if the closedir function returns void instead of int. */ +/* #undef VOID_CLOSEDIR */ + +/* Define if getpgrp() must be called as getpgrp(0) + and (consequently) setpgrp() as setpgrp(0, 0). */ +/* #undef GETPGRP_HAVE_ARGS */ + +/* Define this if your time.h defines altzone */ +/* #define HAVE_ALTZONE */ + +/* Define if you have the putenv function. */ +#ifndef MS_WINCE +#define HAVE_PUTENV +#endif + +/* Define if your compiler supports function prototypes */ +#define HAVE_PROTOTYPES + +/* Define if you can safely include both and + (which you can't on SCO ODT 3.0). */ +/* #undef SYS_SELECT_WITH_SYS_TIME */ + +/* Define if you want documentation strings in extension modules */ +#define WITH_DOC_STRINGS 1 + +/* Define if you want to compile in rudimentary thread support */ +/* #undef WITH_THREAD */ + +/* Define if you want to use the GNU readline library */ +/* #define WITH_READLINE 1 */ + +/* Define if you want to have a Unicode type. */ +#define Py_USING_UNICODE + +/* Define as the size of the unicode type. */ +/* This is enough for unicodeobject.h to do the "right thing" on Windows. */ +#define Py_UNICODE_SIZE 2 + +/* Use Python's own small-block memory-allocator. */ +#define WITH_PYMALLOC 1 + +/* Define if you have clock. */ +/* #define HAVE_CLOCK */ + +/* Define when any dynamic module loading is enabled */ +#define HAVE_DYNAMIC_LOADING + +/* Define if you have ftime. */ +#ifndef MS_WINCE +#define HAVE_FTIME +#endif + +/* Define if you have getpeername. */ +#define HAVE_GETPEERNAME + +/* Define if you have getpgrp. */ +/* #undef HAVE_GETPGRP */ + +/* Define if you have getpid. */ +#ifndef MS_WINCE +#define HAVE_GETPID +#endif + +/* Define if you have gettimeofday. */ +/* #undef HAVE_GETTIMEOFDAY */ + +/* Define if you have getwd. */ +/* #undef HAVE_GETWD */ + +/* Define if you have lstat. */ +/* #undef HAVE_LSTAT */ + +/* Define if you have the mktime function. 
*/ +#define HAVE_MKTIME + +/* Define if you have nice. */ +/* #undef HAVE_NICE */ + +/* Define if you have readlink. */ +/* #undef HAVE_READLINK */ + +/* Define if you have select. */ +/* #undef HAVE_SELECT */ + +/* Define if you have setpgid. */ +/* #undef HAVE_SETPGID */ + +/* Define if you have setpgrp. */ +/* #undef HAVE_SETPGRP */ + +/* Define if you have setsid. */ +/* #undef HAVE_SETSID */ + +/* Define if you have setvbuf. */ +#define HAVE_SETVBUF + +/* Define if you have siginterrupt. */ +/* #undef HAVE_SIGINTERRUPT */ + +/* Define if you have symlink. */ +/* #undef HAVE_SYMLINK */ + +/* Define if you have tcgetpgrp. */ +/* #undef HAVE_TCGETPGRP */ + +/* Define if you have tcsetpgrp. */ +/* #undef HAVE_TCSETPGRP */ + +/* Define if you have times. */ +/* #undef HAVE_TIMES */ + +/* Define if you have uname. */ +/* #undef HAVE_UNAME */ + +/* Define if you have waitpid. */ +/* #undef HAVE_WAITPID */ + +/* Define to 1 if you have the `wcscoll' function. */ +#ifndef MS_WINCE +#define HAVE_WCSCOLL 1 +#endif + +/* Define if the zlib library has inflateCopy */ +#define HAVE_ZLIB_COPY 1 + +/* Define if you have the header file. */ +/* #undef HAVE_DLFCN_H */ + +/* Define to 1 if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_ERRNO_H 1 +#endif + +/* Define if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_FCNTL_H 1 +#endif + +/* Define to 1 if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_PROCESS_H 1 +#endif + +/* Define to 1 if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_SIGNAL_H 1 +#endif + +/* Define if you have the prototypes. */ +#define HAVE_STDARG_PROTOTYPES + +/* Define if you have the header file. */ +#define HAVE_STDDEF_H 1 + +/* Define if you have the header file. */ +/* #undef HAVE_SYS_AUDIOIO_H */ + +/* Define if you have the header file. */ +/* #define HAVE_SYS_PARAM_H 1 */ + +/* Define if you have the header file. */ +/* #define HAVE_SYS_SELECT_H 1 */ + +/* Define to 1 if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_SYS_STAT_H 1 +#endif + +/* Define if you have the header file. */ +/* #define HAVE_SYS_TIME_H 1 */ + +/* Define if you have the header file. */ +/* #define HAVE_SYS_TIMES_H 1 */ + +/* Define to 1 if you have the header file. */ +#ifndef MS_WINCE +#define HAVE_SYS_TYPES_H 1 +#endif + +/* Define if you have the header file. */ +/* #define HAVE_SYS_UN_H 1 */ + +/* Define if you have the header file. */ +/* #define HAVE_SYS_UTIME_H 1 */ + +/* Define if you have the header file. */ +/* #define HAVE_SYS_UTSNAME_H 1 */ + +/* Define if you have the header file. */ +/* #undef HAVE_THREAD_H */ + +/* Define if you have the header file. */ +/* #define HAVE_UNISTD_H 1 */ + +/* Define if you have the header file. */ +/* #define HAVE_UTIME_H 1 */ + +/* Define if the compiler provides a wchar.h header file. */ +#define HAVE_WCHAR_H 1 + +/* Define if you have the dl library (-ldl). */ +/* #undef HAVE_LIBDL */ + +/* Define if you have the mpc library (-lmpc). */ +/* #undef HAVE_LIBMPC */ + +/* Define if you have the nsl library (-lnsl). */ +#define HAVE_LIBNSL 1 + +/* Define if you have the seq library (-lseq). */ +/* #undef HAVE_LIBSEQ */ + +/* Define if you have the socket library (-lsocket). */ +#define HAVE_LIBSOCKET 1 + +/* Define if you have the sun library (-lsun). */ +/* #undef HAVE_LIBSUN */ + +/* Define if you have the termcap library (-ltermcap). */ +/* #undef HAVE_LIBTERMCAP */ + +/* Define if you have the termlib library (-ltermlib). 
*/ +/* #undef HAVE_LIBTERMLIB */ + +/* Define if you have the thread library (-lthread). */ +/* #undef HAVE_LIBTHREAD */ + +/* WinSock does not use a bitmask in select, and uses + socket handles greater than FD_SETSIZE */ +#define Py_SOCKET_FD_CAN_BE_GE_FD_SETSIZE + +/* Define if C doubles are 64-bit IEEE 754 binary format, stored with the + least significant byte first */ +#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754 1 + +#endif /* !Py_CONFIG_H */ diff --git a/server.pyw b/server.pyw new file mode 100755 index 00000000..1bc2d129 --- /dev/null +++ b/server.pyw @@ -0,0 +1,26 @@ +# This program is free software. It comes without any warranty, to +# the extent permitted by applicable law. You can redistribute it +# and/or modify it under the terms of the Do What The Fuck You Want +# To Public License, Version 2, as published by Sam Hocevar. See +# http://sam.zoy.org/wtfpl/COPYING for more details. + +import os +from include import HydrusConstants as HC +from include import ServerController + +try: + + app = ServerController.Controller( True, HC.LOGS_DIR + os.path.sep + 'server.log' ) + + app.MainLoop() + +except: + + import traceback + print( traceback.format_exc() ) + + +try: HC.shutdown = True +except: pass + +HC.pubsub.pubimmediate( 'shutdown' ) diff --git a/static/Icons.svg b/static/Icons.svg new file mode 100755 index 00000000..87501f19 --- /dev/null +++ b/static/Icons.svg @@ -0,0 +1,326 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + L + + ψ + + ψ + ψ + + ψ + + ψ + + diff --git a/static/collection.png b/static/collection.png new file mode 100755 index 00000000..d9591c13 Binary files /dev/null and b/static/collection.png differ diff --git a/static/contact - hydrus admin.yaml b/static/contact - hydrus admin.yaml new file mode 100755 index 00000000..3a799f11 --- /dev/null +++ b/static/contact - hydrus admin.yaml @@ -0,0 +1,18 @@ +!Contact {_host: 98.214.1.156, _name: hydrus admin, _port: 45873, _public_key: '-----BEGIN + PUBLIC KEY----- + + MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq0CX3+CPRx/rLrQ+AFW2 + + 5Ta0lWIhdzp84vvgDv/WJwoufdmZ4v/C01qzRerznsSeu0ak2KijcY19AqNJp7tt + + VxHYMqnuCQ7Hsw7unAEkz4bq7J4oK+Kmzrm6OqYbFxni00jT1WpQOwTkBuyg3/XD + + Gi1u3awyRqIUTAzkrf4yPsODyQSJS2g+OwFEcyzfk3bvfhJbD4rcO+eCiHh+d/lv + + KZ+K6IyqXVcQ+8jQkciPKqyfVTepP7KubT/gKLwM70v3/XSArL3lhxYEnqkwbYVe + + x/i2wvxwGreHw6TYBCNqLt+S6pUSJ+3+TWbTbF2rKJYj6aWcUreborAXyfoVPNeV + + /wIDAQAB + + -----END PUBLIC KEY-----'} diff --git a/static/cross.ico b/static/cross.ico new file mode 100755 index 00000000..fcf431de Binary files /dev/null and b/static/cross.ico differ diff --git a/static/default imageboards.yaml b/static/default imageboards.yaml new file mode 100755 index 00000000..04712f13 --- /dev/null +++ b/static/default imageboards.yaml @@ -0,0 +1,865 @@ +4chan: +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /w/ + _post_url: http://sys.4chan.org/w/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 
1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /h/ + _post_url: http://sys.4chan.org/h/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /u/ + _post_url: http://sys.4chan.org/u/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /mlp/ + _post_url: http://sys.4chan.org/mlp/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /fit/ + _post_url: http://sys.4chan.org/fit/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /tg/ + _post_url: http://sys.4chan.org/tg/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /an/ + _post_url: http://sys.4chan.org/an/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /s/ + _post_url: http://sys.4chan.org/s/post + _restrictions: + 2: 8388608 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /d/ + _post_url: http://sys.4chan.org/d/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + 
- [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /trv/ + _post_url: http://sys.4chan.org/trv/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /n/ + _post_url: http://sys.4chan.org/n/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /y/ + _post_url: http://sys.4chan.org/y/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /jp/ + _post_url: http://sys.4chan.org/jp/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /c/ + _post_url: http://sys.4chan.org/c/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /a/ + _post_url: http://sys.4chan.org/a/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /cgl/ + _post_url: http://sys.4chan.org/cgl/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /wg/ + _post_url: http://sys.4chan.org/wg/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', 
true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /k/ + _post_url: http://sys.4chan.org/k/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /r9k/ + _post_url: http://sys.4chan.org/r9k/post + _restrictions: + 2: 2097152 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /tv/ + _post_url: http://sys.4chan.org/tv/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', false] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', false] + _name: /soc/ + _post_url: http://sys.4chan.org/soc/post + _restrictions: + 2: 2097152 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /toy/ + _post_url: http://sys.4chan.org/toy/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /gif/ + _post_url: http://sys.4chan.org/gif/post + _restrictions: + 2: 4194304 + 3: [3] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /co/ + _post_url: http://sys.4chan.org/co/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /pol/ + _post_url: http://sys.4chan.org/pol/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, 
true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /diy/ + _post_url: http://sys.4chan.org/diy/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /ck/ + _post_url: http://sys.4chan.org/ck/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /sci/ + _post_url: http://sys.4chan.org/sci/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /o/ + _post_url: http://sys.4chan.org/o/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /fa/ + _post_url: http://sys.4chan.org/fa/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /m/ + _post_url: http://sys.4chan.org/m/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /hc/ + _post_url: http://sys.4chan.org/hc/post + _restrictions: + 2: 8388608 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /g/ + _post_url: http://sys.4chan.org/g/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, 
regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /e/ + _post_url: http://sys.4chan.org/e/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /3/ + _post_url: http://sys.4chan.org/3/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /p/ + _post_url: http://sys.4chan.org/p/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /v/ + _post_url: http://sys.4chan.org/v/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /cm/ + _post_url: http://sys.4chan.org/cm/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', false] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', false] + _name: /b/ + _post_url: http://sys.4chan.org/b/post + _restrictions: + 2: 2097152 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /po/ + _post_url: http://sys.4chan.org/po/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /mu/ + _post_url: http://sys.4chan.org/mu/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 
2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /vp/ + _post_url: http://sys.4chan.org/vp/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /int/ + _post_url: http://sys.4chan.org/int/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /hm/ + _post_url: http://sys.4chan.org/hm/post + _restrictions: + 2: 8388608 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /adv/ + _post_url: http://sys.4chan.org/adv/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /x/ + _post_url: http://sys.4chan.org/x/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /sp/ + _post_url: http://sys.4chan.org/sp/post + _restrictions: + 2: 4194304 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /vg/ + _post_url: http://sys.4chan.org/vg/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [spoiler/on, 3, 'False', true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /lit/ + _post_url: http://sys.4chan.org/lit/post + _restrictions: + 2: 3145728 + 3: [3, 2, 1] +- !Imageboard + _flood_time: 75 + _form_fields: + - [sub, 2, '', true] + - [recaptcha_response_field, 0, 6Ldp2bsSAAAAAAJ5uyx_lx34lJeEpTLVkP5k04qc, true] + - [pwd, 6, '', true] + - [resto, 5, thread_id, 
true] + - [mode, 2, regist, false] + - [upfile, 4, '', true] + - [com, 1, '', true] + - [email, 2, '', true] + - [name, 2, '', true] + _name: /hr/ + _post_url: http://sys.4chan.org/hr/post + _restrictions: + 0: [700, 700] + 1: [10000, 10000] + 2: 8388608 + 3: [3, 2, 1] diff --git a/static/downloading.png b/static/downloading.png new file mode 100755 index 00000000..c25545d3 Binary files /dev/null and b/static/downloading.png differ diff --git a/static/dump_fail.png b/static/dump_fail.png new file mode 100755 index 00000000..c37bd062 Binary files /dev/null and b/static/dump_fail.png differ diff --git a/static/dump_ok.png b/static/dump_ok.png new file mode 100755 index 00000000..89c8129a Binary files /dev/null and b/static/dump_ok.png differ diff --git a/static/dump_recoverable.png b/static/dump_recoverable.png new file mode 100755 index 00000000..628cf2da Binary files /dev/null and b/static/dump_recoverable.png differ diff --git a/static/e621.yaml b/static/e621.yaml new file mode 100755 index 00000000..b54efe8b --- /dev/null +++ b/static/e621.yaml @@ -0,0 +1,10 @@ +!Booru +_gallery_advance_num: 1 +_image_data: Download +_image_id: null +_name: e621 +_search_separator: '%20' +_search_url: http://e621.net/post/index?page=%index%&tags=%tags% +_tag_classnames_to_namespaces: {tag-type-artist: creator, tag-type-character: character, + tag-type-copyright: series, tag-type-general: ''} +_thumb_classname: thumb blacklisted diff --git a/static/file_repository_pending.png b/static/file_repository_pending.png new file mode 100755 index 00000000..8dc48607 Binary files /dev/null and b/static/file_repository_pending.png differ diff --git a/static/file_repository_pending_small.png b/static/file_repository_pending_small.png new file mode 100755 index 00000000..7511f20b Binary files /dev/null and b/static/file_repository_pending_small.png differ diff --git a/static/file_repository_petitioned.png b/static/file_repository_petitioned.png new file mode 100755 index 00000000..da1ca42d Binary files /dev/null and b/static/file_repository_petitioned.png differ diff --git a/static/file_repository_petitioned_small.png b/static/file_repository_petitioned_small.png new file mode 100755 index 00000000..d67420f1 Binary files /dev/null and b/static/file_repository_petitioned_small.png differ diff --git a/static/file_repository_small.png b/static/file_repository_small.png new file mode 100755 index 00000000..94acd4cc Binary files /dev/null and b/static/file_repository_small.png differ diff --git a/static/flash.png b/static/flash.png new file mode 100755 index 00000000..48c780ce Binary files /dev/null and b/static/flash.png differ diff --git a/static/flv.png b/static/flv.png new file mode 100755 index 00000000..2b0479c7 Binary files /dev/null and b/static/flv.png differ diff --git a/static/furry@booru.org.yaml b/static/furry@booru.org.yaml new file mode 100755 index 00000000..812250d5 --- /dev/null +++ b/static/furry@booru.org.yaml @@ -0,0 +1,10 @@ +!Booru +_gallery_advance_num: 25 +_image_data: Original image +_image_id: null +_name: furry@booru.org +_search_separator: + +_search_url: http://furry.booru.org/index.php?page=post&s=list&tags=%tags%&pid=%index% +_tag_classnames_to_namespaces: {tag-type-artist: creator, tag-type-character: character, + tag-type-copyright: series, tag-type-general: ''} +_thumb_classname: thumb diff --git a/static/gelbooru.yaml b/static/gelbooru.yaml new file mode 100755 index 00000000..30df369b --- /dev/null +++ b/static/gelbooru.yaml @@ -0,0 +1,10 @@ +!Booru +_gallery_advance_num: 28 
+_image_data: Original image +_image_id: null +_name: gelbooru +_search_separator: + +_search_url: http://gelbooru.com/index.php?page=post&s=list&tags=%tags%&pid=%index% +_tag_classnames_to_namespaces: {tag-type-artist: creator, tag-type-character: character, + tag-type-copyright: series, tag-type-general: ''} +_thumb_classname: thumb diff --git a/static/hydrus.ico b/static/hydrus.ico new file mode 100755 index 00000000..6df08155 Binary files /dev/null and b/static/hydrus.ico differ diff --git a/static/hydrus.png b/static/hydrus.png new file mode 100755 index 00000000..afa6444d Binary files /dev/null and b/static/hydrus.png differ diff --git a/static/hydrus_splash.png b/static/hydrus_splash.png new file mode 100755 index 00000000..e84b5daf Binary files /dev/null and b/static/hydrus_splash.png differ diff --git a/static/inbox.png b/static/inbox.png new file mode 100755 index 00000000..7348aed7 Binary files /dev/null and b/static/inbox.png differ diff --git a/static/mishimmie.yaml b/static/mishimmie.yaml new file mode 100755 index 00000000..cf4b36b6 --- /dev/null +++ b/static/mishimmie.yaml @@ -0,0 +1,9 @@ +!Booru +_gallery_advance_num: 1 +_image_data: null +_image_id: main_image +_name: mishimmie +_search_separator: '%20' +_search_url: http://shimmie.katawa-shoujo.com/post/list/%tags%/%index% +_tag_classnames_to_namespaces: {tag_name: ''} +_thumb_classname: thumb diff --git a/static/player_flv_maxi_1.6.0.swf b/static/player_flv_maxi_1.6.0.swf new file mode 100755 index 00000000..01ec373b Binary files /dev/null and b/static/player_flv_maxi_1.6.0.swf differ diff --git a/static/rule34@booru.org.yaml b/static/rule34@booru.org.yaml new file mode 100755 index 00000000..e45419a4 --- /dev/null +++ b/static/rule34@booru.org.yaml @@ -0,0 +1,9 @@ +!Booru +_gallery_advance_num: 25 +_image_data: Original image +_image_id: null +_name: rule34@booru.org +_search_separator: '%20' +_search_url: http://rule34.xxx/index.php?page=post&s=list&tags=%tags%&pid=%index% +_tag_classnames_to_namespaces: {tag-type-general: ''} +_thumb_classname: thumb diff --git a/static/safebooru.yaml b/static/safebooru.yaml new file mode 100755 index 00000000..21956d1f --- /dev/null +++ b/static/safebooru.yaml @@ -0,0 +1,10 @@ +!Booru +_gallery_advance_num: 25 +_image_data: Original image +_image_id: null +_name: safebooru +_search_separator: + +_search_url: http://safebooru.org/index.php?page=post&s=list&tags=%tags%&pid=%index% +_tag_classnames_to_namespaces: {tag-type-artist: creator, tag-type-character: character, + tag-type-copyright: series, tag-type-general: ''} +_thumb_classname: thumb diff --git a/static/silk icons/color_swatch.png b/static/silk icons/color_swatch.png new file mode 100755 index 00000000..6e6e8521 Binary files /dev/null and b/static/silk icons/color_swatch.png differ diff --git a/static/silk icons/font.png b/static/silk icons/font.png new file mode 100755 index 00000000..b7960db9 Binary files /dev/null and b/static/silk icons/font.png differ diff --git a/static/silk icons/link.png b/static/silk icons/link.png new file mode 100755 index 00000000..25eacb7c Binary files /dev/null and b/static/silk icons/link.png differ diff --git a/static/silk icons/link_break.png b/static/silk icons/link_break.png new file mode 100755 index 00000000..52357530 Binary files /dev/null and b/static/silk icons/link_break.png differ diff --git a/static/silk icons/text_align_center.png b/static/silk icons/text_align_center.png new file mode 100755 index 00000000..57beb381 Binary files /dev/null and b/static/silk 
icons/text_align_center.png differ diff --git a/static/silk icons/text_align_justify.png b/static/silk icons/text_align_justify.png new file mode 100755 index 00000000..2fbdd692 Binary files /dev/null and b/static/silk icons/text_align_justify.png differ diff --git a/static/silk icons/text_align_left.png b/static/silk icons/text_align_left.png new file mode 100755 index 00000000..6c8fcc11 Binary files /dev/null and b/static/silk icons/text_align_left.png differ diff --git a/static/silk icons/text_align_right.png b/static/silk icons/text_align_right.png new file mode 100755 index 00000000..a1502571 Binary files /dev/null and b/static/silk icons/text_align_right.png differ diff --git a/static/silk icons/text_bold.png b/static/silk icons/text_bold.png new file mode 100755 index 00000000..889ae80e Binary files /dev/null and b/static/silk icons/text_bold.png differ diff --git a/static/silk icons/text_indent.png b/static/silk icons/text_indent.png new file mode 100755 index 00000000..93645323 Binary files /dev/null and b/static/silk icons/text_indent.png differ diff --git a/static/silk icons/text_indent_remove.png b/static/silk icons/text_indent_remove.png new file mode 100755 index 00000000..1651b074 Binary files /dev/null and b/static/silk icons/text_indent_remove.png differ diff --git a/static/silk icons/text_italic.png b/static/silk icons/text_italic.png new file mode 100755 index 00000000..8482ac8c Binary files /dev/null and b/static/silk icons/text_italic.png differ diff --git a/static/silk icons/text_underline.png b/static/silk icons/text_underline.png new file mode 100755 index 00000000..90d0df28 Binary files /dev/null and b/static/silk icons/text_underline.png differ diff --git a/static/the culture.swf b/static/the culture.swf new file mode 100755 index 00000000..3f0d02a5 Binary files /dev/null and b/static/the culture.swf differ diff --git a/static/transparent.png b/static/transparent.png new file mode 100755 index 00000000..cee0658e Binary files /dev/null and b/static/transparent.png differ diff --git a/static/tumblr.png b/static/tumblr.png new file mode 100755 index 00000000..88ad3f97 Binary files /dev/null and b/static/tumblr.png differ diff --git a/static/twitter.png b/static/twitter.png new file mode 100755 index 00000000..f92b5c5e Binary files /dev/null and b/static/twitter.png differ diff --git a/static/xbooru.yaml b/static/xbooru.yaml new file mode 100755 index 00000000..252292a4 --- /dev/null +++ b/static/xbooru.yaml @@ -0,0 +1,10 @@ +!Booru +_gallery_advance_num: 25 +_image_data: Original image +_image_id: null +_name: xbooru +_search_separator: + +_search_url: http://xbooru.com/index.php?page=post&s=list&tags=%tags%&pid=%index% +_tag_classnames_to_namespaces: {tag-type-artist: creator, tag-type-character: character, + tag-type-copyright: series, tag-type-general: ''} +_thumb_classname: thumb
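
Editor's note: the `!Imageboard` records above are pure data, so a short, heavily hedged sketch may help readers guess at their shape. The field meanings below are inferred from the data alone, not taken from the hydrus source: each `_form_fields` entry looks like `[field_name, field_type_code, default_value, editable]`, and restriction key `2` appears to hold a maximum upload size in bytes (3145728 = 3 MiB, 2097152 = 2 MiB, 4194304 = 4 MiB, 8388608 = 8 MiB). The helper name and constant are hypothetical.

```python
# Hypothetical sketch, not the hydrus implementation.
# Assumption: restriction key 2 in an !Imageboard record is a max file size in bytes.

RESTRICTION_MAX_FILE_SIZE = 2  # assumed meaning of restriction key 2

def check_file_size(restrictions: dict, file_size_bytes: int) -> bool:
    """Return True if the candidate upload fits the board's (assumed) size limit."""
    limit = restrictions.get(RESTRICTION_MAX_FILE_SIZE)
    return limit is None or file_size_bytes <= limit

# Using the restriction blocks seen above: /hc/ allows 8388608 bytes (8 MiB),
# while most boards allow 3145728 bytes (3 MiB) and /b/ only 2097152 (2 MiB).
print(check_file_size({2: 8388608, 3: [3, 2, 1]}, 5 * 1024 * 1024))  # True
print(check_file_size({2: 2097152, 3: [3, 2, 1]}, 5 * 1024 * 1024))  # False
```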
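Editor's note: similarly, the `!Booru` YAML files above (e621, gelbooru, safebooru, xbooru, ...) look like gallery-scraper configs: `_search_url` contains `%tags%` and `%index%` placeholders, `_search_separator` presumably joins the tag list, and `_gallery_advance_num` looks like the per-page step for the index parameter. The sketch below only illustrates those assumptions; it is not the hydrus loader, which presumably registers its own YAML classes for the `!Booru` tag.

```python
# Minimal sketch, assuming the field semantics described above (not hydrus code).
import yaml

EXAMPLE = """
!Booru
_gallery_advance_num: 25
_image_data: Original image
_image_id: null
_name: safebooru
_search_separator: +
_search_url: http://safebooru.org/index.php?page=post&s=list&tags=%tags%&pid=%index%
_tag_classnames_to_namespaces: {tag-type-artist: creator}
_thumb_classname: thumb
"""

# PyYAML cannot load the custom !Booru tag without a constructor, so for this
# sketch we just map it to a plain dict.
yaml.SafeLoader.add_constructor(
    '!Booru', lambda loader, node: loader.construct_mapping(node, deep=True))

def gallery_url(booru: dict, tags: list, page: int) -> str:
    """Fill the %tags% and %index% placeholders for the given gallery page."""
    tags_string = booru['_search_separator'].join(tags)
    index = page * booru['_gallery_advance_num']  # assumed per-page step
    return (booru['_search_url']
            .replace('%tags%', tags_string)
            .replace('%index%', str(index)))

if __name__ == '__main__':
    booru = yaml.safe_load(EXAMPLE)
    print(gallery_url(booru, ['landscape', 'sunset'], 2))
    # -> http://safebooru.org/index.php?page=post&s=list&tags=landscape+sunset&pid=50
```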