Version 477

This is the weekly release, untested. Committing to this branch just to do a test build tonight.
This commit is contained in:
Hydrus Network Developer 2022-03-15 21:52:54 -05:00
parent 7561fa357b
commit 2ea848588c
28 changed files with 665 additions and 120 deletions

View File

@ -1,59 +0,0 @@
import requests
import cbor2
import base64
import json
import urllib.parse
hydrus_api_url = "http://localhost:45888"
metadata = hydrus_api_url+"/get_files/file_metadata"
del_note = hydrus_api_url+"/add_notes/delete_notes"
set_note = hydrus_api_url+"/add_notes/set_notes"
search = hydrus_api_url+"/get_files/search_files"
hsh="1b625544bcfbd7151000a816e6db6388ba0ef4dc3a664b62e2cb4e9d3036bed8"
key="222f3c82f4f7e8ce57747ff1cccfaf7014357dc509cdb77af20ff910c26ea05b"
# search for notes
print(json.loads((requests.get(url = search, params = {
    "Hydrus-Client-API-Access-Key": key,
    "tags": urllib.parse.quote("[\"system:has notes\"]")
}).text)))

# retrieve notes
print(json.loads((requests.get(url = metadata, params = {
    "Hydrus-Client-API-Access-Key": key,
    "include_notes": "true",
    "hashes": urllib.parse.quote("[\""+hsh+"\"]")
}).text))["metadata"][0]["notes"])

# retrieve notes, request that the response is CBOR encoded
print(cbor2.loads((requests.get(url = metadata, params = {
    "Hydrus-Client-API-Access-Key": key,
    "include_notes": base64.urlsafe_b64encode(cbor2.dumps(True)),
    "hashes": base64.urlsafe_b64encode(cbor2.dumps([hsh])),
    "cbor": ""
}).content))["metadata"][0]["notes"])

# add notes
headers = {"Hydrus-Client-API-Access-Key": key, "Content-Type": "application/json"}
print(requests.post(url = set_note, headers = headers, data = json.dumps({
    "notes": {"note1": "content1", "note2": "content2"},
    "hash": hsh
})))

# delete notes
print(requests.post(url = del_note, headers = headers, data = json.dumps({
    "note_names": ["note1", "note2", "asgasgasgasgaa"],
    "hash": hsh
})))

# add notes, but send CBOR instead of json
headers = {"Hydrus-Client-API-Access-Key": key, "Content-Type": "application/cbor"}
print(requests.post(url = set_note, headers = headers, data = cbor2.dumps({
    "notes": {"note1": "content1", "note2": "content2"},
    "hash": hsh
})))

View File

@ -5,7 +5,7 @@
## [Version 476](https://github.com/hydrusnetwork/hydrus/releases/tag/v476)
### domain modified times
* the downloader now saves the 'source time' (or, if none was parsed, 'creation time') for each file import object to the database when a file import is completed. separate timestamps are tracked for every domain you download from, and a file's timestamp can update to an earlier time if a new one comes in for that domain
* I overhauled how hydrus stores timestamps in each media object and added these domain timestamps to it. now, when you see 'modified time', it is the minimum of the file modified time and all recorded domain modified times. this aggregated modified time works for sort in UI and when sorting before applying system:limit, and it also works for system:modified time search. the search may be slow in some situations--let me know
* I also added the very recent 'archived' timestamps into this new object and added sort for archived time too. 'archived 3 minutes ago' style text will appear in thumbnail right-click menus and the media viewer top status text
@ -13,13 +13,13 @@
* I also expect to write an optional 'fill in dummy data' routine for the archived timestamps for files archived before I started tracking these timestamps. something like 'for all archived files, put in an archive time 20% between import time and now', but maybe there is a better way of doing it, let me know if you have any ideas. we'll only get one shot at this, so maybe we can do a better estimate with closer analysis
* in the longer future, I expect import/export support for this data and maintenance routines to retroactively populate the domain data based on hitting up known urls again, so all us long-time users can backfill in nicer post times for all our downloaded files
### searching tags on client api
* a user has helped me out by writing autocomplete tag search for the client api, under /add_tags/search_tags. I normally do not accept pull requests like this, but the guy did a great job and I have not been able to fit this in myself despite wanting it a lot
* I added some bells and whistles--py 3.8 support, tag sorting, filtering results according to any api permissions, and some unit tests
* at the moment, it searches the 'storage' domain that you see in a manage tags dialog, i.e. without siblings collapsed. I can and will expand it to support more options in future. please give it a go and let me know what you think
* client api version is now 26
### misc
* when you edit something in a multi-column list, I think I have updated every single one so the selection is preserved through the edit. annoyingly and confusingly on most of the old lists, for instance subscriptions, the 'ghost' of the selection focus would bump up one position after an edit. now it should stay the same even if you rename etc... and if you have multiple selected/edited
* I _think_ I fixed a bug in the selected files taglist where, in some combination of changing the tag service of the page and then loading up a favourite search, the taglist could get stuck on the previous tag domain. typically this would look as if the page's taglist had nothing in it no matter what files were selected
* if you set some files as 'alternates' when they are already 'duplicates', this now works (previously it did nothing). the non-kings of the group will be extracted from the duplicate group and applied as new alts
@ -29,7 +29,7 @@
## [Version 475](https://github.com/hydrusnetwork/hydrus/releases/tag/v475)
### new help docs
* the hydrus help is now built from markup using MkDocs! it now looks nicer and has search and automatically generated tables of contents and so on. please check it out. a user converted _all_ my old handwritten html to markup and figured out a migration process. thank you very much to this user.
* the help has pretty much the same structure, but online it has moved up a directory from https://hydrusnetwork.github.io/hydrus/help to https://hydrusnetwork.github.io/hydrus. all the old links should redirect in any case, so it isn't a big deal, but I have updated the various places in the program and my social media that have direct links. let me know if you have any trouble
* if you run from source and want a local copy of the help, you can build your own as here: https://hydrusnetwork.github.io/hydrus/about_docs.html . it is super simple, it just takes one extra step. Or just download and extract one of the archive builds
@ -39,7 +39,7 @@
* I added a list of some quick links back in to the main index page of the help
* I wrote an unlinked 'after_disaster' page for the help that collects my 'ok we finished recovering your broken database, now use your pain to maintain a backup in future' spiel, which I will point people to in future
### misc
* fixed a bug where changes to the search space in a duplicate filter page were not sticking after the first time they were changed. this was related to a recent 'does page have changes?' optimisation--it was giving a false negative for this page type (issue #1079)
* fixed a bug when searching for both 'media' and 'preview' view count/viewtime simultaneously (issue #1089, issue #1090)
* added support for audio-only mp4 files. these would previously generally fail, sometimes be read as m4a. all m4as are scheduled for a metadata regen scan
@ -58,12 +58,12 @@
* I think I fixed a bug, when crawling for file paths, where on Windows some network file paths were being detected incorrectly as directories and causing parse errors
* fixed a broken command in the release build so the windows installer executable should correctly get 'v475' as its version metadata (previously this was blank), which should help some software managers that use this info to decide to do updates (issue #1071)
### some cleanup
* replaced last instances of EVT_CLOSE wx wrapper with proper Qt code
* did a heap of very minor code cleanup jobs all across the program, mostly just to get into pycharm
* clarified the help text in _options->external programs_ regarding %path% variable
### pycharm
* as a side note, I finally moved from my jank old WingIDE IDE to PyCharm in this release. I am overall happy with it--it is clearly very powerful and customisable--but adjusting after about ten or twelve years of Wing was a bit awkward. I am very much a person of habit, and it will take me a little while to get fully used to the new shortcuts and UI and so on, but PyCharm does everything that is critical for me, supports many modern coding concepts, and will work well as we move to python 3.9 and beyond
## [Version 474](https://github.com/hydrusnetwork/hydrus/releases/tag/v474)

View File

@ -23,12 +23,13 @@ Once the API is running, go to its entry in _services->review services_. Each ex
## Browsers and tools created by hydrus users
* [Hydrus Companion](https://gitgud.io/prkc/hydrus-companion): a Chrome/Firefox extension for hydrus that allows easy download queueing as you browse and advanced login support
* [Hydrus Web](https://github.com/floogulinc/hydrus-web): a web client for hydrus (allows phone browsing of hydrus)
* [LoliSnatcher](https://github.com/NO-ob/LoliSnatcher_Droid): a booru client for Android that can talk to hydrus
* [Anime Boxes](https://www.animebox.es/): a booru browser, now supports adding your client as a Hydrus Server
* [FlipFlip](https://ififfy.github.io/flipflip/#/): an advanced slideshow interface, now supports hydrus as a source
* [Iwara-Hydrus](https://github.com/GoAwayNow/Iwara-Hydrus): a userscript to simplify sending Iwara videos to Hydrus Network
* [Hydrus Archive Delete](https://gitgud.io/koto/hydrus-archive-delete): Archive/Delete filter in your web browser
* [hydrus-dd](https://gitgud.io/koto/hydrus-dd): DeepDanbooru neural network tagging for Hydrus
* [hyextract](https://github.com/floogulinc/hyextract): Extract archives from Hydrus and reimport with tags and URL associations
* [dolphin-hydrus-actions](https://gitgud.io/prkc/dolphin-hydrus-actions): Adds Hydrus right-click context menu actions to Dolphin file manager.
* [more projects on github](https://github.com/stars/hydrusnetwork/lists/hydrus-related-projects)

View File

@ -7,8 +7,9 @@ hide: navigation
## Library modules created by hydrus users
* [Hydrus API](https://gitlab.com/cryzed/hydrus-api): A python module that talks to the API.
* [hydrus.js](https://github.com/cravxx/hydrus.js): A node.js module that talks to the API.
* [more projects on github](https://github.com/stars/hydrusnetwork/lists/hydrus-related-projects)
## API
@ -52,6 +53,26 @@ On 200 OK, the API returns JSON for everything except actual file/thumbnail requ
Exceeding this limit will cause the request to fail. Make sure to use pagination if you are passing very large JSON arrays as parameters in a GET request.
## CBOR
The API now tentatively supports CBOR, which is basically 'byte JSON'. If you are in a lower level language or need to do a lot of heavy work quickly, try it out!
To work in CBOR, use CBOR to encode any parameters that you would previously put in JSON, and set Content-Type `application/cbor` in your request header. For POST requests, send the raw CBOR bytes as the request body, like this:
```py
cbor2.dumps( arg_dict )
```
For GET, encode the parameter value in base64, like this:
```py
base64.urlsafe_b64encode( cbor2.dumps( argument ) )

# or, if you need a str rather than bytes:
str( base64.urlsafe_b64encode( cbor2.dumps( argument ) ), 'ascii' )
```
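
As a rough end-to-end sketch (the port and access key here are placeholders, and `file_ids` is just one example of an encodable parameter), a CBOR round trip might look like this:

```py
import base64
import cbor2
import requests

api_url = 'http://localhost:45869'  # hypothetical local client; your port may differ
access_key = '0123456789abcdef' * 4  # placeholder 64-character hex access key

# every parameter value is CBOR-encoded and then base64'd, and the 'cbor' parameter
# asks for the response body to come back as CBOR rather than JSON
response = requests.get(
    api_url + '/get_files/file_metadata',
    params = {
        'Hydrus-Client-API-Access-Key': access_key,
        'file_ids': base64.urlsafe_b64encode( cbor2.dumps( [ 1, 2, 3 ] ) ),
        'cbor': ''
    }
)

metadata = cbor2.loads( response.content )
```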
## Access and permissions
The client gives access to its API through different 'access keys', which are the typical 64-character hex strings used in many other places across hydrus. Each key grants a different set of permissions, such as the ability to handle files or tags. Most of the time, a user will provide full access, but do not assume this. If the access header or parameter is not provided, you will get 401, and all insufficient permission problems will return 403 with appropriate error text.
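
As a minimal sketch of the access pattern (placeholder key, and a simple search grounded on the same local client assumed above):

```py
import requests

api_url = 'http://localhost:45869'  # hypothetical local client
access_key = '0123456789abcdef' * 4  # placeholder; generate a real key in the client

response = requests.get(
    api_url + '/get_files/search_files',
    headers = { 'Hydrus-Client-API-Access-Key': access_key },
    params = { 'tags': '["system:has notes"]' }
)

if response.status_code == 401:
    print( 'no recognised access key was provided' )
elif response.status_code == 403:
    print( 'key accepted, but it lacks search permission:', response.text )
else:
    print( response.json() )
```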

View File

@ -1,4 +1,5 @@
import os
import numpy
import threading
import time
@ -53,6 +54,8 @@ def GenerateHydrusBitmapFromNumPyImage( numpy_image, compressed = True ):
def GenerateHydrusBitmapFromPILImage( pil_image, compressed = True ):
    depth = 3
    if pil_image.mode == 'RGBA':
        depth = 4
@ -88,6 +91,11 @@ class ImageRenderer( object ):
def _GetNumPyImage( self, clip_rect: QC.QRect, target_resolution: QC.QSize ):
    if self._numpy_image is None:
        return numpy.zeros( ( target_resolution.height(), target_resolution.width() ), dtype = 'uint8' )
    clip_size = clip_rect.size()
    clip_width = clip_size.width()
    clip_height = clip_size.height()
@ -223,24 +231,41 @@ class ImageRenderer( object ):
if not self._this_is_for_metadata_alone:
    if self._numpy_image is None:
        m = 'There was a problem rendering the image with hash {}! It may be damaged.'.format(
            self._hash.hex()
        )
        m += os.linesep * 2
        m += 'Jobs to check its integrity and metadata have been scheduled. If it is damaged, it may be redownloaded or removed from the client completely. If it is not damaged, it may be fixed automatically or further action may be required.'
        HydrusData.ShowText( m )
        HG.client_controller.Write( 'file_maintenance_add_jobs_hashes', { self._hash }, ClientFiles.REGENERATE_FILE_DATA_JOB_FILE_INTEGRITY_DATA_TRY_URL_ELSE_REMOVE_RECORD )
        HG.client_controller.Write( 'file_maintenance_add_jobs_hashes', { self._hash }, ClientFiles.REGENERATE_FILE_DATA_JOB_FILE_METADATA )
    else:
        my_resolution_size = QC.QSize( self._resolution[0], self._resolution[1] )
        my_numpy_size = QC.QSize( self._numpy_image.shape[1], self._numpy_image.shape[0] )
        if my_resolution_size != my_numpy_size:
            m = 'There was a problem rendering the image with hash {}! Hydrus thinks its resolution is {}, but it was actually {}.'.format(
                self._hash.hex(),
                my_resolution_size,
                my_numpy_size
            )
            m += os.linesep * 2
            m += 'You may see some black squares in the image. A metadata regeneration has been scheduled, so with luck the image will fix itself soon.'
            HydrusData.ShowText( m )
            HG.client_controller.Write( 'file_maintenance_add_jobs_hashes', { self._hash }, ClientFiles.REGENERATE_FILE_DATA_JOB_FILE_METADATA )

View File

@ -1,17 +1,41 @@
import typing
from hydrus.core import HydrusData
def ShouldUpdateDomainModifiedTime( existing_timestamp: int, new_timestamp: int ):
    # assume anything too early is a meme and a timestamp parsing conversion error
    if new_timestamp <= 86400 * 7:
        return False
    # only go backwards, in general
    if new_timestamp >= existing_timestamp:
        return False
    return True

def MergeModifiedTimes( existing_timestamp: typing.Optional[ int ], new_timestamp: typing.Optional[ int ] ) -> typing.Optional[ int ]:
    if existing_timestamp is None:
        return new_timestamp
    if new_timestamp is None:
        return existing_timestamp
    if ShouldUpdateDomainModifiedTime( existing_timestamp, new_timestamp ):
        return new_timestamp
    else:
        return existing_timestamp
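
# an illustrative aside, not part of the module: MergeModifiedTimes prefers the
# earliest plausible time, e.g.
#   MergeModifiedTimes( None, 1600000000 )       -> 1600000000  (nothing to compare)
#   MergeModifiedTimes( 1600000000, 1500000000 ) -> 1500000000  (earlier time wins)
#   MergeModifiedTimes( 1600000000, 3600 )       -> 1600000000  (too early, assumed a parse error)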

View File

@ -3980,6 +3980,141 @@ class DB( HydrusDB.HydrusDB ):
return result
def _GetFileHistory( self, num_steps: int ):
    # get all sorts of stats and present them in ( timestamp, cumulative_num ) tuple pairs
    file_history = {}
    # first let's do current files. we increment when added, decrement when we know removed
    current_files_table_name = ClientDBFilesStorage.GenerateFilesTableName( self.modules_services.combined_local_file_service_id, HC.CONTENT_STATUS_CURRENT )
    current_timestamps = self._STL( self._Execute( 'SELECT timestamp FROM {};'.format( current_files_table_name ) ) )
    deleted_files_table_name = ClientDBFilesStorage.GenerateFilesTableName( self.modules_services.combined_local_file_service_id, HC.CONTENT_STATUS_DELETED )
    since_deleted = self._STL( self._Execute( 'SELECT original_timestamp FROM {} WHERE original_timestamp IS NOT NULL;'.format( deleted_files_table_name ) ) )
    current_timestamps.extend( since_deleted )
    current_timestamps.sort()
    deleted_timestamps = self._STL( self._Execute( 'SELECT timestamp FROM {} WHERE timestamp IS NOT NULL ORDER BY timestamp ASC;'.format( deleted_files_table_name ) ) )
    combined_timestamps_with_delta = [ ( timestamp, 1 ) for timestamp in current_timestamps ]
    combined_timestamps_with_delta.extend( ( ( timestamp, -1 ) for timestamp in deleted_timestamps ) )
    combined_timestamps_with_delta.sort()
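    # every import event contributes +1 and every deletion -1, so a running sum over
    # the sorted events gives the number of files in storage at any moment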
    current_file_history = []
    if len( combined_timestamps_with_delta ) > 0:
        if len( combined_timestamps_with_delta ) < 2:
            step_gap = 1
        else:
            step_gap = max( ( combined_timestamps_with_delta[-1][0] - combined_timestamps_with_delta[0][0] ) // num_steps, 1 )
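        # aim for roughly num_steps sample points across the whole span: emit at most
        # one ( timestamp, running_total ) pair per step_gap seconds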
        total_current_files = 0
        step_timestamp = combined_timestamps_with_delta[0][0]
        for ( timestamp, delta ) in combined_timestamps_with_delta:
            if timestamp > step_timestamp + step_gap:
                current_file_history.append( ( step_timestamp, total_current_files ) )
                step_timestamp = timestamp
            total_current_files += delta
    file_history[ 'current' ] = current_file_history
    # now deleted times. we will pre-populate total_num_files with non-timestamped records
    ( total_deleted_files, ) = self._Execute( 'SELECT COUNT( * ) FROM {} WHERE timestamp IS NULL;'.format( deleted_files_table_name ) ).fetchone()
    deleted_file_history = []
    if len( deleted_timestamps ) > 0:
        if len( deleted_timestamps ) < 2:
            step_gap = 1
        else:
            step_gap = max( ( deleted_timestamps[-1] - deleted_timestamps[0] ) // num_steps, 1 )
        step_timestamp = deleted_timestamps[0]
        for deleted_timestamp in deleted_timestamps:
            if deleted_timestamp > step_timestamp + step_gap:
                deleted_file_history.append( ( step_timestamp, total_deleted_files ) )
                step_timestamp = deleted_timestamp
            total_deleted_files += 1
    file_history[ 'deleted' ] = deleted_file_history
    # and inbox, which will work backwards since we have numbers for archiving. several subtle differences here
    ( total_inbox_files, ) = self._Execute( 'SELECT COUNT( * ) FROM file_inbox;' ).fetchone()
    archive_timestamps = self._STL( self._Execute( 'SELECT archived_timestamp FROM archive_timestamps ORDER BY archived_timestamp ASC;' ) )
    inbox_file_history = []
    if len( archive_timestamps ) > 0:
        if len( archive_timestamps ) < 2:
            step_gap = 1
        else:
            step_gap = max( ( archive_timestamps[-1] - archive_timestamps[0] ) // num_steps, 1 )
        archive_timestamps.reverse()
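        # walk newest-to-oldest: starting from the current inbox count, each archive
        # event passed means one more file was still in the inbox at that earlier time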
        step_timestamp = archive_timestamps[0]
        for archived_timestamp in archive_timestamps:
            if archived_timestamp < step_timestamp - step_gap:
                inbox_file_history.append( ( archived_timestamp, total_inbox_files ) )
                step_timestamp = archived_timestamp
            total_inbox_files += 1
        inbox_file_history.reverse()
    file_history[ 'inbox' ] = inbox_file_history
    return file_history
def _GetFileNotes( self, hash ):
    hash_id = self.modules_hashes_local_cache.GetHashId( hash )
@ -10211,6 +10346,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'file_duplicate_hashes': result = self.modules_files_duplicates.DuplicatesGetFileHashesByDuplicateType( *args, **kwargs )
elif action == 'file_duplicate_info': result = self.modules_files_duplicates.DuplicatesGetFileDuplicateInfo( *args, **kwargs )
elif action == 'file_hashes': result = self.modules_hashes.GetFileHashes( *args, **kwargs )
elif action == 'file_history': result = self._GetFileHistory( *args, **kwargs )
elif action == 'file_maintenance_get_job': result = self.modules_files_maintenance_queue.GetJob( *args, **kwargs )
elif action == 'file_maintenance_get_job_counts': result = self.modules_files_maintenance_queue.GetJobCounts( *args, **kwargs )
elif action == 'file_query_ids': result = self._GetHashIdsFromQuery( *args, **kwargs )
@ -13978,6 +14114,54 @@ class DB( HydrusDB.HydrusDB ):
if version == 476:
    try:
        # fixed apng duration calculation
        table_join = self.modules_files_storage.GetTableJoinLimitedByFileDomain( self.modules_services.combined_local_file_service_id, 'files_info', HC.CONTENT_STATUS_CURRENT )
        hash_ids = self._STL( self._Execute( 'SELECT hash_id FROM {} WHERE mime = ?;'.format( table_join ), ( HC.IMAGE_APNG, ) ) )
        self.modules_files_maintenance_queue.AddJobs( hash_ids, ClientFiles.REGENERATE_FILE_DATA_JOB_FILE_METADATA )
    except Exception as e:
        HydrusData.PrintException( e )
        message = 'Some apng regen scheduling failed to set! This is not super important, but hydev would be interested in seeing the error that was printed to the log.'
        self.pub_initial_message( message )
    try:
        domain_manager = self.modules_serialisable.GetJSONDump( HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_DOMAIN_MANAGER )
        domain_manager.Initialise()
        #
        domain_manager.OverwriteDefaultParsers( ( 'nitter tweet parser', 'nitter tweet parser (video from koto.reisen)' ) )
        #
        domain_manager.TryToLinkURLClassesAndParsers()
        #
        self.modules_serialisable.SetJSONDump( domain_manager )
    except Exception as e:
        HydrusData.PrintException( e )
        message = 'Trying to update some parsers failed! Please let hydrus dev know!'
        self.pub_initial_message( message )

self._controller.frame_splash_status.SetTitleText( 'updated db to v{}'.format( HydrusData.ToHumanInt( version + 1 ) ) )
self._Execute( 'UPDATE version SET version = ?;', ( version + 1, ) )

View File

@ -44,6 +44,7 @@ from hydrus.client import ClientPaths
from hydrus.client import ClientServices
from hydrus.client import ClientThreading
from hydrus.client.gui import ClientGUIAsync
from hydrus.client.gui import ClientGUICharts
from hydrus.client.gui import ClientGUICore as CGC
from hydrus.client.gui import ClientGUIDialogs
from hydrus.client.gui import ClientGUIDialogsManage
@ -2145,6 +2146,7 @@ class FrameGUI( ClientGUITopLevelWindows.MainFrameThatResizes ):
self._menu_updater_pages_count = ClientGUIAsync.FastThreadToGUIUpdater( self, self._UpdateMenuPagesCount )
self._boned_updater = self._InitialiseMenubarGetBonesUpdater()
self._file_history_updater = self._InitialiseMenubarGetFileHistoryUpdater()
self.setMenuBar( self._menubar )
@ -2259,6 +2261,44 @@ class FrameGUI( ClientGUITopLevelWindows.MainFrameThatResizes ):
return ClientGUIAsync.AsyncQtUpdater( self, loading_callable, work_callable, publish_callable )
def _InitialiseMenubarGetFileHistoryUpdater( self ):
    def loading_callable():
        pass
    def work_callable():
        job_key = ClientThreading.JobKey()
        job_key.SetVariable( 'popup_text_1', 'Loading File History\u2026' )
        HG.client_controller.pub( 'message', job_key )
        num_steps = 1000
        file_history = HG.client_controller.Read( 'file_history', num_steps )
        return ( job_key, file_history )
    def publish_callable( result ):
        ( job_key, file_history ) = result
        job_key.Delete()
        frame = ClientGUITopLevelWindowsPanels.FrameThatTakesScrollablePanel( self, 'file history' )
        panel = ClientGUIScrolledPanelsReview.ReviewFileHistory( frame, file_history )
        frame.SetPanel( panel )
    return ClientGUIAsync.AsyncQtUpdater( self, loading_callable, work_callable, publish_callable )
def _InitialiseMenubarGetMenuUpdaterDatabase( self ):
def loading_callable():
@ -3105,6 +3145,7 @@ class FrameGUI( ClientGUITopLevelWindows.MainFrameThatResizes ):
ClientGUIMenus.AppendSeparator( menu )
ClientGUIMenus.AppendMenuItem( menu, 'how boned am I?', 'Check for a summary of your ride so far.', self._HowBonedAmI )
ClientGUIMenus.AppendMenuItem( menu, 'view file history', 'See a chart of your file import history.', self._ShowFileHistory )
ClientGUIMenus.AppendSeparator( menu )
@ -6168,6 +6209,20 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def _ShowFileHistory( self ):
    if not ClientGUICharts.QT_CHARTS_OK:
        message = 'Sorry, you do not have QtCharts available, so this chart cannot be shown!'
        QW.QMessageBox.warning( self, 'Warning', message )
        return
    self._file_history_updater.update()
def _ShowHideSplitters( self ):
page = self._notebook.GetCurrentMediaPage()

View File

@ -39,12 +39,13 @@ try:
bar_set.append( month_values )
x_category_axis.append( month_strs )
x_category_axis.setLabelsAngle( 90 )
y_value_axis = QCh.QtCharts.QValueAxis()
y_value_axis.setRange( 0.0, ( highest_usage * 1.2 ) / line_divisor )
y_value_axis.setTitleText( '({})'.format( unit ) )
y_value_axis.setLabelFormat( '%i{}'.format( unit ) )
y_value_axis.applyNiceNumbers()
@ -67,6 +68,80 @@ try:
class FileHistory( QCh.QtCharts.QChartView ):
    def __init__( self, parent, file_history: dict ):
        QCh.QtCharts.QChartView.__init__( self, parent )
        # this lad takes ms timestamps, not s, so * 1000
        # note you have to give it floats for the ms or it throws a type problem of big number to C long
        current_files_series = QCh.QtCharts.QLineSeries()
        current_files_series.setName( 'files in storage' )
        for ( timestamp, num_files ) in file_history[ 'current' ]:
            current_files_series.append( timestamp * 1000.0, num_files )
        deleted_files_series = QCh.QtCharts.QLineSeries()
        deleted_files_series.setName( 'deleted' )
        for ( timestamp, num_files ) in file_history[ 'deleted' ]:
            deleted_files_series.append( timestamp * 1000.0, num_files )
        inbox_files_series = QCh.QtCharts.QLineSeries()
        inbox_files_series.setName( 'inbox' )
        for ( timestamp, num_files ) in file_history[ 'inbox' ]:
            inbox_files_series.append( timestamp * 1000.0, num_files )
        # takes ms since epoch
        x_datetime_axis = QCh.QtCharts.QDateTimeAxis()
        x_datetime_axis.setTickCount( 25 )
        x_datetime_axis.setLabelsAngle( 90 )
        x_datetime_axis.setFormat( 'yyyy-MM-dd' )
        y_value_axis = QCh.QtCharts.QValueAxis()
        y_value_axis.setLabelFormat( '%i' )
        chart = QCh.QtCharts.QChart()
        chart.addSeries( current_files_series )
        chart.addSeries( inbox_files_series )
        chart.addSeries( deleted_files_series )
        chart.addAxis( x_datetime_axis, QC.Qt.AlignBottom )
        chart.addAxis( y_value_axis, QC.Qt.AlignLeft )
        current_files_series.attachAxis( x_datetime_axis )
        current_files_series.attachAxis( y_value_axis )
        deleted_files_series.attachAxis( x_datetime_axis )
        deleted_files_series.attachAxis( y_value_axis )
        inbox_files_series.attachAxis( x_datetime_axis )
        inbox_files_series.attachAxis( y_value_axis )
        y_value_axis.setMin( 0 )
        y_value_axis.applyNiceNumbers()
        self.setChart( chart )
except:
QT_CHARTS_OK = False

View File

@ -34,10 +34,11 @@ from hydrus.client import ClientRendering
from hydrus.client import ClientSearch
from hydrus.client import ClientSerialisable
from hydrus.client import ClientThreading
from hydrus.client.gui import ClientGUIAsync
from hydrus.client.gui import ClientGUICharts
from hydrus.client.gui import ClientGUIDialogs
from hydrus.client.gui import ClientGUIDialogsQuick
from hydrus.client.gui import ClientGUIDragDrop
from hydrus.client.gui import ClientGUIFunctions
from hydrus.client.gui import ClientGUIImport
from hydrus.client.gui import ClientGUIScrolledPanels
@ -2287,6 +2288,32 @@ class ReviewDownloaderImport( ClientGUIScrolledPanels.ReviewPanel ):
self._ImportPaths( paths )
class ReviewFileHistory( ClientGUIScrolledPanels.ReviewPanel ):
    def __init__( self, parent, file_history ):
        ClientGUIScrolledPanels.ReviewPanel.__init__( self, parent )
        file_history_chart = ClientGUICharts.FileHistory( self, file_history )
        file_history_chart.setMinimumSize( 640, 480 )
        vbox = QP.VBoxLayout()
        label = 'Please note that delete and inbox time tracking are new so you may not have full data for them.'
        st = ClientGUICommon.BetterStaticText( self, label = label )
        st.setWordWrap( True )
        st.setAlignment( QC.Qt.AlignCenter )
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, file_history_chart, CC.FLAGS_EXPAND_BOTH_WAYS )
        self.widget().setLayout( vbox )

class ReviewFileMaintenance( ClientGUIScrolledPanels.ReviewPanel ):
    def __init__( self, parent, stats ):

View File

@ -23,6 +23,7 @@ from hydrus.core import HydrusTemp
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientData
from hydrus.client import ClientParsing
from hydrus.client import ClientTime
from hydrus.client.importing import ClientImportFiles
from hydrus.client.importing import ClientImporting
from hydrus.client.importing.options import FileImportOptions
@ -474,6 +475,10 @@ class FileSeed( HydrusSerialisable.SerialisableBase ):
last_modified_time = network_job.GetLastModifiedTime()
self.source_time = ClientTime.MergeModifiedTimes( self.source_time, last_modified_time )
status_hook( 'importing file' )
self.Import( temp_path, file_import_options, status_hook = status_hook )

View File

@ -760,9 +760,12 @@ class LocationsManager( object ):
elif action == HC.CONTENT_UPDATE_ARCHIVE:
    if self.inbox:
        self.inbox = False
        self._timestamp_manager.SetArchivedTimestamp( HydrusData.GetNow() )
elif action == HC.CONTENT_UPDATE_INBOX:

View File

@ -256,6 +256,8 @@ def ParseClientAPIPOSTArgs( request ):
if not request.requestHeaders.hasHeader( 'Content-Type' ):
    request_mime = HC.APPLICATION_JSON
parsed_request_args = HydrusNetworkVariableHandling.ParsedRequestArguments()
total_bytes_read = 0
@ -274,7 +276,7 @@ def ParseClientAPIPOSTArgs( request ):
try:
    request_mime = HC.mime_enum_lookup[ content_type ]
except:
@ -283,7 +285,7 @@ def ParseClientAPIPOSTArgs( request ):
total_bytes_read = 0
if request_mime == HC.APPLICATION_JSON:
    json_bytes = request.content.read()
@ -295,7 +297,7 @@ def ParseClientAPIPOSTArgs( request ):
parsed_request_args = ParseClientAPIPOSTByteArgs( args )
elif request_mime == HC.APPLICATION_CBOR and CBOR_AVAILABLE:
    cbor_bytes = request.content.read()
@ -325,7 +327,7 @@ def ParseClientAPIPOSTArgs( request ):
return ( parsed_request_args, total_bytes_read, request_mime )
def ParseClientAPISearchPredicates( request ):
@ -779,13 +781,13 @@ class HydrusResourceClientAPI( HydrusServerResources.HydrusResource ):
def _callbackParsePOSTArgs( self, request: HydrusServerRequest.HydrusRequest ):
    ( parsed_request_args, total_bytes_read, request_mime ) = ParseClientAPIPOSTArgs( request )
    self._reportDataUsed( request, total_bytes_read )
    request.parsed_request_args = parsed_request_args
    request.preferred_mime = request_mime
    return request
@ -1292,7 +1294,7 @@ class HydrusResourceClientAPIRestrictedAddNotesSetNotes( HydrusResourceClientAPI
if 'hash' in request.parsed_request_args:
    hash = request.parsed_request_args.GetValue( 'hash', bytes )
elif 'file_id' in request.parsed_request_args:
    hash_id = request.parsed_request_args.GetValue( 'file_id', int )
@ -1300,10 +1302,11 @@ class HydrusResourceClientAPIRestrictedAddNotesSetNotes( HydrusResourceClientAPI
    hash_ids_to_hashes = HG.client_controller.Read( 'hash_ids_to_hashes', hash_ids = [ hash_id ] )
    hash = hash_ids_to_hashes[ hash_id ]
else:
    raise HydrusExceptions.BadRequestException( 'There was no file identifier or hash given!' )

notes = request.parsed_request_args.GetValue( 'notes', dict )
@ -2859,7 +2862,7 @@ class HydrusResourceClientAPIRestrictedManagePagesGetPages( HydrusResourceClient
body_dict = { 'pages' : page_info_dict }
body = Dumps( body_dict, request.preferred_mime )
response_context = HydrusServerResources.ResponseContext( 200, mime = request.preferred_mime, body = body )

View File

@ -1,5 +1,8 @@
import calendar
import io
import os
import typing
import requests
import threading
import traceback
@ -167,6 +170,8 @@ class NetworkJob( object ):
self._actual_fetched_url = self._url
self._temp_path = temp_path
self._response_last_modified = None
if self._temp_path is None:
# 100MB HTML file lmao
@ -253,6 +258,34 @@ class NetworkJob( object ):
return self._current_connection_attempt_number <= max_attempts_allowed
def _GenerateModifiedDate( self, response: requests.Response ):
    if 'Last-Modified' in response.headers:
        # Thu, 20 May 2010 07:00:23 GMT
        # these are always in GMT
        last_modified_string = response.headers[ 'Last-Modified' ]
        if last_modified_string.endswith( ' GMT' ):
            last_modified_string = last_modified_string[:-4]
        try:
            struct_time = time.strptime( last_modified_string, '%a, %d %b %Y %H:%M:%S' )
            # the given struct is in GMT, so calendar.timegm is appropriate here
            self._response_last_modified = int( calendar.timegm( struct_time ) )
        except:
            pass
def _GenerateNetworkContexts( self ):
network_contexts = []
@ -1044,6 +1077,14 @@ class NetworkJob( object ):
def GetLastModifiedTime( self ) -> typing.Optional[ int ]:
    with self._lock:
        return self._response_last_modified
def GetLoginNetworkContext( self ):
with self._lock:
@ -1382,6 +1423,10 @@ class NetworkJob( object ):
with self._lock:
    # we are complete here and worked ok
    self._GenerateModifiedDate( response )
    self._status_text = 'done!'

View File

@ -738,7 +738,7 @@ class URLClass( HydrusSerialisable.SerialisableBaseNamed ):
num_total_parameters = len( self._parameters )
len_example_url = len( self.Normalise( self._example_url ) )
return ( num_required_path_components, num_total_path_components, num_required_parameters, num_total_parameters, len_example_url )
def GetURLBooleans( self ):

View File

@ -81,7 +81,7 @@ options = {}
NETWORK_VERSION = 20
SOFTWARE_VERSION = 476
CLIENT_API_VERSION = 27
SERVER_THUMBNAIL_DIMENSIONS = ( 200, 200 )

View File

@ -326,6 +326,12 @@ def TimeDeltaToPrettyTimeDelta( seconds, show_seconds = True ):
seconds %= duration
# little rounding thing if you get 364th day with 30 day months
if time_string == 'month' and time_quantity > 11:
    time_quantity = 11
if time_quantity > 0:
    s = ToHumanInt( time_quantity ) + ' ' + time_string

View File

@ -92,7 +92,37 @@ def GetAPNGACTLChunkData( file_header_bytes: bytes ):
return None
def GetAPNGDuration( apng_bytes: bytes ):
    frame_control_chunk_name = b'fcTL'
    chunks = GetAPNGChunks( apng_bytes )
    total_duration = 0
    for ( chunk_name, chunk_data ) in chunks:
        if chunk_name == frame_control_chunk_name and len( chunk_data ) >= 24:
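            # fcTL chunk layout: the frame delay is a fraction, numerator at bytes 20-21
            # and denominator at bytes 22-23, both big-endian uint16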
            ( delay_numerator, ) = struct.unpack( '>H', chunk_data[20:22] )
            ( delay_denominator, ) = struct.unpack( '>H', chunk_data[22:24] )
            if delay_denominator == 0:
                duration = 0.1
            else:
                duration = delay_numerator / delay_denominator
            total_duration += duration
    return total_duration
def GetAPNGNumFrames( apng_actl_bytes: bytes ):
    ( num_frames, ) = struct.unpack( '>I', apng_actl_bytes[ : 4 ] )
@ -289,19 +319,17 @@ def GetFFMPEGAPNGProperties( path ):
num_frames = GetAPNGNumFrames( apng_actl_bytes )
with open( path, 'rb' ) as f:
    file_bytes = f.read()
duration = GetAPNGDuration( file_bytes )
lines = GetFFMPEGInfoLines( path )
resolution = ParseFFMPEGVideoResolution( lines, png_ok = True )
duration_in_ms = int( duration * 1000 )
has_audio = False

View File

@ -1,5 +1,6 @@
from twisted.web.server import Request
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
class HydrusRequest( Request ):
@ -13,6 +14,7 @@ class HydrusRequest( Request ):
    self.hydrus_response_context = None
    self.hydrus_account = None
    self.client_api_permissions = None
    self.preferred_mime = HC.APPLICATION_JSON

def IsGET( self ):
@ -50,4 +52,3 @@ class HydrusRequestLogging( HydrusRequest ):
HydrusData.Print( message )

View File

@ -921,7 +921,75 @@ class TestClientAPI( unittest.TestCase ):
self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates )
def _test_add_notes( self, connection, set_up_permissions ):
    hash = os.urandom( 32 )
    hash_hex = hash.hex()
    #
    api_permissions = set_up_permissions[ 'everything' ]
    access_key_hex = api_permissions.GetAccessKey().hex()
    headers = { 'Hydrus-Client-API-Access-Key' : access_key_hex, 'Content-Type' : HC.mime_mimetype_string_lookup[ HC.APPLICATION_JSON ] }
    # set notes
    HG.test_controller.ClearWrites( 'content_updates' )
    path = '/add_notes/set_notes'
    new_notes_dict = { 'new note' : 'hello test', 'new note 2' : 'hello test 2' }
    body_dict = { 'hash' : hash_hex, 'notes' : new_notes_dict }
    body = json.dumps( body_dict )
    connection.request( 'POST', path, body = body, headers = headers )
    response = connection.getresponse()
    data = response.read()
    self.assertEqual( response.status, 200 )
    expected_service_keys_to_content_updates = collections.defaultdict( list )
    expected_service_keys_to_content_updates[ CC.LOCAL_NOTES_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_NOTES, HC.CONTENT_UPDATE_SET, ( hash, name, note ) ) for ( name, note ) in new_notes_dict.items() ]
    [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' )
    self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates )
    # delete notes
    HG.test_controller.ClearWrites( 'content_updates' )
    path = '/add_notes/delete_notes'
    delete_note_names = { 'new note 3', 'new note 4' }
    body_dict = { 'hash' : hash_hex, 'note_names' : list( delete_note_names ) }
    body = json.dumps( body_dict )
    connection.request( 'POST', path, body = body, headers = headers )
    response = connection.getresponse()
    data = response.read()
    self.assertEqual( response.status, 200 )
    expected_service_keys_to_content_updates = collections.defaultdict( list )
    expected_service_keys_to_content_updates[ CC.LOCAL_NOTES_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_NOTES, HC.CONTENT_UPDATE_DELETE, ( hash, name ) ) for name in delete_note_names ]
    [ ( ( service_keys_to_content_updates, ), kwargs ) ] = HG.test_controller.GetWrite( 'content_updates' )
    self._compare_content_updates( service_keys_to_content_updates, expected_service_keys_to_content_updates )
def _test_add_tags( self, connection, set_up_permissions ):
    api_permissions = set_up_permissions[ 'everything' ]
@ -2534,7 +2602,7 @@ class TestClientAPI( unittest.TestCase ):
timestamp_manager = timestamp_manager
)
ratings_manager = ClientMediaManagers.RatingsManager( {} )
notes_manager = ClientMediaManagers.NotesManager( { 'note' : 'hello', 'note2' : 'hello2' } )
file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager()
media_result = ClientMediaResult.MediaResult( file_info_manager, tags_manager, locations_manager, ratings_manager, notes_manager, file_viewing_stats_manager )
@ -2545,6 +2613,7 @@ class TestClientAPI( unittest.TestCase ):
hide_service_names_tags_metadata = []
metadata = []
detailed_known_urls_metadata = []
with_notes_metadata = []
services_manager = HG.client_controller.services_manager
@ -2656,10 +2725,17 @@ class TestClientAPI( unittest.TestCase ):
detailed_known_urls_metadata.append( detailed_known_urls_metadata_row )
with_notes_metadata_row = dict( metadata_row )
with_notes_metadata_row[ 'notes' ] = media_result.GetNotesManager().GetNamesToNotes()
with_notes_metadata.append( with_notes_metadata_row )
expected_hide_service_names_tags_metadata_result = { 'metadata' : hide_service_names_tags_metadata }
expected_metadata_result = { 'metadata' : metadata }
expected_detailed_known_urls_metadata_result = { 'metadata' : detailed_known_urls_metadata }
expected_notes_metadata_result = { 'metadata' : with_notes_metadata }
HG.test_controller.SetRead( 'hash_ids_to_hashes', file_ids_to_hashes )
@ -2820,6 +2896,24 @@ class TestClientAPI( unittest.TestCase ):
self.assertEqual( d, expected_detailed_known_urls_metadata_result )
# metadata from hashes with notes info
path = '/get_files/file_metadata?hashes={}&include_notes=true'.format( urllib.parse.quote( json.dumps( [ hash.hex() for hash in file_ids_to_hashes.values() ] ) ) )
connection.request( 'GET', path, headers = headers )
response = connection.getresponse()
data = response.read()
text = str( data, 'utf-8' )
self.assertEqual( response.status, 200 )
d = json.loads( text )
self.assertEqual( d, expected_notes_metadata_result )
# failure on missing file_ids
HG.test_controller.SetRead( 'media_results_from_ids', HydrusExceptions.DataMissing( 'test missing' ) )
@ -3182,6 +3276,7 @@ class TestClientAPI( unittest.TestCase ):
self._test_manage_database( connection, set_up_permissions )
self._test_add_files_add_file( connection, set_up_permissions )
self._test_add_files_other_actions( connection, set_up_permissions )
self._test_add_notes( connection, set_up_permissions )
self._test_add_tags( connection, set_up_permissions )
self._test_add_tags_search_tags( connection, set_up_permissions )
self._test_add_urls( connection, set_up_permissions )

View File

@ -1,4 +1,5 @@
beautifulsoup4>=4.0.0
cbor2
chardet>=3.0.4
cloudscraper>=1.2.33
html5lib>=1.0.1

View File

@ -1,4 +1,5 @@
beautifulsoup4>=4.0.0
cbor2
chardet>=3.0.4
cloudscraper>=1.2.33
html5lib>=1.0.1

View File

@ -1,4 +1,5 @@
beautifulsoup4>=4.0.0
cbor2
chardet>=3.0.4
cloudscraper>=1.2.33
html5lib>=1.0.1

View File

@ -1,4 +1,5 @@
beautifulsoup4>=4.0.0
cbor2
chardet>=3.0.4
cloudscraper>=1.2.33
html5lib>=1.0.1

View File

@ -1,4 +1,5 @@
beautifulsoup4>=4.0.0
cbor2
chardet>=3.0.4
cloudscraper>=1.2.33
html5lib>=1.0.1

View File

@ -1,4 +1,5 @@
beautifulsoup4>=4.0.0
cbor2
chardet>=3.0.4
cloudscraper>=1.2.33
html5lib>=1.0.1

Binary file not shown. (Before: 2.8 KiB | After: 2.9 KiB)

Binary file not shown. (Before: 2.0 KiB | After: 2.1 KiB)