From 5ce8d3e35629d6f5270fa1af66a29f040891f33f Mon Sep 17 00:00:00 2001 From: thatfuckingbird <67429906+thatfuckingbird@users.noreply.github.com> Date: Sat, 26 Mar 2022 18:04:21 +0100 Subject: [PATCH 1/3] also return file ids if hashes requested in file search API --- docs/developer_api.md | 5 +++-- hydrus/client/networking/ClientLocalServerResources.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/developer_api.md b/docs/developer_api.md index baa62dd5..13d7db09 100644 --- a/docs/developer_api.md +++ b/docs/developer_api.md @@ -1228,7 +1228,7 @@ Arguments (in percent-encoded JSON): * `tag_service_key`: (optional, selective, hexadecimal, the tag domain on which to search) * `file_sort_type`: (optional, integer, the results sort method) * `file_sort_asc`: true or false (optional, the results sort order) - * `return_hashes`: true or false (optional, default false, returns hex hashes instead of file ids) + * `return_hashes`: true or false (optional, default false, returns hex hashes in addition to file ids, hashes and file ids are in the same order) * _`system_inbox`: true or false (obsolete, use tags)_ * _`system_archive`: true or false (obsolete, use tags)_ @@ -1375,7 +1375,8 @@ Response: "1b04c4df7accd5a61c5d02b36658295686b0abfebdc863110e7d7249bba3f9ad", "fe416723c731d679aa4d20e9fd36727f4a38cd0ac6d035431f0f452fad54563f", "b53505929c502848375fbc4dab2f40ad4ae649d34ef72802319a348f81b52bad" - ] + ], + "file_ids": [125462, 4852415, 123] } ``` diff --git a/hydrus/client/networking/ClientLocalServerResources.py b/hydrus/client/networking/ClientLocalServerResources.py index 33f19dd8..a253f225 100644 --- a/hydrus/client/networking/ClientLocalServerResources.py +++ b/hydrus/client/networking/ClientLocalServerResources.py @@ -2099,7 +2099,7 @@ class HydrusResourceClientAPIRestrictedGetFilesSearchFiles( HydrusResourceClient hash_ids_to_hashes = HG.client_controller.Read( 'hash_ids_to_hashes', hash_ids = hash_ids ) # maintain sort - body_dict = { 'hashes' : [ hash_ids_to_hashes[ hash_id ].hex() for hash_id in hash_ids ] } + body_dict = { 'hashes' : [ hash_ids_to_hashes[ hash_id ].hex() for hash_id in hash_ids ], 'file_ids' : list( hash_ids ) } else: From 33187d756113ad493c15a10956b17e55839b3a1a Mon Sep 17 00:00:00 2001 From: thatfuckingbird <67429906+thatfuckingbird@users.noreply.github.com> Date: Sat, 26 Mar 2022 18:42:25 +0100 Subject: [PATCH 2/3] api: accept file ids everywhere where hashes are accepted --- docs/developer_api.md | 12 ++++++++++++ .../networking/ClientLocalServerResources.py | 18 ++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/docs/developer_api.md b/docs/developer_api.md index 13d7db09..37f7a870 100644 --- a/docs/developer_api.md +++ b/docs/developer_api.md @@ -322,6 +322,8 @@ Arguments (in JSON): : * `hash`: (an SHA256 hash for a file in 64 characters of hexadecimal) * `hashes`: (a list of SHA256 hashes) +* `file_id`: (a numerical file id) +* `file_ids`: (a list of numerical file ids) * `file_service_name`: (optional, selective, string, the local file domain from which to delete, or all local files) * `file_service_key`: (optional, selective, hexadecimal, the local file domain from which to delete, or all local files) * `reason`: (optional, string, the reason attached to the delete action) @@ -352,6 +354,8 @@ Arguments (in JSON): : * `hash`: (an SHA256 hash for a file in 64 characters of hexadecimal) * `hashes`: (a list of SHA256 hashes) +* `file_id`: (a numerical file id) +* `file_ids`: (a list of numerical file ids) * 
`file_service_name`: (optional, selective, string, the local file domain to which to undelete) * `file_service_key`: (optional, selective, hexadecimal, the local file domain to which to undelete) @@ -382,6 +386,8 @@ Arguments (in JSON): : * `hash`: (an SHA256 hash for a file in 64 characters of hexadecimal) * `hashes`: (a list of SHA256 hashes) +* `file_id`: (a numerical file id) +* `file_ids`: (a list of numerical file ids) ```json title="Example request body" {"hash": "78f92ba4a786225ee2a1236efa6b7dc81dd729faf4af99f96f3e20bad6d8b538"} @@ -410,6 +416,8 @@ Arguments (in JSON): : * `hash`: (an SHA256 hash for a file in 64 characters of hexadecimal) * `hashes`: (a list of SHA256 hashes) +* `file_id`: (a numerical file id) +* `file_ids`: (a list of numerical file ids) ```json title="Example request body" {"hash": "78f92ba4a786225ee2a1236efa6b7dc81dd729faf4af99f96f3e20bad6d8b538"} @@ -544,6 +552,8 @@ Arguments (in JSON): : * `hash`: (selective A, an SHA256 hash for a file in 64 characters of hexadecimal) * `hashes`: (selective A, a list of SHA256 hashes) +* `file_id`: (a numerical file id) +* `file_ids`: (a list of numerical file ids) * `service_names_to_tags`: (selective B, an Object of service names to lists of tags to be 'added' to the files) * `service_keys_to_tags`: (selective B, an Object of service keys to lists of tags to be 'added' to the files) * `service_names_to_actions_to_tags`: (selective B, an Object of service names to content update actions to lists of tags) @@ -816,6 +826,8 @@ Arguments (in JSON): * `urls_to_delete`: (a list of urls you want to disassociate from the file(s)) * `hash`: (an SHA256 hash for a file in 64 characters of hexadecimal) * `hashes`: (a list of SHA256 hashes) + * `file_id`: (a numerical file id) + * `file_ids`: (a list of numerical file ids) All of these are optional, but you obviously need to have at least one of `url` arguments and one of the `hash` arguments. The single/multiple arguments work the same--just use whatever is convenient for you. Unless you really know what you are doing with URL Classes, I strongly recommend you stick to associating URLs with just one single 'hash' at a time. Multiple hashes pointing to the same URL is unusual and frequently unhelpful. 
```json title="Example request body" diff --git a/hydrus/client/networking/ClientLocalServerResources.py b/hydrus/client/networking/ClientLocalServerResources.py index a253f225..7396221a 100644 --- a/hydrus/client/networking/ClientLocalServerResources.py +++ b/hydrus/client/networking/ClientLocalServerResources.py @@ -446,6 +446,24 @@ def ParseHashes( request: HydrusServerRequest.HydrusRequest ): hashes.update( more_hashes ) + if 'file_id' in request.parsed_request_args: + + hash_id = request.parsed_request_args.GetValue( 'file_id', int ) + + hash_ids_to_hashes = HG.client_controller.Read( 'hash_ids_to_hashes', hash_ids = [ hash_id ] ) + + hashes.update( hash_ids_to_hashes.values() ) + + + if 'file_ids' in request.parsed_request_args: + + hash_ids = request.parsed_request_args.GetValue( 'file_ids', list, expected_list_type = int ) + + hash_ids_to_hashes = HG.client_controller.Read( 'hash_ids_to_hashes', hash_ids = hash_ids ) + + hashes.update( hash_ids_to_hashes.values() ) + + CheckHashLength( hashes ) return hashes From 2a352104dedafe3cd90b0e036ad94623a8a5b4a1 Mon Sep 17 00:00:00 2001 From: thatfuckingbird <67429906+thatfuckingbird@users.noreply.github.com> Date: Sat, 26 Mar 2022 19:01:26 +0100 Subject: [PATCH 3/3] consistency: accept 'file_id' and 'hash' where 'file_ids'/'hashes' is accepted --- docs/developer_api.md | 4 +++ .../networking/ClientLocalServerResources.py | 36 ++++++++++++++++--- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/docs/developer_api.md b/docs/developer_api.md index 37f7a870..f509a0ae 100644 --- a/docs/developer_api.md +++ b/docs/developer_api.md @@ -1176,7 +1176,9 @@ Required Headers: Arguments (in JSON): : * `page_key`: (the page key for the page you wish to add files to) + * `file_id`: (selective, a numerical file id) * `file_ids`: (selective, a list of numerical file ids) + * `hash`: (selective, a hexadecimal SHA256 hash) * `hashes`: (selective, a list of hexadecimal SHA256 hashes) You need to use either file_ids or hashes. The files they refer to will be appended to the given page, just like a thumbnail drag and drop operation. The page key is the same as fetched in the [/manage\_pages/get\_pages](#manage_pages_get_pages) call. 
@@ -1407,7 +1409,9 @@ Required Headers: n/a Arguments (in percent-encoded JSON): : + * `file_id`: (selective, a numerical file id) * `file_ids`: (selective, a list of numerical file ids) + * `hash`: (selective, a hexadecimal SHA256 hash) * `hashes`: (selective, a list of hexadecimal SHA256 hashes) * `only_return_identifiers`: true or false (optional, defaulting to false) * `detailed_url_information`: true or false (optional, defaulting to false) diff --git a/hydrus/client/networking/ClientLocalServerResources.py b/hydrus/client/networking/ClientLocalServerResources.py index 7396221a..b746d1be 100644 --- a/hydrus/client/networking/ClientLocalServerResources.py +++ b/hydrus/client/networking/ClientLocalServerResources.py @@ -2196,9 +2196,15 @@ class HydrusResourceClientAPIRestrictedGetFilesFileMetadata( HydrusResourceClien try: - if 'file_ids' in request.parsed_request_args: + if 'file_ids' in request.parsed_request_args or 'file_id' in request.parsed_request_args: - file_ids = request.parsed_request_args.GetValue( 'file_ids', list, expected_list_type = int ) + if 'file_ids' in request.parsed_request_args: + + file_ids = request.parsed_request_args.GetValue( 'file_ids', list, expected_list_type = int ) + + else: + + file_ids = [ request.parsed_request_args.GetValue( 'file_id', int ) ] request.client_api_permissions.CheckPermissionToSeeFiles( file_ids ) @@ -2211,11 +2217,17 @@ class HydrusResourceClientAPIRestrictedGetFilesFileMetadata( HydrusResourceClien media_results = HG.client_controller.Read( 'media_results_from_ids', file_ids, sorted = True ) - elif 'hashes' in request.parsed_request_args: + elif 'hashes' in request.parsed_request_args or 'hash' in request.parsed_request_args: request.client_api_permissions.CheckCanSeeAllFiles() - hashes = request.parsed_request_args.GetValue( 'hashes', list, expected_list_type = bytes ) + if 'hashes' in request.parsed_request_args: + + hashes = request.parsed_request_args.GetValue( 'hashes', list, expected_list_type = bytes ) + + else: + + hashes = [ request.parsed_request_args.GetValue( 'hash', bytes ) ] CheckHashLength( hashes ) @@ -2742,7 +2754,15 @@ class HydrusResourceClientAPIRestrictedManagePagesAddFiles( HydrusResourceClient page_key = request.parsed_request_args.GetValue( 'page_key', bytes ) - if 'hashes' in request.parsed_request_args: + if 'hash' in request.parsed_request_args: + + hashes = [ request.parsed_request_args.GetValue( 'hash', bytes ) ] + + CheckHashLength( hashes ) + + media_results = HG.client_controller.Read( 'media_results', hashes, sorted = True ) + + elif 'hashes' in request.parsed_request_args: hashes = request.parsed_request_args.GetValue( 'hashes', list, expected_list_type = bytes ) @@ -2750,6 +2770,12 @@ class HydrusResourceClientAPIRestrictedManagePagesAddFiles( HydrusResourceClient media_results = HG.client_controller.Read( 'media_results', hashes, sorted = True ) + elif 'file_id' in request.parsed_request_args: + + hash_ids = [ request.parsed_request_args.GetValue( 'file_id', int ) ] + + media_results = HG.client_controller.Read( 'media_results_from_ids', hash_ids, sorted = True ) + elif 'file_ids' in request.parsed_request_args: hash_ids = request.parsed_request_args.GetValue( 'file_ids', list, expected_list_type = int )
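
A minimal client-side sketch of what patch 1 enables, assuming the Client API is reachable on the default localhost port (45869) and `HYDRUS_API_KEY` is a placeholder access key with search permission: with `return_hashes=true`, `/get_files/search_files` now returns `hashes` alongside `file_ids`, in the same order, so the two lists can be zipped together.

```python
import json
import requests

# Assumptions: default Client API port and a placeholder access key; the tag
# list is illustrative only.
API_URL = "http://127.0.0.1:45869"
HEADERS = {"Hydrus-Client-API-Access-Key": "HYDRUS_API_KEY"}

# Search with return_hashes=true: after patch 1 the response carries both
# "hashes" and "file_ids", in matching order.
params = {
    "tags": json.dumps(["blue eyes", "system:archive"]),
    "return_hashes": "true",
}
response = requests.get(f"{API_URL}/get_files/search_files", headers=HEADERS, params=params)
response.raise_for_status()
results = response.json()

# Pair each numerical file id with its SHA256 hex hash.
for file_id, sha256_hex in zip(results["file_ids"], results["hashes"]):
    print(file_id, sha256_hex)
```

Returning both identifiers in one response saves a follow-up `/get_files/file_metadata` round trip when a caller needs hashes for one purpose and ids for later page or metadata calls.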
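
Patch 2 routes `file_id`/`file_ids` through `ParseHashes`, so endpoints that previously took only hashes (delete, undelete, archive, add tags, associate URLs) can now be driven by numerical ids. A hedged sketch, reusing the placeholder `API_URL` and access key from above and the example file ids from the search response in the docs; the URL value is illustrative only.

```python
import requests

# Assumptions: same placeholder endpoint and access key as above, with the
# relevant "add files" / "add urls" permissions granted to the key.
API_URL = "http://127.0.0.1:45869"
HEADERS = {"Hydrus-Client-API-Access-Key": "HYDRUS_API_KEY"}

# Send files to the trash by their numerical ids rather than their SHA256 hashes.
requests.post(
    f"{API_URL}/add_files/delete_files",
    headers=HEADERS,
    json={"file_ids": [125462, 4852415]},
)

# Associate a URL with a file, again identified by id instead of hash.
requests.post(
    f"{API_URL}/add_urls/associate_url",
    headers=HEADERS,
    json={"url_to_add": "https://example.com/post/123", "file_id": 123},
)
```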
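
Patch 3 adds the singular forms for consistency: `/get_files/file_metadata` accepts a lone `file_id` or `hash`, and `/manage_pages/add_files` accepts a lone `hash` or `file_id`. A sketch under the same assumptions; `PAGE_KEY` stands in for a real page key fetched from `/manage_pages/get_pages`, and the hash is the example value already used in the docs.

```python
import requests

# Assumptions: same placeholder endpoint and access key as above; PAGE_KEY is
# a hexadecimal page key previously fetched from /manage_pages/get_pages.
API_URL = "http://127.0.0.1:45869"
HEADERS = {"Hydrus-Client-API-Access-Key": "HYDRUS_API_KEY"}

# Fetch metadata for a single file by its id; before patch 3 this endpoint
# only accepted the plural "file_ids"/"hashes" arguments.
metadata = requests.get(
    f"{API_URL}/get_files/file_metadata",
    headers=HEADERS,
    params={"file_id": 125462},
).json()

# Append a single file to an existing page by its hash.
requests.post(
    f"{API_URL}/manage_pages/add_files",
    headers=HEADERS,
    json={
        "page_key": "PAGE_KEY",
        "hash": "78f92ba4a786225ee2a1236efa6b7dc81dd729faf4af99f96f3e20bad6d8b538",
    },
)
```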