diff --git a/IDHANServer/src/ConnectionArguments.hpp b/IDHANServer/src/ConnectionArguments.hpp index abd3a66..38cfc6a 100644 --- a/IDHANServer/src/ConnectionArguments.hpp +++ b/IDHANServer/src/ConnectionArguments.hpp @@ -22,6 +22,8 @@ struct ConnectionArguments std::string user { "idhan" }; std::string password { "" }; bool testmode { false }; + //! If true then the server will use stdout to log things. + bool use_stdout { false }; std::string format() const; }; diff --git a/IDHANServer/src/ServerContext.cpp b/IDHANServer/src/ServerContext.cpp index 25ed627..541def2 100644 --- a/IDHANServer/src/ServerContext.cpp +++ b/IDHANServer/src/ServerContext.cpp @@ -4,6 +4,13 @@ #include "ServerContext.hpp" +#include +#include +#include +#include + +#include + #include "ConnectionArguments.hpp" #include "NET_CONSTANTS.hpp" #include "api/helpers/ResponseCallback.hpp" @@ -15,19 +22,21 @@ namespace idhan { -void ServerContext::setupCORSSupport() +void ServerContext::setupCORSSupport() const { drogon::app().registerPreRoutingAdvice( - []( const drogon::HttpRequestPtr& request, drogon::FilterCallback&& stop, drogon::FilterChainCallback&& pass ) + [ this ]( + const drogon::HttpRequestPtr& request, drogon::FilterCallback&& stop, drogon::FilterChainCallback&& pass ) { - log::debug( "{}:{}", request->getMethodString(), request->getPath() ); + // if ( args.testmode ) log::info( "Handling query: {}:{}", request->getMethodString(), request->getPath() ); + if ( !request->path().starts_with( "/hyapi" ) || request->method() != drogon::Options ) { pass(); return; } - auto response { drogon::HttpResponse::newHttpResponse() }; + const auto response { drogon::HttpResponse::newHttpResponse() }; response->addHeader( "Access-Control-Allow-Headers", "*" ); response->addHeader( "Access-Control-Allow-Origin", "*" ); @@ -57,26 +66,72 @@ void exceptionHandler( const std::exception& e, const drogon::HttpRequestPtr& re // drogon::defaultExceptionHandler( e, request, std::move( callback ) ); } 
-ServerContext::ServerContext( const ConnectionArguments& arguments ) : - m_postgresql_management( std::make_unique< ManagementConnection >( arguments ) ) +std::shared_ptr< spdlog::logger > ServerContext::createLogger( const ConnectionArguments& arguments ) { - log::server::info( "IDHAN initalization starting" ); + constexpr std::string_view server_format_str { "[%Y-%m-%d %H:%M:%S.%e] [SERVER] [%^%l%$] [thread %t] %v" }; - auto& app { drogon::app() }; + // stdout log disabled + if ( !arguments.use_stdout ) + { + auto file_logger { spdlog::rotating_logger_mt( "file_logger", "./log/info.log", 1048576 * 10, 512, true ) }; + file_logger->set_level( spdlog::level::info ); + + file_logger->set_pattern( std::string( server_format_str ) ); + + spdlog::set_default_logger( file_logger ); + + return file_logger; + } + else + { + auto file_logger { + std::make_shared< spdlog::sinks::rotating_file_sink_mt >( "./log/info.log", 1048576 * 10, 512, true ) + }; + + auto stdout_logger { std::make_shared< spdlog::sinks::stdout_color_sink_mt >() }; + + auto logger { + std::make_shared< spdlog::logger >( "multi_sink", spdlog::sinks_init_list { stdout_logger, file_logger } ) + }; + + logger->set_pattern( std::string( server_format_str ) ); + + spdlog::set_default_logger( logger ); + return logger; + } +} + +ServerContext::ServerContext( const ConnectionArguments& arguments ) : + m_logger( createLogger( arguments ) ), + m_postgresql_management( std::make_unique< ManagementConnection >( arguments ) ), + args( arguments ) +{ + log::server::info( "IDHAN initialization starting" ); spdlog::enable_backtrace( 32 ); log::debug( "Logging show debug" ); log::info( "Logging show info" ); + std::size_t hardware_count { std::min( static_cast< std::size_t >( std::thread::hardware_concurrency() ), 4ul ) }; + std::size_t rest_count { hardware_count / 4 }; + std::size_t db_count { hardware_count }; + + constexpr std::string_view log_directory { "./log/drogon" }; + + std::filesystem::create_directories( 
log_directory ); + drogon::app() .setLogPath( "./" ) .setLogLevel( trantor::Logger::kInfo ) .addListener( "127.0.0.1", IDHAN_DEFAULT_PORT ) - .setThreadNum( 16 ) + .setThreadNum( rest_count ) .setClientMaxBodySize( std::numeric_limits< std::uint64_t >::max() ) .setDocumentRoot( "./pages" ) - .setExceptionHandler( exceptionHandler ); + .setExceptionHandler( exceptionHandler ) + .setLogPath( std::string( log_directory ), "", 1024 * 1024 * 1024, 8, true ); + + trantor::Logger::enableSpdLog( m_logger ); drogon::orm::PostgresConfig config; config.host = arguments.hostname; @@ -84,7 +139,7 @@ ServerContext::ServerContext( const ConnectionArguments& arguments ) : config.databaseName = arguments.dbname; config.username = arguments.user; config.password = arguments.password; - config.connectionNumber = std::thread::hardware_concurrency(); + config.connectionNumber = db_count; config.name = "default"; config.isFast = false; config.characterSet = "UTF-8"; @@ -93,7 +148,8 @@ ServerContext::ServerContext( const ConnectionArguments& arguments ) : if ( arguments.testmode ) { - config.connectOptions.emplace( std::make_pair( "searchpatch", "test" ) ); + log::warn( "Connecting to database using test mode!" ); + config.connectOptions.emplace( std::make_pair( "search_path", "test" ) ); } drogon::app().addDbClient( config ); diff --git a/IDHANServer/src/ServerContext.hpp b/IDHANServer/src/ServerContext.hpp index ac7fc05..48119b8 100644 --- a/IDHANServer/src/ServerContext.hpp +++ b/IDHANServer/src/ServerContext.hpp @@ -3,24 +3,30 @@ // #pragma once +#include + #include #include +#include "ConnectionArguments.hpp" + namespace idhan { -struct ConnectionArguments; class ManagementConnection; class ServerContext { + std::shared_ptr< spdlog::logger > m_logger; //! 
Connection to postgresql to be used for management/setup std::unique_ptr< ManagementConnection > m_postgresql_management; + ConnectionArguments args; public: ServerContext() = delete; - void setupCORSSupport(); + void setupCORSSupport() const; + static std::shared_ptr< spdlog::logger > createLogger( const ConnectionArguments& arguments ); ServerContext( const ConnectionArguments& arguments ); void run(); diff --git a/IDHANServer/src/api/IDHANTagAPI.hpp b/IDHANServer/src/api/IDHANTagAPI.hpp index 3a8728c..0deb9c5 100644 --- a/IDHANServer/src/api/IDHANTagAPI.hpp +++ b/IDHANServer/src/api/IDHANTagAPI.hpp @@ -34,22 +34,22 @@ class IDHANTagAPI : public drogon::HttpController< IDHANTagAPI > METHOD_LIST_BEGIN - ADD_METHOD_TO( IDHANTagAPI::getTagInfo, "/tag/{tag_id}/info" ); - ADD_METHOD_TO( IDHANTagAPI::getTagInfo, "/tag/info?tag_id={1}" ); - ADD_METHOD_TO( IDHANTagAPI::getTagInfo, "/tag/info?tag_ids={1}" ); + ADD_METHOD_TO( IDHANTagAPI::getTagInfo, "/tags/{tag_id}/info" ); + ADD_METHOD_TO( IDHANTagAPI::getTagInfo, "/tags/info?tag_id={1}" ); + ADD_METHOD_TO( IDHANTagAPI::getTagInfo, "/tags/info?tag_ids={1}" ); - ADD_METHOD_TO( IDHANTagAPI::search, "/tag/search?tag={1}" ); - // ADD_METHOD_TO( IDHANTagAPI::autocomplete, "/tag/autocomplete?tag={1}" ); + ADD_METHOD_TO( IDHANTagAPI::search, "/tags/search?tag={1}" ); + // ADD_METHOD_TO( IDHANTagAPI::autocomplete, "/tags/autocomplete?tag={1}" ); - ADD_METHOD_TO( IDHANTagAPI::createTagRouter, "/tag/create" ); + ADD_METHOD_TO( IDHANTagAPI::createTagRouter, "/tags/create" ); - ADD_METHOD_TO( IDHANTagAPI::createTagDomain, "/tag/domain/create" ); - ADD_METHOD_TO( IDHANTagAPI::getTagDomains, "/tag/domain/list" ); - ADD_METHOD_TO( IDHANTagAPI::getTagDomainInfo, "/tag/domain/{domain_id}/info" ); - ADD_METHOD_TO( IDHANTagAPI::deleteTagDomain, "/tag/domain/{domain_id}/delete" ); + ADD_METHOD_TO( IDHANTagAPI::createTagDomain, "/tags/domain/create" ); + ADD_METHOD_TO( IDHANTagAPI::getTagDomains, "/tags/domain/list" ); + ADD_METHOD_TO( 
IDHANTagAPI::getTagDomainInfo, "/tags/domain/{domain_id}/info" ); + ADD_METHOD_TO( IDHANTagAPI::deleteTagDomain, "/tags/domain/{domain_id}/delete" ); - ADD_METHOD_TO( IDHANTagAPI::createTagParents, "/tag/parents/create" ); - ADD_METHOD_TO( IDHANTagAPI::createTagAliases, "/tag/alias/create" ); + ADD_METHOD_TO( IDHANTagAPI::createTagParents, "/tags/parents/create" ); + ADD_METHOD_TO( IDHANTagAPI::createTagAliases, "/tags/alias/create" ); METHOD_LIST_END }; diff --git a/IDHANServer/src/api/helpers/helpers.hpp b/IDHANServer/src/api/helpers/helpers.hpp index bf2b767..27198ef 100644 --- a/IDHANServer/src/api/helpers/helpers.hpp +++ b/IDHANServer/src/api/helpers/helpers.hpp @@ -4,6 +4,7 @@ #pragma once #include +#include #include "IDHANTypes.hpp" #include "drogon/HttpResponse.h" @@ -15,4 +16,40 @@ std::expected< TagDomainID, drogon::HttpResponsePtr > getTagDomainID( drogon::Ht void addFileCacheHeader( drogon::HttpResponsePtr sharedPtr ); +std::string pgEscape( const std::string& s ); + +template < typename T > +std::string pgArrayify( const std::vector< T >& vec ) +{ + std::string data { "{" }; + data.reserve( vec.size() * 8 ); + + std::size_t counter { 0 }; + + for ( const auto& v : vec ) + { + if constexpr ( std::same_as< T, std::string > ) + { + data += pgEscape( v ); + } + else if constexpr ( std::is_integral_v< T > ) + { + data += std::to_string( v ); + } + else + static_assert( !sizeof( T ), "Unknown type for pgArrayify" ); + + if ( counter < vec.size() - 1 ) + { + data += ","; + } + + counter += 1; + } + + data += "}"; + + return data; +} + } // namespace idhan::api::helpers diff --git a/IDHANServer/src/api/helpers/tags/namespaces.cpp b/IDHANServer/src/api/helpers/tags/namespaces.cpp index 45ea796..ba2beaf 100644 --- a/IDHANServer/src/api/helpers/tags/namespaces.cpp +++ b/IDHANServer/src/api/helpers/tags/namespaces.cpp @@ -23,19 +23,23 @@ drogon::Task< std::optional< NamespaceID > > searchNamespace( const std::string& drogon::Task< std::expected< NamespaceID, 
drogon::HttpResponsePtr > > findOrCreateNamespace( const std::string& str, drogon::orm::DbClientPtr db ) { - const auto id_search { co_await searchNamespace( str, db ) }; - if ( id_search.has_value() ) - { - co_return id_search.value(); + NamespaceID namespace_id { 0 }; + + do { + if ( const auto id_search = co_await searchNamespace( str, db ); id_search.has_value() ) + { + co_return id_search.value(); + } + + const auto id_creation { co_await db->execSqlCoro( + "INSERT INTO tag_namespaces (namespace_text) VALUES ($1) ON CONFLICT DO NOTHING RETURNING namespace_id", + str ) }; + + if ( id_creation.size() > 0 ) namespace_id = id_creation[ 0 ][ 0 ].as< NamespaceID >(); } + while ( namespace_id == 0 ); - const auto id_creation { co_await db->execSqlCoro( - "INSERT INTO tag_namespaces (namespace_text) VALUES ($1) ON CONFLICT DO NOTHING RETURNING namespace_id", - str ) }; - - if ( id_creation.size() > 0 ) co_return id_creation[ 0 ][ 0 ].as< NamespaceID >(); - - co_return std::unexpected( createInternalError( "Failed to create namespace: {}", str ) ); + co_return namespace_id; } } // namespace idhan \ No newline at end of file diff --git a/IDHANServer/src/api/helpers/tags/subtags.cpp b/IDHANServer/src/api/helpers/tags/subtags.cpp index b21940d..8821562 100644 --- a/IDHANServer/src/api/helpers/tags/subtags.cpp +++ b/IDHANServer/src/api/helpers/tags/subtags.cpp @@ -21,32 +21,22 @@ drogon::Task< std::optional< SubtagID > > searchSubtag( const std::string& str, drogon::Task< std::expected< SubtagID, drogon::HttpResponsePtr > > findOrCreateSubtag( const std::string& str, const drogon::orm::DbClientPtr db ) { - const auto id_search { co_await searchSubtag( str, db ) }; - if ( id_search.has_value() ) - { - co_return id_search.value(); - } + SubtagID subtag_id { 0 }; + + do { + if ( const auto id_search = co_await searchSubtag( str, db ); id_search.has_value() ) + { + co_return id_search.value(); + } - try - { const auto id_creation { co_await db->execSqlCoro( "INSERT INTO 
tag_subtags (subtag_text) VALUES ($1) ON CONFLICT DO NOTHING RETURNING subtag_id", str ) }; - if ( id_creation.size() > 0 ) - { - co_return id_creation[ 0 ][ 0 ].as< SubtagID >(); - } - - const auto second_search { co_await searchSubtag( str, db ) }; - - if ( second_search.has_value() ) co_return second_search.value(); - - co_return std::unexpected( createInternalError( "Failed to create subtag: {}", str ) ); - } - catch ( std::exception& e ) - { - co_return std::unexpected( createInternalError( "Failed to create subtag: {}: {}", str, e.what() ) ); + if ( id_creation.size() > 0 ) subtag_id = id_creation[ 0 ][ 0 ].as< SubtagID >(); } + while ( subtag_id == 0 ); + + co_return subtag_id; } } // namespace idhan \ No newline at end of file diff --git a/IDHANServer/src/api/record/createRecord.cpp b/IDHANServer/src/api/record/createRecord.cpp index 7033d81..08a4a64 100644 --- a/IDHANServer/src/api/record/createRecord.cpp +++ b/IDHANServer/src/api/record/createRecord.cpp @@ -27,12 +27,12 @@ ResponseTask createRecordFromJson( const drogon::HttpRequestPtr req ) const Json::Value& json { *json_ptr }; auto db { drogon::app().getDbClient() }; - auto transaction { db->newTransaction() }; //test if sha256 is a list or 1 item const auto& sha256s { json[ "sha256" ] }; if ( sha256s.isArray() ) { + // auto transaction { db->newTransaction() }; Json::Value json_array {}; Json::ArrayIndex idx { 0 }; @@ -43,9 +43,11 @@ ResponseTask createRecordFromJson( const drogon::HttpRequestPtr req ) const auto& str { value.asString() }; // dehexify the string. 
- SHA256 sha256 { SHA256::fromHex( str ) }; - auto result { co_await createRecord( sha256, transaction ) }; + const auto expected_hash { SHA256::fromHex( str ) }; + if ( !expected_hash.has_value() ) co_return expected_hash.error(); + + const auto result { co_await createRecord( *expected_hash, db ) }; json_array[ idx++ ] = result; } @@ -55,8 +57,11 @@ ResponseTask createRecordFromJson( const drogon::HttpRequestPtr req ) else if ( sha256s.isString() ) // HEX string { Json::Value json_out {}; - const SHA256 sha256 { SHA256::fromHex( sha256s.asString() ) }; - const RecordID record_id { co_await createRecord( sha256, db ) }; + const auto sha256 { SHA256::fromHex( sha256s.asString() ) }; + + if ( !sha256.has_value() ) co_return sha256.error(); + + const RecordID record_id { co_await createRecord( *sha256, db ) }; json_out[ "record_id" ] = record_id; co_return drogon::HttpResponse::newHttpJsonResponse( json_out ); diff --git a/IDHANServer/src/api/record/searchHash.cpp b/IDHANServer/src/api/record/searchHash.cpp index 1a79731..e9140a2 100644 --- a/IDHANServer/src/api/record/searchHash.cpp +++ b/IDHANServer/src/api/record/searchHash.cpp @@ -20,7 +20,10 @@ drogon::Task< drogon::HttpResponsePtr > IDHANRecordAPI::searchHash( [[maybe_unus if ( hash_str.size() != expected_hash_size ) co_return createBadRequest( "Hash size was invalid, must be {}", expected_hash_size ); - const SHA256 hash { SHA256::fromHex( hash_str ) }; + const auto expected_hash { SHA256::fromHex( hash_str ) }; + if ( !expected_hash.has_value() ) co_return expected_hash.error(); + + const auto& hash { expected_hash.value() }; const auto db { drogon::app().getDbClient() }; diff --git a/IDHANServer/src/api/record/tags/addTags.cpp b/IDHANServer/src/api/record/tags/addTags.cpp index 5ad0dd5..c3a93d2 100644 --- a/IDHANServer/src/api/record/tags/addTags.cpp +++ b/IDHANServer/src/api/record/tags/addTags.cpp @@ -174,24 +174,19 @@ drogon::Task< drogon::HttpResponsePtr > IDHANRecordAPI:: if ( !tag_pairs.has_value() ) 
co_return tag_pairs.error(); - auto tag_pair_ids { co_await getIDsFromPairs( std::move( tag_pairs.value() ), transaction ) }; + const auto tag_pair_ids { co_await getIDsFromPairs( std::move( tag_pairs.value() ), transaction ) }; - for ( const TagID& tag : tag_pair_ids.value() ) - { - const auto tag_domain_id { helpers::getTagDomainID( request ) }; + if ( !tag_pair_ids.has_value() ) co_return tag_pair_ids.error(); - if ( !tag_domain_id.has_value() ) co_return tag_domain_id.error(); + const auto tag_domain_id { helpers::getTagDomainID( request ) }; - const auto tag_id { co_await getIDFromPair( tag, transaction ) }; + if ( !tag_domain_id.has_value() ) co_return tag_domain_id.error(); - if ( !tag_id.has_value() ) co_return tag_id.error(); - - const auto insert_result { co_await transaction->execSqlCoro( - "INSERT INTO tag_mappings (record_id, tag_id, domain_id) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", - record_id, - tag_id.value(), - tag_domain_id.value() ) }; - } + const auto insert_result { co_await transaction->execSqlCoro( + "INSERT INTO tag_mappings (record_id, tag_id, domain_id) VALUES ($1, UNNEST($2::INTEGER[]), $3) ON CONFLICT DO NOTHING", + record_id, + helpers::pgArrayify( tag_pair_ids.value() ), + tag_domain_id.value() ) }; co_return drogon::HttpResponse::newHttpResponse(); } @@ -206,19 +201,32 @@ drogon::Task< drogon::HttpResponsePtr > IDHANRecordAPI::addMultipleTags( drogon: const auto& json { *json_ptr }; - auto tag_pairs { co_await getTagPairs( json[ "tags" ] ) }; + if ( !json[ "records" ].isArray() ) + co_return createBadRequest( "Invalid json: Array of ids called 'records' must be present." 
); - if ( !tag_pairs.has_value() ) co_return tag_pairs.error(); + const auto tag_domain_id { helpers::getTagDomainID( request ) }; - auto tag_pair_ids { co_await getIDsFromPairs( std::move( tag_pairs.value() ), transaction ) }; + if ( !tag_domain_id.has_value() ) co_return tag_domain_id.error(); - const auto records_json { json[ "records" ] }; + const auto domain_search { + co_await db + ->execSqlCoro( "SELECT tag_domain_id FROM tag_domains WHERE tag_domain_id = $1", tag_domain_id.value() ) + }; - for ( const TagID tag_id : tag_pair_ids.value() ) + if ( domain_search.empty() ) co_return createBadRequest( "Invalid domain ID given" ); + + const auto& records_json { json[ "records" ] }; + + // This list of tags is applied to all records. If it's null then there is no tags to apply from it. + if ( const auto& tags_json = json[ "tags" ]; tags_json.isArray() ) { - const auto tag_domain_id { helpers::getTagDomainID( request ) }; + const auto tag_pairs { co_await getTagPairs( tags_json ) }; - if ( !tag_domain_id.has_value() ) co_return tag_domain_id.error(); + if ( !tag_pairs.has_value() ) co_return tag_pairs.error(); + + const auto tag_pair_ids { co_await getIDsFromPairs( std::move( tag_pairs.value() ), transaction ) }; + + if ( !tag_pair_ids.has_value() ) co_return tag_pair_ids.error(); for ( const auto& record_json : records_json ) { @@ -226,12 +234,51 @@ drogon::Task< drogon::HttpResponsePtr > IDHANRecordAPI::addMultipleTags( drogon: co_return createBadRequest( "Invalid json item in records list: Expected integral" ); const auto insert_result { co_await transaction->execSqlCoro( - "INSERT INTO tag_mappings (record_id, tag_id, domain_id) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", + "INSERT INTO tag_mappings (record_id, tag_id, domain_id) VALUES ($1, UNNEST($2::INTEGER[]), $3) ON CONFLICT DO NOTHING", static_cast< RecordID >( record_json.asInt64() ), - tag_id, + helpers::pgArrayify( tag_pair_ids.value() ), tag_domain_id.value() ) }; } } + else if ( !tags_json.isNull() 
) + { + co_return createBadRequest( "Invalid json: Tags must be array or null (not present)" ); + } + + if ( const auto& sets_json = json[ "sets" ]; sets_json.isArray() ) + { + if ( sets_json.size() != records_json.size() ) + co_return createBadRequest( + "Sets vs Records size mismatch. Number of sets must match number of records. Got {} expected {}: Json: {}", + sets_json.size(), + records_json.size(), + json.toStyledString() ); + + for ( Json::ArrayIndex i = 0; i < sets_json.size(); ++i ) + { + // each set will be an array of tags + const auto tags { co_await getTagPairs( sets_json[ i ] ) }; + + if ( !tags.has_value() ) co_return tags.error(); + + const auto tag_ids { co_await getIDsFromPairs( std::move( tags.value() ), db ) }; + + if ( !tag_ids.has_value() ) co_return tag_ids.error(); + + // for ( const TagID tag_id : tag_ids.value() ) + // { + co_await transaction->execSqlCoro( + "INSERT INTO tag_mappings (record_id, tag_id, domain_id) VALUES ($1, unnest($2::INTEGER[]), $3) ON CONFLICT DO NOTHING", + static_cast< RecordID >( records_json[ i ].asInt64() ), + helpers::pgArrayify( tag_ids.value() ), + tag_domain_id.value() ); + // } + } + } + else if ( !sets_json.isNull() ) + { + co_return createBadRequest( "Invalid json: Sets must be array or null (not present)" ); + } co_return drogon::HttpResponse::newHttpResponse(); } diff --git a/IDHANServer/src/api/tagSearch.cpp b/IDHANServer/src/api/tagSearch.cpp index 8f299ed..0032da0 100644 --- a/IDHANServer/src/api/tagSearch.cpp +++ b/IDHANServer/src/api/tagSearch.cpp @@ -19,7 +19,7 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI:: if ( result.size() > 0 ) { const auto tag_id { result[ 0 ][ 0 ].as< TagID >() }; - log::debug( "Result of /tag/search?tag={}: {}", tag_text, tag_id ); + log::debug( "Result of /tags/search?tag={}: {}", tag_text, tag_id ); json[ "tag_id" ] = tag_id; json[ "found" ] = true; } diff --git a/IDHANServer/src/api/tags/createTag.cpp b/IDHANServer/src/api/tags/createTag.cpp index 
c42d4b4..3c644b4 100644 --- a/IDHANServer/src/api/tags/createTag.cpp +++ b/IDHANServer/src/api/tags/createTag.cpp @@ -91,11 +91,13 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI::createTagRouter( drogon::Ht if ( json_obj->isArray() ) { + log::debug( "Tag router creating multiple tags" ); co_return co_await createBatchedTag( request ); } if ( json_obj->isObject() ) { + log::debug( "Tag router creating single tag" ); co_return co_await createSingleTag( request ); } @@ -114,15 +116,27 @@ std::string pgEscape( const std::string& str ) for ( const auto& c : str ) { - if ( c == '}' ) cleaned.push_back( '\\' ); - if ( c == '{' ) cleaned.push_back( '\\' ); - if ( c == '\"' ) cleaned.push_back( '\\' ); - // if ( c == '\'' ) cleaned.push_back( '\'' ); - if ( c == '\\' ) cleaned.push_back( '\\' ); - if ( c == ',' ) contains_comma = true; - cleaned.push_back( c ); + switch ( c ) + { + case '}': + [[fallthrough]]; + case '{': + [[fallthrough]]; + case '\"': + [[fallthrough]]; + case '\\': + cleaned.push_back( '\\' ); + [[fallthrough]]; + case ',': + contains_comma = contains_comma || ( c == ',' ); + [[fallthrough]]; + default: + cleaned.push_back( c ); + } } + log::debug( "Escaped \'{}\' to \'{}\'", str, cleaned ); + if ( contains_comma ) return std::format( "\"{}\"", cleaned ); return cleaned; } @@ -156,16 +170,35 @@ drogon::Task< std::expected< std::vector< TagID >, drogon::HttpResponsePtr > > namespaces += "}"; subtags += "}"; - const auto result { co_await db->execSqlCoro( "SELECT createBatchedTag($1, $2)", namespaces, subtags ) }; + log::debug( "Namespace array string: {}", namespaces ); + log::debug( "Subtag array string: {}", subtags ); - if ( result.size() != tag_pairs.size() ) - co_return std::unexpected( createInternalError( - "Expected number of tag returns does not match {} == {}", result.size(), tag_pairs.size() ) ); + log::debug( "Prepared namespace/subtag arrays for createBatchedTag" ); std::vector< TagID > tag_ids {}; - tag_ids.reserve( result.size() ); - for 
( const auto& tag : result ) tag_ids.emplace_back( tag[ 0 ].as< TagID >() ); + const auto search_path { co_await db->execSqlCoro( "SHOW search_path" ) }; + log::debug( "Search path: {}", search_path[ 0 ][ 0 ].as< std::string >() ); + + try + { + const auto result { co_await db->execSqlCoro( "SELECT createBatchedTag($1, $2)", namespaces, subtags ) }; + + log::debug( "Got {} tags returned", result.size() ); + + if ( result.size() != tag_pairs.size() ) + co_return std::unexpected( createInternalError( + "Expected number of tag returns does not match {} == {}", result.size(), tag_pairs.size() ) ); + + tag_ids.reserve( result.size() ); + + for ( const auto& tag : result ) tag_ids.emplace_back( tag[ 0 ].as< TagID >() ); + } + catch ( std::exception& e ) + { + co_return std:: + unexpected( createInternalError( "Failed to create tags using createBatchedTags(): {}", e.what() ) ); + } co_return tag_ids; } @@ -199,6 +232,8 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI::createBatchedTag( drogon::H tag_pairs.emplace_back( namespace_j.asString(), subtag_j.asString() ); } + log::debug( "Need to create {} tags", tag_pairs.size() ); + Json::Value out {}; Json::ArrayIndex index { 0 }; @@ -206,6 +241,8 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI::createBatchedTag( drogon::H if ( !result.has_value() ) co_return result.error(); + log::debug( "Create tags returned with {} tags", result.value().size() ); + for ( const auto& tag_id : result.value() ) { out[ index ][ "tag_id" ] = tag_id; @@ -224,7 +261,7 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI::createSingleTag( drogon::Ht { if ( request == nullptr ) { - log::error( "/tag/create: null request" ); + log::error( "/tags/create: null request" ); throw std::runtime_error( "Null request" ); } @@ -233,7 +270,7 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI::createSingleTag( drogon::Ht if ( input_json == nullptr ) { - log::error( "/tag/create: no json data" ); + log::error( "/tags/create: no json data" ); 
throw std::runtime_error( "No json data" ); } @@ -270,8 +307,15 @@ drogon::Task< drogon::HttpResponsePtr > IDHANTagAPI::createSingleTag( drogon::Ht if ( !namespace_id.has_value() ) co_return namespace_id.error(); if ( !subtag_id.has_value() ) co_return subtag_id.error(); + log::debug( "Got namespace id {} for {} ", namespace_id.value(), namespace_c.asString() ); + log::debug( "Got subtag id {} for {}", subtag_id.value(), subtag_c.asString() ); + const auto tag_id { co_await createTagID( namespace_id.value(), subtag_id.value(), db ) }; + if ( !tag_id.has_value() ) co_return tag_id.error(); + + log::debug( "Got tag id {} for tag ({}, {})", tag_id.value(), namespace_id.value(), subtag_id.value() ); + Json::Value json {}; json[ "namespace" ] = namespace_id.value(); diff --git a/IDHANServer/src/crypto/SHA256.cpp b/IDHANServer/src/crypto/SHA256.cpp index 4a21964..3ef2bfe 100644 --- a/IDHANServer/src/crypto/SHA256.cpp +++ b/IDHANServer/src/crypto/SHA256.cpp @@ -4,24 +4,13 @@ #include "SHA256.hpp" +#include #include #include +#include "api/helpers/createBadRequest.hpp" #include "fgl/defines.hpp" -namespace idhan -{ -inline drogon::orm::internal::SqlBinder& operator<<( drogon::orm::internal::SqlBinder& binder, idhan::SHA256&& sha256 ) -{ - const auto data { sha256.data() }; - std::vector< char > binary {}; - binary.resize( data.size() ); - std::memcpy( binary.data(), data.data(), data.size() ); - - return binder << binary; -} -} // namespace idhan - namespace idhan { @@ -37,7 +26,8 @@ SHA256::SHA256( const drogon::orm::Field& field ) { const auto data { field.as< std::vector< char > >() }; - assert( data.size() == m_data.size() ); + FGL_ASSERT( + data.size() == m_data.size(), std::format( "Invalid size. 
Expected {} got {}", m_data.size(), data.size() ) ); std::memcpy( m_data.data(), data.data(), data.size() ); } @@ -51,10 +41,12 @@ std::string SHA256::hex() const return str; } -SHA256 SHA256::fromHex( const std::string& str ) +std::expected< SHA256, drogon::HttpResponsePtr > SHA256::fromHex( const std::string& str ) { // 0xFF = 0b11111111 FGL_ASSERT( str.size() == ( 256 / 8 * 2 ), "Hex string must be exactly 64 characters log" ); + if ( str.size() != ( 256 / 8 * 2 ) ) + return std::unexpected( createBadRequest( "Hex string must be exactly 64 characters long" ) ); std::array< std::byte, ( 256 / 8 ) > bytes {}; diff --git a/IDHANServer/src/crypto/SHA256.hpp b/IDHANServer/src/crypto/SHA256.hpp index c96c299..0e39da3 100644 --- a/IDHANServer/src/crypto/SHA256.hpp +++ b/IDHANServer/src/crypto/SHA256.hpp @@ -6,11 +6,13 @@ #include +#include #include #include #include #include +#include #include #include @@ -54,7 +56,7 @@ class SHA256 std::string hex() const; //! Turns a HEX string into a SHA256 object. Str must be exactly (256 / 8) * 2, 64 characters long - static SHA256 fromHex( const std::string& str ); + static std::expected< SHA256, drogon::HttpResponsePtr > fromHex( const std::string& str ); static SHA256 fromBuffer( const std::vector< std::byte >& data ); inline static SHA256 hash( const std::vector< std::byte >& data ) { return hash( data.data(), data.size() ); } diff --git a/IDHANServer/src/db/setup/ManagementConnection.cpp b/IDHANServer/src/db/setup/ManagementConnection.cpp index 124c7d4..9ada9a6 100644 --- a/IDHANServer/src/db/setup/ManagementConnection.cpp +++ b/IDHANServer/src/db/setup/ManagementConnection.cpp @@ -30,7 +30,6 @@ ManagementConnection::ManagementConnection( const ConnectionArguments& arguments tx.exec( "DROP SCHEMA IF EXISTS test CASCADE" ); tx.exec( "CREATE SCHEMA test" ); tx.exec( "SET schema 'test'" ); - // This function is a NOOP unless a define is enabled for it. 
constexpr std::string_view schema { "test" }; db::updateMigrations( tx, schema ); } diff --git a/IDHANServer/src/db/setup/migration/12-url_map.sql b/IDHANServer/src/db/setup/migration/12-url_map.sql index b8b8148..1938239 100644 --- a/IDHANServer/src/db/setup/migration/12-url_map.sql +++ b/IDHANServer/src/db/setup/migration/12-url_map.sql @@ -1,6 +1,6 @@ CREATE TABLE url_map ( - record_id INTEGER, - url_id INTEGER REFERENCES urls (url_id), + record_id INTEGER REFERENCES records (record_id) NOT NULL, + url_id INTEGER REFERENCES urls (url_id) NOT NULL, UNIQUE (record_id, url_id) -- Simply to prevent duplicates of the same thing ); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/13-file_clusters.sql b/IDHANServer/src/db/setup/migration/13-file_clusters.sql index 6b80885..3e035b0 100644 --- a/IDHANServer/src/db/setup/migration/13-file_clusters.sql +++ b/IDHANServer/src/db/setup/migration/13-file_clusters.sql @@ -1,4 +1,4 @@ -CREATE TABLE file_clusters +CREATE TABLE IF NOT EXISTS file_clusters ( cluster_id SMALLSERIAL NOT NULL PRIMARY KEY, ratio_number SMALLINT NOT NULL DEFAULT 1, -- Ratio of the cluster ( Ratio / TotalRatio ) diff --git a/IDHANServer/src/db/setup/migration/14-file_meta.sql b/IDHANServer/src/db/setup/migration/14-file_meta.sql index 96b08cb..59633f1 100644 --- a/IDHANServer/src/db/setup/migration/14-file_meta.sql +++ b/IDHANServer/src/db/setup/migration/14-file_meta.sql @@ -1,8 +1,7 @@ CREATE TABLE file_metadata ( - cluster_id SMALLINT REFERENCES file_clusters (cluster_id), - record_id INTEGER, - file_size BIGINT, - obtained TIMESTAMP WITHOUT TIME ZONE, - UNIQUE (record_id) + cluster_id SMALLINT REFERENCES file_clusters (cluster_id) NOT NULL, + record_id INTEGER REFERENCES records (record_id) NOT NULL, + file_size BIGINT NOT NULL, + obtained TIMESTAMP WITHOUT TIME ZONE NOT NULL ); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/15-deleted_files.sql 
b/IDHANServer/src/db/setup/migration/15-deleted_files.sql index 376b41a..621f418 100644 --- a/IDHANServer/src/db/setup/migration/15-deleted_files.sql +++ b/IDHANServer/src/db/setup/migration/15-deleted_files.sql @@ -1,5 +1,5 @@ CREATE TABLE deleted_files ( - record_id INTEGER, - deleted_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP + record_id INTEGER UNIQUE REFERENCES records (record_id) NOT NULL, + deleted_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP ); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/16-file_notes.sql b/IDHANServer/src/db/setup/migration/16-file_notes.sql index 98c0af3..b0a1367 100644 --- a/IDHANServer/src/db/setup/migration/16-file_notes.sql +++ b/IDHANServer/src/db/setup/migration/16-file_notes.sql @@ -1,5 +1,5 @@ CREATE TABLE file_notes ( - record_id INTEGER, + record_id INTEGER UNIQUE REFERENCES records (record_id), note TEXT NOT NULL ); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/2-records.sql b/IDHANServer/src/db/setup/migration/2-records.sql index f67ddbe..7425326 100644 --- a/IDHANServer/src/db/setup/migration/2-records.sql +++ b/IDHANServer/src/db/setup/migration/2-records.sql @@ -1,23 +1,6 @@ CREATE TABLE records ( - record_id SERIAL NOT NULL, + record_id SERIAL PRIMARY KEY NOT NULL, sha256 BYTEA UNIQUE NOT NULL, - creation_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (record_id, sha256) -) PARTITION BY hash (sha256); -CREATE TABLE records_0 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 0); -CREATE TABLE records_1 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 1); -CREATE TABLE records_2 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 2); -CREATE TABLE records_3 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 3); -CREATE TABLE records_4 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 4); -CREATE TABLE records_5 PARTITION OF records 
FOR VALUES WITH (MODULUS 16, REMAINDER 5); -CREATE TABLE records_6 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 6); -CREATE TABLE records_7 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 7); -CREATE TABLE records_8 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 8); -CREATE TABLE records_9 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 9); -CREATE TABLE records_10 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 10); -CREATE TABLE records_11 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 11); -CREATE TABLE records_12 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 12); -CREATE TABLE records_13 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 13); -CREATE TABLE records_14 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 14); -CREATE TABLE records_15 PARTITION OF records FOR VALUES WITH (MODULUS 16, REMAINDER 15); \ No newline at end of file + creation_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP +); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/20-tag_mappings.sql b/IDHANServer/src/db/setup/migration/20-tag_mappings.sql index 72e49cd..c1126e3 100644 --- a/IDHANServer/src/db/setup/migration/20-tag_mappings.sql +++ b/IDHANServer/src/db/setup/migration/20-tag_mappings.sql @@ -1,6 +1,7 @@ CREATE TABLE tag_mappings ( - record_id INTEGER, + record_id INTEGER REFERENCES records (record_id), tag_id INTEGER REFERENCES tags (tag_id), - domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) -); \ No newline at end of file + domain_id SMALLINT REFERENCES tag_domains (tag_domain_id), + UNIQUE (record_id, tag_id, domain_id) +) PARTITION BY LIST (domain_id); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/23-file_info.sql b/IDHANServer/src/db/setup/migration/23-file_info.sql index 1a5e0d1..4800e84 100644 --- a/IDHANServer/src/db/setup/migration/23-file_info.sql +++ 
b/IDHANServer/src/db/setup/migration/23-file_info.sql @@ -1,8 +1,8 @@ CREATE TABLE file_info ( - record_id INTEGER NOT NULL, - size BIGINT NOT NULL, - mime_id INTEGER REFERENCES mime (mime_id) NOT NULL, + record_id INTEGER UNIQUE REFERENCES records (record_id) NOT NULL, + size BIGINT NOT NULL, + mime_id INTEGER REFERENCES mime (mime_id) NOT NULL, cluster_id SMALLINT REFERENCES file_clusters (cluster_id), -- Will be null if we have not obtained the file before. cluster_store_time TIMESTAMP WITHOUT TIME ZONE -- Will be set if the file has been stored in a cluster. ); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/25-tag_domains.sql b/IDHANServer/src/db/setup/migration/25-tag_domains.sql index fa98dc7..595f979 100644 --- a/IDHANServer/src/db/setup/migration/25-tag_domains.sql +++ b/IDHANServer/src/db/setup/migration/25-tag_domains.sql @@ -1,2 +1,81 @@ +CREATE OR REPLACE FUNCTION createTagMappingsDomain() RETURNS TRIGGER + language plpgsql +AS +$$ +DECLARE + domain_id INTEGER; +BEGIN + FOR domain_id IN SELECT tag_domain_id FROM tag_domains + LOOP + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = FORMAT('tag_mappings_%s', domain_id)) THEN + CONTINUE; + END IF; + + -- mappings + EXECUTE FORMAT( + 'CREATE TABLE IF NOT EXISTS tag_mappings_%s PARTITION OF tag_mappings FOR VALUES IN (%s)', + domain_id, domain_id); + -- aliases + EXECUTE FORMAT( + 'CREATE TABLE IF NOT EXISTS tag_aliases_%s PARTITION OF tag_aliases FOR VALUES IN (%s)', + domain_id, domain_id); + -- siblings + EXECUTE FORMAT( + 'CREATE TABLE IF NOT EXISTS tag_siblings_%s PARTITION OF tag_siblings FOR VALUES IN (%s)', + domain_id, domain_id); + -- parents + EXECUTE FORMAT( + 'CREATE TABLE IF NOT EXISTS tag_parents_%s PARTITION OF tag_parents FOR VALUES IN (%s)', + domain_id, domain_id); + end loop; + RETURN NEW; +END; +$$; + +-- CREATE OR REPLACE FUNCTION destroyTagMappingsDomain() RETURNS TRIGGER +-- language plpgsql +-- AS +-- $$ +-- DECLARE +-- domain_id 
INTEGER; +-- BEGIN +-- FOR domain_id IN SELECT tag_domain_id FROM tag_domains +-- LOOP +-- -- mappings +-- EXECUTE FORMAT( +-- 'DROP TABLE IF EXISTS tag_mappings_%s', +-- domain_id); +-- -- aliases +-- EXECUTE FORMAT( +-- 'DROP TABLE IF EXISTS tag_aliases_%s', +-- domain_id); +-- -- siblings +-- EXECUTE FORMAT( +-- 'DROP TABLE IF EXISTS tag_siblings_%s', +-- domain_id); +-- -- parents +-- EXECUTE FORMAT( +-- 'DROP TABLE IF EXISTS tag_parents_%s', +-- domain_id); +-- end loop; +-- +-- RETURN NEW; +-- END; +-- $$; + +CREATE TRIGGER tag_domain_insert + AFTER INSERT + ON tag_domains + REFERENCING NEW TABLE AS inserted + FOR EACH STATEMENT +EXECUTE FUNCTION createTagMappingsDomain(); + +-- CREATE TRIGGER tag_domain_delete +-- AFTER DELETE +-- ON tag_domains +-- REFERENCING OLD TABLE AS deleted +-- FOR EACH STATEMENT +-- EXECUTE FUNCTION destroyTagMappingsDomain(); + INSERT INTO tag_domains(domain_name) VALUES ('default'); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/7-tag_aliases.sql b/IDHANServer/src/db/setup/migration/7-tag_aliases.sql index 66d5c1d..847600d 100644 --- a/IDHANServer/src/db/setup/migration/7-tag_aliases.sql +++ b/IDHANServer/src/db/setup/migration/7-tag_aliases.sql @@ -1,7 +1,7 @@ CREATE TABLE tag_aliases ( - alias_id INTEGER REFERENCES tags (tag_id), - aliased_id INTEGER REFERENCES tags (tag_id), - domain_id SMALLINT REFERENCES tag_domains (tag_domain_id), - UNIQUE (alias_id, aliased_id, domain_id) -); \ No newline at end of file + alias_id INTEGER REFERENCES tags (tag_id), + aliased_id INTEGER REFERENCES tags (tag_id), + domain_id SMALLINT REFERENCES tag_domains (tag_domain_id), + UNIQUE (alias_id, aliased_id, domain_id) +) PARTITION BY LIST (domain_id); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/8-tag_parents.sql b/IDHANServer/src/db/setup/migration/8-tag_parents.sql index e713a69..fe8682b 100644 --- a/IDHANServer/src/db/setup/migration/8-tag_parents.sql +++ 
b/IDHANServer/src/db/setup/migration/8-tag_parents.sql @@ -4,4 +4,4 @@ CREATE TABLE tag_parents child_id INTEGER REFERENCES tags (tag_id), domain_id SMALLINT REFERENCES tag_domains (tag_domain_id), UNIQUE (parent_id, child_id, domain_id) -); \ No newline at end of file +) PARTITION BY LIST (domain_id); \ No newline at end of file diff --git a/IDHANServer/src/db/setup/migration/9-tag_siblings.sql b/IDHANServer/src/db/setup/migration/9-tag_siblings.sql index 691ddc3..0a83adb 100644 --- a/IDHANServer/src/db/setup/migration/9-tag_siblings.sql +++ b/IDHANServer/src/db/setup/migration/9-tag_siblings.sql @@ -1,7 +1,7 @@ CREATE TABLE tag_siblings ( - older_id INTEGER REFERENCES tags (tag_id), - younger_id INTEGER REFERENCES tags (tag_id), - domain_id SMALLINT REFERENCES tag_domains (tag_domain_id), - UNIQUE (older_id, younger_id, domain_id) -); \ No newline at end of file + older_id INTEGER REFERENCES tags (tag_id), + younger_id INTEGER REFERENCES tags (tag_id), + domain_id SMALLINT REFERENCES tag_domains (tag_domain_id), + UNIQUE (older_id, younger_id, domain_id) +) PARTITION BY LIST (domain_id); \ No newline at end of file diff --git a/IDHANServer/src/hyapi/HyAPI.cpp b/IDHANServer/src/hyapi/HyAPI.cpp index 00dbb93..540ea44 100644 --- a/IDHANServer/src/hyapi/HyAPI.cpp +++ b/IDHANServer/src/hyapi/HyAPI.cpp @@ -6,9 +6,11 @@ #include "IDHANTypes.hpp" #include "api/IDHANSearchAPI.hpp" +#include "api/helpers/createBadRequest.hpp" #include "constants/SearchOrder.hpp" #include "constants/hydrus_version.hpp" #include "core/SearchBuilder.hpp" +#include "crypto/SHA256.hpp" #include "fixme.hpp" #include "logging/log.hpp" #include "versions.hpp" @@ -16,7 +18,7 @@ namespace idhan::hyapi { -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::unsupported( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::unsupported( drogon::HttpRequestPtr request ) { Json::Value root; root[ "status" ] = 410; @@ -24,7 +26,7 @@ drogon::Task< 
drogon::HttpResponsePtr > HydrusAPI::unsupported( const drogon::Ht } // /hyapi/api_version -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::apiVersion( [[maybe_unused]] const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::apiVersion( [[maybe_unused]] drogon::HttpRequestPtr request ) { Json::Value json; json[ "version" ] = HYDRUS_MIMICED_API_VERSION; @@ -41,19 +43,19 @@ drogon::Task< drogon::HttpResponsePtr > HydrusAPI::apiVersion( [[maybe_unused]] } // /hyapi/access/request_new_permissions -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::requestNewPermissions( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::requestNewPermissions( drogon::HttpRequestPtr request ) { idhan::fixme(); } // /hyapi/access/session_key -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::sessionKey( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::sessionKey( drogon::HttpRequestPtr request ) { idhan::fixme(); } // /hyapi/access/verify_access_key -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::verifyAccessKey( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::verifyAccessKey( drogon::HttpRequestPtr request ) { Json::Value json; json[ "basic_permissions" ] = 0; @@ -64,17 +66,17 @@ drogon::Task< drogon::HttpResponsePtr > HydrusAPI::verifyAccessKey( const drogon co_return response; } -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::getService( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::getService( drogon::HttpRequestPtr request ) {} -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::getServices( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::getServices( drogon::HttpRequestPtr request ) {} -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::addFile( const drogon::HttpRequestPtr& request ) +drogon::Task< 
drogon::HttpResponsePtr > HydrusAPI::addFile( drogon::HttpRequestPtr request ) {} template < typename T > -T getDefaultedValue( const std::string name, const drogon::HttpRequestPtr& request, const T default_value ) +T getDefaultedValue( const std::string name, drogon::HttpRequestPtr request, const T default_value ) { return request->getOptionalParameter< T >( name ).value_or( default_value ); } @@ -134,16 +136,92 @@ drogon::Task< drogon::HttpResponsePtr > HydrusAPI::searchFiles( drogon::HttpRequ std::string query { builder.construct() }; } -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::fileHashes( const drogon::HttpRequestPtr& request ) +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::fileHashes( drogon::HttpRequestPtr request ) {} -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::fileMetadata( const drogon::HttpRequestPtr& request ) -{} +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::fileMetadata( drogon::HttpRequestPtr request ) +{ + const auto file_ids { request->getOptionalParameter< std::string >( "file_ids" ) }; + if ( !file_ids.has_value() ) co_return createBadRequest( "Must provide file_ids array" ); -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::file( const drogon::HttpRequestPtr& request ) -{} + std::string file_ids_str { file_ids.value() }; + //file_ids will be in a json string format + std::vector< RecordID > record_ids {}; + file_ids_str = file_ids_str.substr( 1, file_ids_str.size() - 2 ); // cut off the [] + while ( !file_ids_str.empty() ) + { + const auto end_itter { file_ids_str.find_first_of( ',' ) }; + if ( end_itter == std::string::npos ) + { + record_ids.push_back( std::stoi( file_ids_str ) ); + file_ids_str.clear(); + } + else + { + record_ids.push_back( std::stoi( file_ids_str.substr( 0, end_itter ) ) ); + file_ids_str = file_ids_str.substr( end_itter + 1 ); + } + } -drogon::Task< drogon::HttpResponsePtr > HydrusAPI::thumbnail( const drogon::HttpRequestPtr& request ) -{} + // we've gotten all the ids. 
For now we'll just return them + Json::Value metadata {}; + + auto db { drogon::app().getDbClient() }; + + for ( const auto& id : record_ids ) + { + Json::Value data {}; + + const auto hash_result { co_await db->execSqlCoro( "SELECT sha256 FROM records WHERE record_id = $1", id ) }; + + data[ "file_id" ] = id; + const SHA256 sha256 { hash_result[ 0 ][ "sha256" ] }; + data[ "hash" ] = sha256.hex(); + + metadata.append( std::move( data ) ); + } + + Json::Value out {}; + out[ "metadata" ] = std::move( metadata ); + + co_return drogon::HttpResponse::newHttpJsonResponse( std::move( out ) ); +} + +/* +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::file( drogon::HttpRequestPtr request ) +{ + co_return drogon::HttpResponse::newHttpResponse(); +} +*/ + +drogon::Task< drogon::HttpResponsePtr > HydrusAPI::file( drogon::HttpRequestPtr request ) +{ + auto file_id { request->getOptionalParameter< RecordID >( "file_id" ) }; + const auto hash { request->getOptionalParameter< std::string >( "hash" ) }; + + if ( hash.has_value() ) + { + auto db { drogon::app().getDbClient() }; + const auto sha256 { SHA256::fromHex( hash.value() ) }; + + if ( !sha256.has_value() ) co_return sha256.error(); + + const auto record_result { + co_await db->execSqlCoro( "SELECT record_id FROM records WHERE sha256 = $1", sha256->toVec() ) + }; + + if ( record_result.empty() ) co_return createNotFound( "No record with hash {} found", hash.value() ); + + file_id = record_result[ 0 ][ "record_id" ].as< RecordID >(); + } + + if ( !file_id.has_value() && !hash.has_value() ) co_return createBadRequest( "No hash of file_id specified" ); + + const RecordID id { file_id.value() }; + + request->setPath( std::format( "/records/{}/file", id ) ); + + co_return co_await drogon::app().forwardCoro( request ); +} } // namespace idhan::hyapi diff --git a/IDHANServer/src/hyapi/HyAPI.hpp b/IDHANServer/src/hyapi/HyAPI.hpp index 1f4ab61..331dc6e 100644 --- a/IDHANServer/src/hyapi/HyAPI.hpp +++ 
b/IDHANServer/src/hyapi/HyAPI.hpp @@ -10,25 +10,25 @@ namespace idhan::hyapi { class HydrusAPI : public drogon::HttpController< HydrusAPI > { - drogon::Task< drogon::HttpResponsePtr > unsupported( const drogon::HttpRequestPtr& request ); + drogon::Task< drogon::HttpResponsePtr > unsupported( drogon::HttpRequestPtr request ); // Access management (access) - drogon::Task< drogon::HttpResponsePtr > apiVersion( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > requestNewPermissions( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > sessionKey( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > verifyAccessKey( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > getService( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > getServices( const drogon::HttpRequestPtr& request ); + drogon::Task< drogon::HttpResponsePtr > apiVersion( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > requestNewPermissions( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > sessionKey( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > verifyAccessKey( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > getService( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > getServices( drogon::HttpRequestPtr request ); // Importing and deleting files (import) - drogon::Task< drogon::HttpResponsePtr > addFile( const drogon::HttpRequestPtr& request ); + drogon::Task< drogon::HttpResponsePtr > addFile( drogon::HttpRequestPtr request ); // Searching and Fetching files (search) drogon::Task< drogon::HttpResponsePtr > searchFiles( drogon::HttpRequestPtr request ); - drogon::Task< drogon::HttpResponsePtr > fileHashes( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > fileMetadata( const 
drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > file( const drogon::HttpRequestPtr& request ); - drogon::Task< drogon::HttpResponsePtr > thumbnail( const drogon::HttpRequestPtr& request ); + drogon::Task< drogon::HttpResponsePtr > fileHashes( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > fileMetadata( drogon::HttpRequestPtr request ); + drogon::Task< drogon::HttpResponsePtr > file( drogon::HttpRequestPtr request ); +// drogon::Task< drogon::HttpResponsePtr > thumbnail( drogon::HttpRequestPtr request ); public: @@ -50,7 +50,7 @@ class HydrusAPI : public drogon::HttpController< HydrusAPI > ADD_METHOD_TO( HydrusAPI::fileHashes, "/hyapi/get_files/file_hashes", drogon::Get, HyAPIAuthName ); ADD_METHOD_TO( HydrusAPI::fileMetadata, "/hyapi/get_files/file_metadata", drogon::Get, HyAPIAuthName ); ADD_METHOD_TO( HydrusAPI::file, "/hyapi/get_files/file", drogon::Get, HyAPIAuthName ); - ADD_METHOD_TO( HydrusAPI::thumbnail, "/hyapi/get_files/thumbnail", drogon::Get, HyAPIAuthName ); + ADD_METHOD_TO( HydrusAPI::file, "/hyapi/get_files/thumbnail", drogon::Get, HyAPIAuthName ); ADD_METHOD_TO( HydrusAPI::unsupported, "/hyapi/get_files/file_path", drogon::Get, HyAPIAuthName ); // UNSUPPORTED ADD_METHOD_TO( HydrusAPI::unsupported, "/hyapi/get_files/thumbnail_path", drogon::Get, HyAPIAuthName ); // UNSUPPORTED diff --git a/IDHANServer/src/logging/log.hpp b/IDHANServer/src/logging/log.hpp index 2abedf8..756365e 100644 --- a/IDHANServer/src/logging/log.hpp +++ b/IDHANServer/src/logging/log.hpp @@ -18,37 +18,43 @@ namespace idhan::log { template < typename... Ts > -void trace( const std::string str, Ts&&... ts ) +void trace( const spdlog::format_string_t< Ts... > str, Ts&&... ts ) { ::spdlog::trace( str, std::forward< Ts >( ts )... ); } template < typename... Ts > -void debug( const std::string str, Ts&&... ts ) +void debug( const ::spdlog::format_string_t< Ts... > str, Ts&&... 
ts ) { ::spdlog::debug( str, std::forward< Ts >( ts )... ); } template < typename... Ts > -void info( const std::string str, Ts&&... ts ) +void info( const spdlog::format_string_t< Ts... > str, Ts&&... ts ) { ::spdlog::info( str, std::forward< Ts >( ts )... ); } template < typename... Ts > -void warn( const std::string str, Ts&&... ts ) +void warn( const spdlog::format_string_t< Ts... > str, Ts&&... ts ) { ::spdlog::warn( str, std::forward< Ts >( ts )... ); } template < typename... Ts > -void error( const std::string str, Ts&&... ts ) +void error( const spdlog::format_string_t< Ts... > str, Ts&&... ts ) { ::spdlog::error( str, std::forward< Ts >( ts )... ); } +template < typename T > +void error( const T& val ) +{ + ::spdlog::error( val ); +} + template < typename... Ts > -void critical( const std::string str, Ts&&... ts ) +void critical( const spdlog::format_string_t< Ts... > str, Ts&&... ts ) { ::spdlog::critical( str, std::forward< Ts >( ts )... ); } @@ -56,9 +62,9 @@ void critical( const std::string str, Ts&&... ts ) namespace server { template < typename... Ts > -void info( const std::string str, Ts&&... ts ) +void info( const spdlog::format_string_t< Ts... > str, Ts&&... ts ) { - ::spdlog::info( std::format( "[SERVER]: {}", str ), std::forward< Ts >( ts )... ); + ::spdlog::info( str, std::forward< Ts >( ts )... ); } } // namespace server diff --git a/IDHANServer/src/main.cpp b/IDHANServer/src/main.cpp index a573071..e31bdbd 100644 --- a/IDHANServer/src/main.cpp +++ b/IDHANServer/src/main.cpp @@ -2,6 +2,8 @@ // Created by kj16609 on 7/23/24. 
// +#include + #include #include "ConnectionArguments.hpp" @@ -10,24 +12,95 @@ int main( int argc, char** argv ) { + QCommandLineParser parser {}; + parser.addHelpOption(); + parser.addVersionOption(); + +#ifndef NDEBUG + + QCommandLineOption testmode_option { "testmode", "Enables testmode if present" }; + parser.addOption( testmode_option ); + +#endif + + QCommandLineOption use_stdout_option { "use_stdout", + "Enables the logger to output to stdout (Default: 1)", + "flag" }; + use_stdout_option.setDefaultValue( "1" ); + parser.addOption( use_stdout_option ); + + QCommandLineOption log_level { "log_level", + "Dictates the log level used (trace, debug, info, warning, error, critical", + "level" }; + log_level.setDefaultValue( "info" ); + parser.addOption( log_level ); + + QCommandLineOption pg_user { "pg_user", "The user to connect to the database with (default: 'idhan')", "pg_user" }; + pg_user.setDefaultValue( "idhan" ); + parser.addOption( pg_user ); + + QCommandLineOption pg_host { "pg_host", + "The host to connect to the database with (default: 'localhost')", + "pg_host" }; + pg_host.setDefaultValue( "localhost" ); + parser.addOption( pg_host ); + + QCoreApplication app { argc, argv }; + app.setApplicationName( "IDHAN" ); + + parser.process( app ); + spdlog::set_level( spdlog::level::debug ); idhan::ConnectionArguments arguments {}; - //arguments.hydrus_info.hydrus_db_path = "/home/kj16609/.local/share/hydrus/db/"; - arguments.user = "idhan"; - arguments.hostname = "localhost"; + arguments.user = parser.value( pg_user ).toStdString(); + arguments.hostname = parser.value( pg_host ).toStdString(); - for ( std::size_t i = 0; i < argc; ++i ) + if ( !parser.isSet( "log_level" ) ) { - idhan::log::debug( "{}: {}", i, argv[ i ] ); - if ( argv[ i ] == "--testmode" ) arguments.testmode = true; + spdlog::set_level( spdlog::level::info ); + } + else + { + const auto level { parser.value( log_level ).toStdString() }; + + if ( level == "trace" ) + spdlog::set_level( 
spdlog::level::trace ); + else if ( level == "debug" ) + spdlog::set_level( spdlog::level::debug ); + else if ( level == "info" ) + spdlog::set_level( spdlog::level::info ); + else if ( level == "warning" || level == "warn" ) + spdlog::set_level( spdlog::level::warn ); + else if ( level == "error" ) + spdlog::set_level( spdlog::level::err ); + else if ( level == "critical" ) + spdlog::set_level( spdlog::level::critical ); + else + { + // invalid level, throw + spdlog:: + critical( "Invalid log level, Expected one of: (trace, debug, info, (warning/warn), error, critical)" ); + std::terminate(); + } + } + +#ifndef NDEBUG + if ( parser.isSet( testmode_option ) ) + { + arguments.testmode = true; + } +#else + arguments.testmode = false; +#endif + + if ( parser.value( use_stdout_option ).toInt() > 0 ) + { + arguments.use_stdout = true; } idhan::ServerContext context { arguments }; - // context.cloneHydrusData( "/home/kj16609/.local/share/hydrus/db" ); - // context.cloneHydrusData( "/home/kj16609/Desktop/Projects/cxx/IDHAN/3rd-party/hydrus/db" ); - context.run(); idhan::log::info( "Shutting down..." 
); diff --git a/docs/api/paths/records/tags/add.yaml b/docs/api/paths/records/tags/add.yaml index fab59c1..02c2530 100644 --- a/docs/api/paths/records/tags/add.yaml +++ b/docs/api/paths/records/tags/add.yaml @@ -44,6 +44,7 @@ post: - namespace: 1 subtag: 1 + 200: description: All tags added successfully 400: diff --git a/docs/api/paths/records/tags/addgroup.yaml b/docs/api/paths/records/tags/addgroup.yaml index 4acd5f2..5a259e4 100644 --- a/docs/api/paths/records/tags/addgroup.yaml +++ b/docs/api/paths/records/tags/addgroup.yaml @@ -24,6 +24,11 @@ post: type: array items: $ref: '../../../schemas/TagInput.yaml' + sets: + type: array + items: + type: array + $ref: '../../../schemas/TagInput.yaml' examples: IDs: summary: An array of multiple tag ids @@ -59,7 +64,20 @@ post: subtag: "toujou koneko" - namespace: 1 subtag: 1 - + Sets: + summary: A set of records and an array of tag sets to add to each individual record + value: + records: + - 1 + - 2 + - 3 + sets: + - - { namespace: "character", subtag: "toujou koneko" } + - { namespace: 1, subtag: 1 } + - - { namespace: "character", subtag: "toujou koneko" } + - { namespace: 3, subtag: 5 } + - - { namespace: "character", subtag: "toujou koneko" } + - { namespace: 9, subtag: 2 } 200: description: All tags added successfully 400: