Switch to using gtest for testing and rework migrations system

2025-08-18 13:16:59 -04:00
parent fed4f5c4cc
commit 5c0630f806
78 changed files with 640 additions and 813 deletions

.gitignore
View File

@@ -35,4 +35,4 @@ read.lock
/IDHAN/include/hydrus_client_constants_gen.hpp
# Used for allowing easy setup of development environments for testing
/IDHANServer/src/db/setup/migration/9999-dev.sql
/IDHANServer/src/db/setup/migration/dev.sql

View File

@@ -36,6 +36,7 @@ include(postgres)
include(toml)
add_subdirectory(IDHAN)
add_subdirectory(IDHANMigration)
add_subdirectory(IDHANModules)
add_subdirectory(IDHANServer)
add_subdirectory(IDHANClient)

View File

@@ -263,6 +263,8 @@ auto defaultErrorHandler( TPromise&& promise )
const std::runtime_error exception { format_ns::format( "{}", server_msg ) };
logging::error( server_msg );
promise->setException( std::make_exception_ptr( exception ) );
promise->finish();

View File

@@ -14,7 +14,6 @@
#include <spdlog/spdlog.h>
#pragma GCC diagnostic pop
#include "idhan/IDHANClient.hpp"
#include "logging/format_ns.hpp"
class QNetworkReply;

View File

@@ -6,6 +6,7 @@
#include <moc_Network.cpp>
#include <QFuture>
#include <QThread>
#include "logging/logger.hpp"

View File

@@ -8,6 +8,8 @@
#include <QJsonValue>
#include <QNetworkReply>
#include "IDHANClient.hpp"
namespace idhan::logging
{

View File

@@ -12,7 +12,12 @@ namespace idhan
QFuture< TagID > IDHANClient::createTag( const std::string&& namespace_text, const std::string&& subtag_text )
{
return createTags( { std::make_pair( namespace_text, subtag_text ) } )
.then( []( const std::vector< TagID >& tag_ids ) -> TagID { return tag_ids.at( 0 ); } );
.then(
[]( const std::vector< TagID >& tag_ids ) -> TagID
{
if ( tag_ids.empty() ) return INVALID_TAG_ID;
return tag_ids.at( 0 );
} );
}
QFuture< TagID > IDHANClient::createTag( const std::string& tag_text )

View File

@@ -34,16 +34,20 @@ QFuture< std::vector< TagID > > IDHANClient::
QJsonArray array {};
if ( tags.size() == 0 ) throw std::runtime_error( "Needs at least 1 tag to create" );
for ( const auto& [ namespace_text, subtag_text ] : tags )
{
QJsonObject obj;
QJsonObject obj {};
obj[ "namespace" ] = QString::fromStdString( namespace_text );
obj[ "subtag" ] = QString::fromStdString( subtag_text );
array.append( std::move( obj ) );
}
auto handleResponse = [ promise ]( auto* response )
const auto expected_count { tags.size() };
auto handleResponse = [ promise, expected_count ]( auto* response )
{
// reply will give us a body of json
const auto data { response->readAll() };
@@ -66,6 +70,13 @@ QFuture< std::vector< TagID > > IDHANClient::
tag_ids.emplace_back( tag_id );
}
if ( tag_ids.size() != expected_count )
throw std::runtime_error(
format_ns::format(
"IDHAN did not return the correct number of tags back. Expected {} got {}",
expected_count,
tag_ids.size() ) );
promise->addResult( std::move( tag_ids ) );
promise->finish();

View File

@@ -0,0 +1,22 @@
project(IDHANMigration LANGUAGES C CXX)
set(MIGRATION_SOURCE "${CMAKE_CURRENT_BINARY_DIR}/doMigration.cpp")
file(REMOVE ${MIGRATION_SOURCE})
set(MIGRATION_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src)
file(GLOB_RECURSE MIGRATION_SQLS CONFIGURE_DEPENDS ${MIGRATION_DIR}/*.sql)
add_custom_command(
OUTPUT ${MIGRATION_SOURCE}
DEPENDS ${MIGRATION_SQLS}
COMMAND ${CMAKE_COMMAND} -DMIGRATION_DIR=${MIGRATION_DIR} -DOUT=${MIGRATION_SOURCE} -P "${CMAKE_CURRENT_SOURCE_DIR}/cmake_modules/GenerateMigrations.cmake"
COMMENT "${CMAKE_CURRENT_SOURCE_DIR}/cmake_modules/GenerateMigrations.cmake: Generating doMigration.cpp")
file(GLOB_RECURSE CPP_SOURCES CONFIGURE_DEPENDS
${CMAKE_CURRENT_SOURCE_DIR}/src/**.cpp
)
add_library(IDHANMigration STATIC ${MIGRATION_SOURCE} ${CPP_SOURCES})
target_include_directories(IDHANMigration PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
target_link_libraries(IDHANMigration PRIVATE pqxx spdlog::spdlog)

View File

@@ -15,7 +15,6 @@
namespace idhan::db
{
std::size_t doMigration( pqxx::nontransaction& tx, std::size_t migration_id );
void updateMigrations( pqxx::nontransaction& tx, std::string_view schema );
} // namespace idhan::db
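For reference, a minimal sketch of how this entry point can be driven, mirroring the usage that appears later in ManagementConnection and the new ServerDBFixture; the connection parameters and schema handling here are placeholders, not taken from this commit:
// Sketch only: connection string and schema setup are illustrative.
#include <pqxx/pqxx>
#include "migrations.hpp"
int main()
{
	pqxx::connection conn { "dbname=idhan-test user=idhan host=localhost" };
	pqxx::nontransaction tx { conn };
	tx.exec( "CREATE SCHEMA IF NOT EXISTS public" );
	idhan::db::updateMigrations( tx, "public" ); // replays the bundled .sql migrations in order
	return 0;
}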

View File

@@ -3,4 +3,6 @@ CREATE TABLE idhan_info
table_name TEXT PRIMARY KEY UNIQUE NOT NULL,
last_migration_id INTEGER NOT NULL,
queries TEXT[] NOT NULL
);
);
CREATE EXTENSION IF NOT EXISTS pgcrypto;

View File

@@ -0,0 +1,6 @@
CREATE TABLE records
(
record_id SERIAL PRIMARY KEY,
sha256 bytea UNIQUE NOT NULL,
CHECK ( LENGTH(sha256) = 32 )
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE tag_namespaces
(
namespace_id SERIAL PRIMARY KEY,
namespace_text TEXT NOT NULL UNIQUE
);

View File

@@ -1,5 +1,5 @@
CREATE TABLE tag_subtags
(
subtag_id BIGSERIAL PRIMARY KEY,
subtag_id SERIAL PRIMARY KEY,
subtag_text TEXT NOT NULL UNIQUE
);

View File

@@ -0,0 +1,7 @@
CREATE TABLE tags
(
tag_id SERIAL PRIMARY KEY,
subtag_id INTEGER REFERENCES tag_subtags (subtag_id),
namespace_id INTEGER REFERENCES tag_namespaces (namespace_id),
UNIQUE (namespace_id, subtag_id)
);

View File

@@ -1,6 +1,6 @@
CREATE TABLE tags_combined
(
tag_id tagid REFERENCES tags (tag_id),
tag_id INTEGER REFERENCES tags (tag_id),
tag_text TEXT,
PRIMARY KEY (tag_id)
);

View File

@@ -2,6 +2,6 @@ CREATE SEQUENCE tag_domains_tag_domain_id_seq;
CREATE TABLE tag_domains
(
tag_domain_id tagdomainid PRIMARY KEY DEFAULT NEXTVAL('tag_domains_tag_domain_id_seq'),
tag_domain_id INTEGER PRIMARY KEY DEFAULT NEXTVAL('tag_domains_tag_domain_id_seq'),
domain_name TEXT UNIQUE NOT NULL
);

View File

@@ -1,7 +1,7 @@
CREATE TABLE file_info
(
size BIGINT NOT NULL,
record_id recordid UNIQUE REFERENCES records (record_id) NOT NULL,
size BIGINT NOT NULL,
record_id INTEGER UNIQUE REFERENCES records (record_id) NOT NULL,
mime_id INTEGER REFERENCES mime (mime_id),
cluster_store_time TIMESTAMP WITHOUT TIME ZONE, -- Will be set if the file has been stored in a cluster.
cluster_delete_time TIMESTAMP WITHOUT TIME ZONE,

View File

@@ -0,0 +1,7 @@
CREATE TABLE metadata
(
record_id INTEGER REFERENCES records (record_id) NOT NULL,
simple_mime_type SMALLINT NOT NULL,
json json DEFAULT NULL,
UNIQUE (record_id)
);

View File

@@ -0,0 +1,8 @@
CREATE TABLE image_metadata
(
record_id INTEGER REFERENCES records (record_id) NOT NULL,
width INTEGER NOT NULL,
height INTEGER NOT NULL,
channels SMALLINT NOT NULL,
UNIQUE (record_id)
);

View File

@@ -1,7 +1,7 @@
CREATE TABLE tag_mappings
(
record_id recordid REFERENCES records (record_id) NOT NULL,
tag_id tagid REFERENCES tags (tag_id) NOT NULL,
record_id INTEGER REFERENCES records (record_id) NOT NULL,
tag_id INTEGER REFERENCES tags (tag_id) NOT NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
PRIMARY KEY (record_id, tag_id, tag_domain_id)
) PARTITION BY LIST (tag_domain_id);
@@ -9,9 +9,9 @@ CREATE TABLE tag_mappings
CREATE TABLE active_tag_mappings
(
record_id recordid REFERENCES records (record_id) NOT NULL,
tag_id tagid REFERENCES tags (tag_id) NOT NULL,
ideal_tag_id tagid REFERENCES tags (tag_id) NULL,
record_id INTEGER REFERENCES records (record_id) NOT NULL,
tag_id INTEGER REFERENCES tags (tag_id) NOT NULL,
ideal_tag_id INTEGER REFERENCES tags (tag_id) NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
PRIMARY KEY (record_id, tag_id, tag_domain_id),
FOREIGN KEY (record_id, tag_id, tag_domain_id) REFERENCES tag_mappings (record_id, tag_id, tag_domain_id) ON DELETE CASCADE

View File

@@ -1,8 +1,8 @@
CREATE TABLE tag_aliases
(
aliased_id tagid REFERENCES tags (tag_id) NOT NULL,
alias_id tagid REFERENCES tags (tag_id) NOT NULL,
ideal_alias_id tagid REFERENCES tags (tag_id) NULL,
aliased_id INTEGER REFERENCES tags (tag_id) NOT NULL,
alias_id INTEGER REFERENCES tags (tag_id) NOT NULL,
ideal_alias_id INTEGER REFERENCES tags (tag_id) NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
PRIMARY KEY (aliased_id, alias_id, tag_domain_id),
UNIQUE (tag_domain_id, aliased_id),

View File

@@ -0,0 +1,9 @@
CREATE TABLE tag_parents
(
parent_id INTEGER REFERENCES tags (tag_id) NOT NULL,
ideal_parent_id INTEGER REFERENCES tags (tag_id) NULL,
child_id INTEGER REFERENCES tags (tag_id) NOT NULL,
ideal_child_id INTEGER REFERENCES tags (tag_id) NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
PRIMARY KEY (parent_id, child_id, tag_domain_id)
) PARTITION BY LIST (tag_domain_id);

View File

@@ -2,8 +2,6 @@ CREATE OR REPLACE FUNCTION func_tag_mappings_after_insert()
RETURNS TRIGGER AS
$$
BEGIN
INSERT INTO active_tag_mappings(record_id, tag_id, tag_domain_id, ideal_tag_id)
SELECT new_rows.record_id,
new_rows.tag_id,

View File

@@ -4,7 +4,7 @@ CREATE OR REPLACE FUNCTION createbatchtags(
)
RETURNS TABLE
(
tag_id tagid,
tag_id INTEGER,
namespace_text TEXT,
subtag_text TEXT
)

View File

@@ -0,0 +1,10 @@
CREATE TABLE active_tag_mappings_parents
(
record_id INTEGER REFERENCES records (record_id) NOT NULL,
tag_id BIGINT REFERENCES tags (tag_id) NOT NULL,
origin_id BIGINT REFERENCES tags (tag_id) NOT NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
internal BOOLEAN DEFAULT FALSE NOT NULL,
internal_count INTEGER DEFAULT 0,
PRIMARY KEY (record_id, tag_id, origin_id, tag_domain_id)
);

View File

@@ -20,6 +20,29 @@ CREATE TRIGGER trg_insert_active_tag_mapping_parent
FOR EACH ROW
EXECUTE FUNCTION insert_active_tag_mapping_parent();
CREATE OR REPLACE FUNCTION delete_active_tag_mapping_parent()
RETURNS TRIGGER AS
$$
BEGIN
-- Delete the corresponding parent mappings from active_tag_mappings_parents
DELETE
FROM active_tag_mappings_parents
WHERE tag_id = old.parent_id
AND origin_id = old.child_id
AND tag_domain_id = old.tag_domain_id;
RETURN old;
END;
$$ LANGUAGE plpgsql;
-- Create the trigger for deletion
CREATE TRIGGER trg_delete_active_tag_mapping_parent
AFTER DELETE
ON tag_parents
FOR EACH ROW
EXECUTE FUNCTION delete_active_tag_mapping_parent();
CREATE OR REPLACE FUNCTION insert_active_tag_mappings_parents_from_mappings()
RETURNS TRIGGER AS
$$
@@ -47,4 +70,20 @@ FROM active_tag_mappings tm
ON
COALESCE(tm.ideal_tag_id, tm.tag_id) = COALESCE(tp.ideal_child_id, tp.child_id)
AND tm.tag_domain_id = tp.tag_domain_id
ON CONFLICT DO NOTHING;
ON CONFLICT DO NOTHING;
CREATE OR REPLACE FUNCTION intercept_active_tag_mappings_parents()
RETURNS TRIGGER AS
$$
BEGIN
new.ideal_tag_id = (SELECT ta.ideal_alias_id FROM tag_aliases ta WHERE ta.tag_domain_id = new.tag_domain_id AND ta.aliased_id = new.tag_id);
RETURN new;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trg_intercept_active_tag_mappings_parents
BEFORE INSERT
ON active_tag_mappings
FOR EACH ROW
EXECUTE FUNCTION intercept_active_tag_mappings_parents();

View File

@@ -46,6 +46,8 @@ BEGIN
WHEN 'DELETE' THEN EXECUTE remove_count(old.tag_id, old.ideal_tag_id, old.tag_domain_id);
END CASE;
RETURN new;
END;
$$ LANGUAGE plpgsql;

View File

@@ -0,0 +1,76 @@
-- =========================
-- Insert trigger function
-- =========================
CREATE OR REPLACE FUNCTION atmp_internal_on_insert()
RETURNS trigger
AS
$$
DECLARE
row RECORD;
BEGIN
-- FOR new IN SELECT * FROM new_rows
-- LOOP
-- Process each new row here
RAISE NOTICE 'Inserted: %', new;
RAISE NOTICE 'Count: %', (SELECT COUNT(*) FROM tag_parents tp2);
FOR row IN SELECT * FROM tag_parents tp2
LOOP
RAISE NOTICE 'TP Row: %', row;
END LOOP;
-- INSERT INTO active_tag_mappings_parents (record_id, tag_id, origin_id, tag_domain_id)
SELECT new.record_id AS record_id,
COALESCE(tp.ideal_parent_id, tp.parent_id) AS tag_id,
new.tag_id AS origin_id,
new.tag_domain_id AS tag_domain_id
INTO row
FROM tag_parents tp
WHERE COALESCE(tp.ideal_child_id, tp.child_id) = new.tag_id
AND tp.tag_domain_id = new.tag_domain_id
LIMIT 1;
RAISE NOTICE 'INSERT: %', row;
-- END LOOP;
RETURN new;
END;
$$ LANGUAGE plpgsql;
-- =========================
-- Delete trigger function
-- =========================
CREATE OR REPLACE FUNCTION atmp_internal_on_delete()
RETURNS trigger
AS
$$
BEGIN
RETURN new;
END;
$$ LANGUAGE plpgsql;
-- =========================
-- Triggers
-- =========================
-- Create trigger for INSERTs on active_tag_mappings_parents
DROP TRIGGER IF EXISTS trg_atmp_internal_insert ON active_tag_mappings_parents;
CREATE TRIGGER trg_atmp_internal_insert
AFTER INSERT
ON active_tag_mappings_parents
FOR EACH ROW
EXECUTE FUNCTION atmp_internal_on_insert();
-- Create trigger for DELETEs on active_tag_mappings_parents
DROP TRIGGER IF EXISTS trg_atmp_internal_delete ON active_tag_mappings_parents;
CREATE TRIGGER trg_atmp_internal_delete
AFTER DELETE
ON active_tag_mappings_parents
FOR EACH ROW
EXECUTE FUNCTION atmp_internal_on_delete();

View File

@@ -9,7 +9,7 @@
constexpr std::string_view query { R"(MIGRATION_QUERY)" };
#endif
log::info("Performing migration Current:{} -> Target:{}", migration_id, MIGRATION_TARGET_ID);
spdlog::info("Performing migration Current:{} -> Target:{}", migration_id, MIGRATION_TARGET_ID);
migration_id = MIGRATION_TARGET_ID;
tx.exec( query );
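The fragment above is the per-migration template that GenerateMigrations.cmake stamps out, substituting MIGRATION_QUERY and MIGRATION_TARGET_ID for each .sql file. A hypothetical sketch of the generated doMigration.cpp, assuming one guarded block per numbered migration (the guard condition and SQL body below are illustrative, not the actual generator output):
// Hypothetical shape of the generated file; the real blocks are produced by
// GenerateMigrations.cmake from the .sql files in the migration directory.
std::size_t doMigration( pqxx::nontransaction& tx, std::size_t migration_id )
{
	if ( migration_id < 1 )
	{
		constexpr std::string_view query { R"(CREATE TABLE idhan_info ( /* ... */ );)" };
		spdlog::info( "Performing migration Current:{} -> Target:{}", migration_id, 1 );
		migration_id = 1;
		tx.exec( query );
	}
	// ...one block per migration, in ascending id order...
	return migration_id;
}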

View File

@@ -2,7 +2,7 @@
// Created by kj16609 on 9/8/24.
//
#include "management.hpp"
#include "../include/management.hpp"
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
@@ -12,8 +12,6 @@
#include <pqxx/result>
#pragma GCC diagnostic pop
#include "logging/log.hpp"
namespace idhan::db
{

View File

@@ -7,13 +7,13 @@
#pragma GCC diagnostic ignored "-Wuseless-cast"
#pragma GCC diagnostic ignored "-Wswitch-enum"
#include <pqxx/pqxx>
#include <spdlog/spdlog.h>
#pragma GCC diagnostic pop
#include "management.hpp"
#include "migrations.hpp"
#include <cstdint>
#include "db/setup/management.hpp"
#include "fgl/features.hpp"
#include "logging/log.hpp"
#include <string_view>
namespace idhan::db
{

View File

@@ -2,7 +2,7 @@
// Created by kj16609 on 11/7/24.
//
#include "migrations.hpp"
#include "../include/migrations.hpp"
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
@@ -14,7 +14,7 @@
#include <cstdint>
#include "db/setup/management.hpp"
#include "../include/management.hpp"
namespace idhan::db
{

View File

@@ -2,22 +2,10 @@ project(IDHANServer LANGUAGES CXX C)
AddFGLExecutable(IDHANServer ${CMAKE_CURRENT_SOURCE_DIR}/src)
set(MIGRATION_SOURCE "${CMAKE_CURRENT_BINARY_DIR}/doMigration.cpp")
file(REMOVE ${MIGRATION_SOURCE})
target_sources(IDHANServer PRIVATE ${MIGRATION_SOURCE})
set(MIGRATION_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src/db/setup/migration)
file(GLOB_RECURSE MIGRATION_SQLS CONFIGURE_DEPENDS ${MIGRATION_DIR}/*.sql)
add_custom_command(
OUTPUT ${MIGRATION_SOURCE}
DEPENDS ${MIGRATION_SQLS}
COMMAND ${CMAKE_COMMAND} -DMIGRATION_DIR=${MIGRATION_DIR} -DOUT=${MIGRATION_SOURCE} -P "${CMAKE_CURRENT_SOURCE_DIR}/cmake_modules/GenerateMigrations.cmake"
COMMENT "${CMAKE_CURRENT_SOURCE_DIR}/cmake_modules/GenerateMigrations.cmake: Generating doMigration.cpp")
# Gui is needed for QImage for whatever reason
find_package(Qt6 REQUIRED COMPONENTS Core Multimedia)
find_package(spdlog REQUIRED)
@@ -31,7 +19,7 @@ endif ()
target_link_libraries(IDHANServer PUBLIC spdlog drogon)
target_link_libraries(IDHANServer PRIVATE pqxx Qt6::Core Qt6::Multimedia tomlplusplus::tomlplusplus uring)
target_link_libraries(IDHANServer PRIVATE IDHAN)
target_link_libraries(IDHANServer PUBLIC IDHANModules)
target_link_libraries(IDHANServer PUBLIC IDHANModules IDHANMigration)
target_compile_definitions(IDHANServer PUBLIC IDHAN_USE_STD_FORMAT)
if (DEFINED ALLOW_TABLE_DESTRUCTION AND ALLOW_TABLE_DESTRUCTION)

View File

@@ -1 +0,0 @@
;

View File

@@ -49,12 +49,17 @@ drogon::Task< std::expected< std::vector< TagID >, drogon::HttpResponsePtr > > c
static std::binary_semaphore sem { 1 };
if ( tag_pairs.size() == 0 )
{
co_return std::unexpected( createBadRequest( "No tags to create" ) );
}
try
{
sem.acquire();
const auto result { co_await db->execSqlCoro(
"SELECT * FROM createBatchTags($1::TEXT[], $2::TEXT[])",
"SELECT tag_id FROM createBatchTags($1::TEXT[], $2::TEXT[])",
std::move( namespace_params ),
std::move( subtag_params ) ) };

View File

@@ -5,9 +5,9 @@
#include "ManagementConnection.hpp"
#include "ConnectionArguments.hpp"
#include "db/setup/management.hpp"
#include "db/setup/migration/migrations.hpp"
#include "logging/log.hpp"
#include "management.hpp"
#include "migrations.hpp"
namespace idhan
{
@@ -35,6 +35,7 @@ ManagementConnection::ManagementConnection( const ConnectionArguments& arguments
}
else
{
tx.exec( "CREATE SCHEMA IF NOT EXISTS public" );
constexpr std::string_view schema { "public" };
db::destroyTables( tx );
db::updateMigrations( tx, schema );

View File

@@ -1,5 +0,0 @@
INSERT INTO file_clusters (cluster_name, folder_path)
VALUES ('SDD2 HydrusFiles', '/run/media/kj16609/SDD1/HydrusFiles/'),
('SDD3 HydrusFiles', '/run/media/kj16609/SDD2/HydrusFiles/'),
('HDD4 HydrusFiles', '/run/media/kj16609/HDD3/HydrusFiles/'),
('HDD5 HydrusFiles', '/run/media/kj16609/HDD4/HydrusFiles/');

View File

@@ -1,31 +0,0 @@
//
// Created by kj16609 on 9/13/24.
//
#include "indexes.hpp"
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Weffc++"
#pragma GCC diagnostic ignored "-Wuseless-cast"
#pragma GCC diagnostic ignored "-Wswitch-enum"
#include <pqxx/nontransaction>
#pragma GCC diagnostic pop
#include <array>
#include <string_view>
namespace idhan::db
{
constexpr std::array< std::string_view, 0 > index_sql {
};
void prepareInitalIndexes( pqxx::nontransaction& tx )
{
for ( const auto& sql : index_sql )
{
tx.exec( sql );
}
}
} // namespace idhan::db

View File

@@ -1,17 +0,0 @@
//
// Created by kj16609 on 9/13/24.
//
#pragma once
namespace pqxx
{
class nontransaction;
}
namespace idhan::db
{
void prepareInitalIndexes( pqxx::nontransaction& );
}

View File

@@ -1,9 +0,0 @@
CREATE DOMAIN tagid AS BIGINT;
CREATE DOMAIN subtagid AS BIGINT CONSTRAINT subtag_id_greater_than_zero CHECK (value > 0);
CREATE DOMAIN namespaceid AS INTEGER CONSTRAINT namespace_id_greater_than_zero CHECK (value > 0);
CREATE DOMAIN recordid AS INTEGER CONSTRAINT record_id_greater_than_zero CHECK (value > 0);
CREATE DOMAIN tagdomainid AS SMALLINT CONSTRAINT tag_domain_id_greater_than_zero CHECK (value > 0);

View File

@@ -1,8 +0,0 @@
CREATE SEQUENCE records_record_id_seq;
CREATE TABLE records
(
record_id recordid PRIMARY KEY DEFAULT NEXTVAL('records_record_id_seq'),
sha256 bytea UNIQUE NOT NULL,
CHECK ( LENGTH(sha256) = 32 )
);

View File

@@ -1,7 +0,0 @@
CREATE SEQUENCE tag_namespaces_namespace_id_seq;
CREATE TABLE tag_namespaces
(
namespace_id namespaceid PRIMARY KEY DEFAULT NEXTVAL('tag_namespaces_namespace_id_seq'),
namespace_text TEXT NOT NULL UNIQUE
);

View File

@@ -1,9 +0,0 @@
CREATE SEQUENCE tags_tag_id_seq;
CREATE TABLE tags
(
tag_id tagid PRIMARY KEY DEFAULT NEXTVAL('tags_tag_id_seq'),
subtag_id subtagid REFERENCES tag_subtags (subtag_id),
namespace_id namespaceid REFERENCES tag_namespaces (namespace_id),
UNIQUE (namespace_id, subtag_id)
);

View File

@@ -1,7 +0,0 @@
CREATE TABLE metadata
(
record_id recordid REFERENCES records (record_id) NOT NULL,
simple_mime_type SMALLINT NOT NULL,
json json DEFAULT NULL,
UNIQUE (record_id)
);

View File

@@ -1,8 +0,0 @@
CREATE TABLE image_metadata
(
record_id recordid REFERENCES records (record_id) NOT NULL,
width INTEGER NOT NULL,
height INTEGER NOT NULL,
channels SMALLINT NOT NULL,
UNIQUE (record_id)
);

View File

@@ -1,10 +0,0 @@
CREATE TABLE tag_parents
(
parent_id tagid REFERENCES tags (tag_id) NOT NULL,
ideal_parent_id tagid REFERENCES tags (tag_id) NULL,
child_id tagid REFERENCES tags (tag_id) NOT NULL,
ideal_child_id tagid REFERENCES tags (tag_id) NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
PRIMARY KEY (parent_id, child_id, tag_domain_id)
) PARTITION BY LIST (tag_domain_id);

View File

@@ -1,8 +0,0 @@
CREATE TABLE active_tag_mappings_parents
(
record_id INTEGER REFERENCES records (record_id) NOT NULL,
tag_id BIGINT REFERENCES tags (tag_id) NOT NULL,
origin_id BIGINT REFERENCES tags (tag_id) NOT NULL,
tag_domain_id SMALLINT REFERENCES tag_domains (tag_domain_id) NOT NULL,
PRIMARY KEY (record_id, tag_id, origin_id, tag_domain_id)
);

View File

@@ -1,14 +1,13 @@
find_package(Qt6 REQUIRED COMPONENTS Core Network Concurrent)
find_package(Catch2 REQUIRED)
find_package(GTest REQUIRED)
enable_testing()
AddFGLExecutable(IDHANTests ${CMAKE_CURRENT_SOURCE_DIR}/src)
target_link_libraries(IDHANTests PUBLIC IDHAN Catch2::Catch2WithMain IDHANClient libpqxx::pqxx)
target_link_libraries(IDHANTests PUBLIC IDHAN GTest::gtest_main IDHANClient libpqxx::pqxx IDHANMigration)
add_dependencies(IDHANTests IDHANServer)
include(CTest)
include(Catch)
catch_discover_tests(IDHANTests)
include(GoogleTest)
gtest_discover_tests(IDHANTests)

View File

@@ -4,7 +4,7 @@
#include <QCoreApplication>
#include <catch2/catch_all.hpp>
#include <gtest/gtest.h>
#include <random>
@@ -171,141 +171,120 @@ std::pair< std::string, std::string > generateTag()
return { gen_ns, gen_st };
}
TEST_CASE( "Client tests", "[server][client][network]" )
struct ClientTests : public ::testing::Test
{
int argc { 0 };
QCoreApplication app { argc, nullptr };
std::unique_ptr< QCoreApplication > app;
std::unique_ptr< ServerHandle > server_handle;
std::unique_ptr< idhan::IDHANClient > client;
SERVER_HANDLE;
SECTION( "Client connection" )
void SetUp() override
{
idhan::IDHANClient client { "localhost", idhan::IDHAN_DEFAULT_PORT, false };
SECTION( "Function tests" )
{
SECTION( "Tags" )
{
SECTION( "IDHANClient::createTag" )
{
SECTION( "Single string" )
{
auto tag_future { client.createTag( "character:toujou koneko" ) };
qtWaitFuture( tag_future );
REQUIRE( tag_future.resultCount() > 0 );
REQUIRE( tag_future.resultCount() == 1 );
const auto tag_id { tag_future.result() };
idhan::logging::info( "Got tag ID {} for tag {}", "character:toujou koneko", tag_id );
}
SECTION( "Split string" )
{
auto tag_future { client.createTag( "series", "highschool dxd" ) };
qtWaitFuture( tag_future );
REQUIRE( tag_future.resultCount() > 0 );
REQUIRE( tag_future.resultCount() == 1 );
const auto tag_id { tag_future.result() };
idhan::logging::info( "Got tag ID {} for tag {}", "series:highschool dxd", tag_id );
}
SECTION( "Empty namespace" )
{
auto tag_future { client.createTag( "", "highschool dxd" ) };
qtWaitFuture( tag_future );
REQUIRE( tag_future.resultCount() > 0 );
REQUIRE( tag_future.resultCount() == 1 );
}
SECTION( "Existing tag" )
{
auto tag_future { client.createTag( "character", "toujou koneko" ) };
qtWaitFuture( tag_future );
REQUIRE( tag_future.resultCount() > 0 );
REQUIRE( tag_future.resultCount() == 1 );
const auto tag_id { tag_future.result() };
idhan::logging::info( "Got tag ID {} for tag {}", "character:toujou koneko", tag_id );
auto tag_future2 { client.createTag( "character", "toujou koneko" ) };
qtWaitFuture( tag_future2 );
REQUIRE( tag_future2.resultCount() > 0 );
}
}
SECTION( "IDHANClient::createTags" )
{
SECTION( "Split strings" )
{
const std::vector< std::pair< std::string, std::string > > tags {
{ "character", "toujou koneko" }, { "series", "highschool dxd" }
};
auto future { client.createTags( tags ) };
qtWaitFuture( future );
REQUIRE( future.resultCount() > 0 );
const auto tag_ids { future.result() };
REQUIRE( tag_ids.size() == tags.size() );
}
SECTION( "Combined string" )
{
const std::vector< std::string > tags { "character:toujou koneko", "series:highschool dxd" };
auto future { client.createTags( tags ) };
qtWaitFuture( future );
REQUIRE( future.resultCount() > 0 );
const auto tag_ids { future.result() };
REQUIRE( tag_ids.size() == tags.size() );
}
}
}
}
app = std::make_unique< QCoreApplication >( argc, nullptr );
server_handle = std::make_unique< ServerHandle >( startServer() );
client = std::make_unique< idhan::IDHANClient >( "test", "localhost", idhan::IDHAN_DEFAULT_PORT, false );
}
};
TEST_F( ClientTests, ClientTest )
{
SUCCEED();
}
class ClientTagTests : public ClientTests
{
};
TEST_F( ClientTagTests, CreateTagSingleString )
{
auto tag_future { client->createTag( "character:toujou koneko" ) };
qtWaitFuture( tag_future );
ASSERT_GT( tag_future.resultCount(), 0 );
ASSERT_EQ( tag_future.resultCount(), 1 );
const auto tag_id { tag_future.result() };
idhan::logging::info( "Got tag ID {} for tag {}", "character:toujou koneko", tag_id );
}
TEST_F( ClientTagTests, CreateTagSplitString )
{
auto tag_future { client->createTag( "series", "highschool dxd" ) };
qtWaitFuture( tag_future );
ASSERT_GT( tag_future.resultCount(), 0 );
ASSERT_EQ( tag_future.resultCount(), 1 );
const auto tag_id { tag_future.result() };
idhan::logging::info( "Got tag ID {} for tag {}", "series:highschool dxd", tag_id );
}
TEST_F( ClientTagTests, CreateTagEmptyNamespace )
{
auto tag_future { client->createTag( "", "highschool dxd" ) };
qtWaitFuture( tag_future );
ASSERT_GT( tag_future.resultCount(), 0 );
ASSERT_EQ( tag_future.resultCount(), 1 );
}
TEST_F( ClientTagTests, CreateExistingTag )
{
auto tag_future { client->createTag( "character", "toujou koneko" ) };
qtWaitFuture( tag_future );
ASSERT_GT( tag_future.resultCount(), 0 );
ASSERT_EQ( tag_future.resultCount(), 1 );
const auto tag_id { tag_future.result() };
idhan::logging::info( "Got tag ID {} for tag {}", "character:toujou koneko", tag_id );
auto tag_future2 { client->createTag( "character", "toujou koneko" ) };
qtWaitFuture( tag_future2 );
ASSERT_GT( tag_future2.resultCount(), 0 );
}
TEST_F( ClientTagTests, CreateTagsSplitStrings )
{
const std::vector< std::pair< std::string, std::string > > tags {
{ "character", "toujou koneko" }, { "series", "highschool dxd" }
};
auto future { client->createTags( tags ) };
qtWaitFuture( future );
ASSERT_GT( future.resultCount(), 0 );
const auto tag_ids { future.result() };
ASSERT_EQ( tag_ids.size(), tags.size() );
}
TEST_F( ClientTagTests, CreateTagsCombinedStrings )
{
const std::vector< std::string > tags { "character:toujou koneko", "series:highschool dxd" };
auto future { client->createTags( tags ) };
qtWaitFuture( future );
ASSERT_GT( future.resultCount(), 0 );
const auto tag_ids { future.result() };
ASSERT_EQ( tag_ids.size(), tags.size() );
}
// INSTANTIATE_TEST_SUITE_P( Create, ClientTagParamTests, ::testing::Values( "character:toujou koneko", "series:highschool dxd" ) );
/*
TEST_CASE( "Benchmarks" )
TEST( Benchmarks, CreateTags )
{
int argc { 0 };
QCoreApplication app { argc, nullptr };
SERVER_HANDLE;
idhan::IDHANClientConfig config {};
config.hostname = "localhost";
config.port = idhan::IDHAN_DEFAULT_PORT;
config.self_name = "testing suite";
config.use_ssl = false;
idhan::IDHANClient client { "test", "localhost", idhan::IDHAN_DEFAULT_PORT, false };
idhan::IDHANClient client { config };
std::vector< std::pair< std::string, std::string > > tags( 16 );
BENCHMARK_ADVANCED( "Create tags" )( Catch::Benchmark::Chronometer meter )
{
std::vector< std::pair< std::string, std::string > > tags( 16 );
for ( std::size_t i = 0; i < tags.size(); i++ ) tags[ i ] = generateTag();
for ( std::size_t i = 0; i < tags.size(); i++ ) tags[ i ] = generateTag();
testing::internal::CaptureStdout();
auto start = testing::internal::TimeInMillis();
meter.measure(
[ & ]
{
auto future { client.createTags( tags ) };
qtWaitFuture( future );
return future.result();
} );
};
auto future { client.createTags( tags ) };
qtWaitFuture( future );
auto result = future.result();
auto elapsed = testing::internal::TimeInMillis() - start;
testing::internal::GetCapturedStdout();
EXPECT_GT( result.size(), 0 );
EXPECT_LT( elapsed, 5000 ); // 5 second timeout
}
*/

View File

@@ -4,13 +4,13 @@
#include <QCoreApplication>
#include <catch2/catch_all.hpp>
#include <gtest/gtest.h>
#include "NET_CONSTANTS.hpp"
#include "idhan/IDHANClient.hpp"
#include "serverStarterHelper.hpp"
TEST_CASE( "Server setup", "[server][network]" )
TEST( ServerTests, ServerSetup )
{
SERVER_HANDLE;

View File

@@ -1 +0,0 @@
# This folder contains tests that are to be executed after the server has been initialized using testmode without any operations performed on it.

View File

@@ -1,517 +0,0 @@
//
// Created by kj16609 on 7/15/25.
//
#include <pqxx/pqxx>
#include <iostream>
#include <thread>
#include "IDHANTypes.hpp"
#include "catch2/catch_test_macros.hpp"
#include "serverStarterHelper.hpp"
#include "spdlog/spdlog.h"
constexpr idhan::TagDomainID DEFAULT_DOMAIN { 1 };
// Test fixture class to encapsulate common setup
struct TagTestFixture
{
pqxx::connection c;
idhan::RecordID dummy_id;
pqxx::work w;
TagTestFixture() :
c { "host=localhost dbname=idhan-db user=idhan" },
w { c }
{
spdlog::set_level( spdlog::level::debug );
// c.set_verbosity( pqxx::error_verbosity::verbose );
c.set_notice_handler(
[]( const pqxx::zview& message )
{
// if message starts with DEBUG then print it
if ( message.starts_with( "DEBUG" ) )
{
spdlog::debug( message );
}
else
{
spdlog::info( message );
}
} );
// Set search path to test schema
w.exec( "SET search_path='test'" );
// Allow for debugs to print from sql
w.exec( "SET client_min_messages = 'debug1';" );
// Create dummy record for testing
std::string str {};
for ( std::size_t i = 0; i < ( 256 / 8 ); ++i ) str += "F0";
const auto dummy_record {
w.exec( std::format( "INSERT INTO records (sha256) VALUES (\'\\x{}\'::bytea) RETURNING record_id", str ) )
};
dummy_id = dummy_record[ 0 ][ 0 ].as< idhan::RecordID >();
}
// Example refactored functions showing the pattern:
void tryCommit()
{
// w.commit();
}
idhan::TagID createTag( const std::string& namespace_name, const std::string& subtag )
{
auto query = R"( SELECT * FROM createTag($1, $2);)";
CAPTURE( query );
const auto result { w.exec( query, pqxx::params { namespace_name, subtag } ) };
tryCommit();
return result.one_row()[ 0 ].as< idhan::TagID >();
}
void createMapping( const idhan::TagID tag_id )
{
const auto query { "INSERT INTO tag_mappings (domain_id, record_id, tag_id) VALUES ($1, $2, $3)" };
pqxx::params params { DEFAULT_DOMAIN, dummy_id, tag_id };
w.exec( query, params );
tryCommit();
}
void deleteMapping( const idhan::TagID tag_id )
{
const auto query { "DELETE FROM tag_mappings WHERE domain_id = $1 AND record_id = $2 AND tag_id = $3" };
pqxx::params params { DEFAULT_DOMAIN, dummy_id, tag_id };
w.exec( query, params );
tryCommit();
}
void createAlias( const idhan::TagID aliased_id, const idhan::TagID tag_id )
{
const auto query { "INSERT INTO tag_aliases (domain_id, aliased_id, alias_id) VALUES ($1, $2, $3)" };
pqxx::params params { DEFAULT_DOMAIN, aliased_id, tag_id };
w.exec( query, params );
tryCommit();
}
void deleteAlias( const idhan::TagID aliased_id, const idhan::TagID tag_id )
{
const auto query { "DELETE FROM tag_aliases WHERE domain_id = $1 AND aliased_id = $2 AND alias_id = $3" };
pqxx::params params { DEFAULT_DOMAIN, aliased_id, tag_id };
w.exec( query, params );
tryCommit();
}
void createParent( const idhan::TagID parent_id, const idhan::TagID child_id )
{
const auto query { "INSERT INTO tag_parents (domain_id, parent_id, child_id) VALUES ($1, $2, $3)" };
pqxx::params params { DEFAULT_DOMAIN, parent_id, child_id };
w.exec( query, params );
tryCommit();
}
void deleteParent( const idhan::TagID parent_id, const idhan::TagID child_id )
{
const auto query { "DELETE FROM tag_parents WHERE domain_id = $1 AND parent_id = $2 AND child_id = $3" };
pqxx::params params { DEFAULT_DOMAIN, parent_id, child_id };
w.exec( query, params );
tryCommit();
}
};
#define REQUIRE_VIRTUAL_TAG( origin_id_i, tag_id_i ) \
{ \
const auto virtual_mappings { fixture.getVirtualMappings() }; \
REQUIRE( \
std::ranges::find_if( \
virtual_mappings, \
[]( const VirtualTagMapping& mapping ) -> bool \
{ return mapping.tag_id = tag_id_i && mapping.origin_id == origin_id_i; } ) ); \
}
#define REQUIRE_MAPPING_IDEAL( tag_id, ideal_id ) \
{ \
const pqxx::params params { fixture.dummy_id, tag_id, DEFAULT_DOMAIN, ideal_id }; \
const auto result { fixture.w.exec( \
"SELECT 1 FROM tag_mappings WHERE record_id = $1 AND tag_id = $2 AND domain_id = $3 AND ideal_tag_id = $4", \
params ) }; \
REQUIRE( result.size() == 1 ); \
}
#define REQUIRE_MAPPING( tag_id ) \
{ \
const pqxx::params params { fixture.dummy_id, tag_id, DEFAULT_DOMAIN }; \
const auto result { \
fixture.w \
.exec( "SELECT 1 FROM tag_mappings WHERE record_id = $1 AND tag_id = $2 AND domain_id = $3", params ) \
}; \
REQUIRE( result.size() == 1 ); \
}
#define REQUIRE_FLATTENED_ALIAS( aliased_id, alias_id ) \
{ \
const pqxx::params params { aliased_id, alias_id }; \
const auto result { \
fixture.w.exec( "SELECT 1 FROM flattened_aliases WHERE aliased_id = $1 AND alias_id = $2", params ) \
}; \
REQUIRE( result.size() == 1 ); \
}
#define REQUIRE_PARENT_MAPPING( origin_id, parent_id ) \
{ \
const pqxx::params params { fixture.dummy_id, parent_id, origin_id }; \
const auto result { fixture.w.exec( \
"SELECT 1 FROM tag_mappings_virtual WHERE record_id = $1 AND tag_id = $2 AND origin_id = $3", params ) }; \
REQUIRE( result.size() == 1 ); \
}
TEST_CASE( "Tag table existence and basic creation", "[tags][db][server]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
// Test that `tags` table exists
const auto result { fixture.w.exec( "SELECT * FROM tags" ) };
REQUIRE( result.size() == 0 );
// Test tag creation
const auto tag_result { fixture.w.exec( "SELECT createTag('character', 'toujou koneko')" ) };
REQUIRE( tag_result.size() == 1 );
REQUIRE( tag_result[ 0 ][ 0 ].as< idhan::TagID >() == 1 );
}
TEST_CASE( "Tag alias self-reference protection", "[tags][db][server]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
const auto tag_self_ref { fixture.createTag( "", "self_ref" ) };
REQUIRE_THROWS( fixture.createAlias( tag_self_ref, tag_self_ref ) );
}
TEST_CASE( "Basic tag aliases", "[tags][db][server]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
const auto tag_empty_toujou = fixture.createTag( "", "toujou koneko" );
const auto tag_character_toujou = fixture.createTag( "character", "toujou koneko" );
SECTION( "Creating simple alias" )
{
fixture.createAlias( tag_empty_toujou, tag_character_toujou );
REQUIRE_FLATTENED_ALIAS( tag_empty_toujou, tag_character_toujou );
}
SECTION( "Recursive alias prevention" )
{
const auto tag_character_shrione = fixture.createTag( "character", "shrione" );
fixture.createAlias( tag_character_toujou, tag_character_shrione );
REQUIRE_THROWS( fixture.createAlias( tag_character_shrione, tag_character_toujou ) );
}
}
TEST_CASE( "Tag alias chains", "[tags][db][server]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
const auto tag_empty_toujou = fixture.createTag( "", "toujou koneko" );
const auto tag_character_toujou = fixture.createTag( "character", "toujou koneko" );
const auto tag_character_shrione = fixture.createTag( "character", "shrione" );
SECTION( "Two-level alias chain" )
{
fixture.createAlias( tag_empty_toujou, tag_character_toujou );
fixture.createAlias( tag_character_toujou, tag_character_shrione );
REQUIRE_FLATTENED_ALIAS( tag_empty_toujou, tag_character_shrione );
REQUIRE_FLATTENED_ALIAS( tag_character_toujou, tag_character_shrione );
}
SECTION( "Alias chain deletion and restoration" )
{
fixture.createAlias( tag_empty_toujou, tag_character_toujou );
fixture.createAlias( tag_character_toujou, tag_character_shrione );
// Delete middle alias
fixture.deleteAlias( tag_character_toujou, tag_character_shrione );
REQUIRE_FLATTENED_ALIAS( tag_empty_toujou, tag_character_toujou );
// Recreate middle alias
fixture.createAlias( tag_character_toujou, tag_character_shrione );
REQUIRE_FLATTENED_ALIAS( tag_empty_toujou, tag_character_shrione );
REQUIRE_FLATTENED_ALIAS( tag_character_toujou, tag_character_shrione );
}
SECTION( "Circular alias chain prevention" )
{
const auto A = fixture.createTag( "", "A" );
const auto B = fixture.createTag( "", "B" );
const auto C = fixture.createTag( "", "C" );
// A -> B -> C
fixture.createAlias( A, B );
fixture.createAlias( B, C );
THEN( "Circular references must throw" )
{
// Attempt to create circular references
// C -> A
REQUIRE_THROWS( fixture.createAlias( C, A ) );
// C -> B
REQUIRE_THROWS( fixture.createAlias( C, B ) );
// B -> A
REQUIRE_THROWS( fixture.createAlias( B, A ) );
REQUIRE_THROWS( fixture.createAlias( A, A ) );
REQUIRE_THROWS( fixture.createAlias( B, B ) );
REQUIRE_THROWS( fixture.createAlias( C, C ) );
}
SECTION( "New chains are prevented if circular" )
{
// D -> E
const auto D = fixture.createTag( "", "D" );
const auto E = fixture.createTag( "", "E" );
fixture.createAlias( D, E );
fixture.createAlias( E, A );
REQUIRE_THROWS( fixture.createAlias( C, D ) );
}
}
}
TEST_CASE( "Complex alias chain repair", "[tags][db][server]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
const auto tag_1 = fixture.createTag( "", "tag_1" );
const auto tag_2 = fixture.createTag( "", "tag_2" );
const auto tag_3 = fixture.createTag( "", "tag_3" );
const auto tag_4 = fixture.createTag( "", "tag_4" );
const auto tag_5 = fixture.createTag( "", "tag_5" );
const auto tag_6 = fixture.createTag( "", "tag_6" );
// Create two separate chains
fixture.createAlias( tag_1, tag_2 );
fixture.createAlias( tag_2, tag_3 );
REQUIRE_FLATTENED_ALIAS( tag_1, tag_3 );
REQUIRE_FLATTENED_ALIAS( tag_2, tag_3 );
fixture.createAlias( tag_4, tag_5 );
fixture.createAlias( tag_5, tag_6 );
REQUIRE_FLATTENED_ALIAS( tag_4, tag_6 );
REQUIRE_FLATTENED_ALIAS( tag_5, tag_6 );
// Connect the chains
fixture.createAlias( tag_3, tag_4 );
REQUIRE_FLATTENED_ALIAS( tag_1, tag_6 );
REQUIRE_FLATTENED_ALIAS( tag_2, tag_6 );
REQUIRE_FLATTENED_ALIAS( tag_3, tag_6 );
REQUIRE_FLATTENED_ALIAS( tag_4, tag_6 );
REQUIRE_FLATTENED_ALIAS( tag_5, tag_6 );
}
TEST_CASE( "Tag parent relationships", "[tags][db][server][parents]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
const auto tag_ahri { fixture.createTag( "character", "ahri (league of legends)" ) };
const auto tag_league { fixture.createTag( "series", "league of legends" ) };
const auto tag_riot { fixture.createTag( "copyright", "riot games" ) };
GIVEN( "A record with mapping `character:ahri (league of legends)" )
{
fixture.createMapping( tag_ahri );
THEN( "The mapping should exist" )
{
REQUIRE_MAPPING( tag_ahri );
}
WHEN( "Adding a parent of 'series:league of legends' to 'character:ahri(league of legends)'" )
{
fixture.createParent( tag_league, tag_ahri );
THEN( "The mapping should exist in the aliased_parents table" )
{
const auto result { fixture.w.exec( "SELECT * FROM aliased_parents" ) };
REQUIRE( result.size() == 1 );
const auto result_row { result[ 0 ] };
REQUIRE( result_row[ "original_parent_id" ].as< idhan::TagID >() == tag_league );
REQUIRE( result_row[ "original_child_id" ].as< idhan::TagID >() == tag_ahri );
REQUIRE( result_row[ "parent_id" ].is_null() );
REQUIRE( result_row[ "child_id" ].is_null() );
}
THEN( "The virtual mappings table should have a tag of `series:league of legends` for record 1" )
{
const auto result { fixture.w.exec(
"SELECT COUNT(*) FROM tag_mappings_virtual WHERE record_id = $1 AND tag_id = $2 AND origin_id = $3",
pqxx::params { fixture.dummy_id, tag_league, tag_ahri } ) };
REQUIRE( result[ 0 ][ 0 ].as< int >() == 1 );
}
AND_WHEN( "Adding a parent of `copyright:riot games` to `series:league of legends`" )
{
fixture.createParent( tag_riot, tag_league );
THEN( "The aliased_parents table should have two rows" )
{
const auto result { fixture.w.exec( "SELECT * FROM aliased_parents" ) };
REQUIRE( result.size() == 2 );
}
THEN( "The virtual mappings table should have a tag of `series:league of legends`" )
{
const auto result1 { fixture.w.exec(
"SELECT COUNT(*) FROM tag_mappings_virtual WHERE record_id = $1 AND tag_id = $2 AND origin_id = $3",
pqxx::params { fixture.dummy_id, tag_league, tag_ahri } ) };
REQUIRE( result1[ 0 ][ 0 ].as< int >() == 1 );
}
THEN( "The virtual mappings table should have a tag of `copyright:riot games`" )
{
const auto result2 { fixture.w.exec(
"SELECT COUNT(*) FROM tag_mappings_virtual WHERE record_id = $1 AND tag_id = $2 AND origin_id = $3",
pqxx::params { fixture.dummy_id, tag_riot, tag_league } ) };
REQUIRE( result2[ 0 ][ 0 ].as< int >() == 1 );
}
AND_WHEN( "The parent `series:league of legends` is removed from `character:ahri (league of legends)" )
{
fixture.deleteParent( tag_league, tag_ahri );
THEN( "The only row that should remain should be league -> riot" )
{
const auto result { fixture.w.exec( "SELECT * FROM aliased_parents" ) };
REQUIRE( result.size() == 1 );
const auto result_row { result[ 0 ] };
REQUIRE( result_row[ "original_parent_id" ].as< idhan::TagID >() == tag_riot );
REQUIRE( result_row[ "original_child_id" ].as< idhan::TagID >() == tag_league );
}
AND_WHEN( "The mapping is re-added" )
{
fixture.createParent( tag_league, tag_ahri );
THEN( "The aliased parents should have two rows" )
{
const auto result { fixture.w.exec( "SELECT * FROM aliased_parents" ) };
REQUIRE( result.size() == 2 );
}
THEN( "The virtual mappings should be restored" )
{
const auto result { fixture.w.exec( "SELECT * FROM tag_mappings_virtual" ) };
// Helps with debugging
CHECKED_IF( result.size() == 1 )
{
const auto result_row { result[ 0 ] };
// Check that the only row is at least the first part of the chain (tag_ahri -> tag_league)
REQUIRE( result_row[ "record_id" ].as< idhan::RecordID >() == fixture.dummy_id );
REQUIRE( result_row[ "tag_id" ].as< idhan::TagID >() == tag_league );
REQUIRE( result_row[ "origin_id" ].as< idhan::TagID >() == tag_ahri );
REQUIRE( result_row[ "domain_id" ].as< idhan::TagDomainID >() == DEFAULT_DOMAIN );
}
REQUIRE( result.size() == 2 );
}
}
}
}
AND_WHEN( "The parent relationship is deleted" )
{
fixture.deleteParent( tag_league, tag_ahri );
THEN( "The virtual table should have no mappings" )
{
const auto total_result { fixture.w.exec( "SELECT COUNT(*) FROM tag_mappings_virtual" ) };
REQUIRE( total_result[ 0 ][ 0 ].as< int >() == 0 );
}
}
}
}
}
TEST_CASE( "Tag mappings and idealization", "[tags][db][server]" )
{
SERVER_HANDLE;
std::this_thread::sleep_for( std::chrono::milliseconds( 25 ) );
TagTestFixture fixture;
const auto tag_toujou = fixture.createTag( "", "toujou koneko" );
const auto tag_character_toujou = fixture.createTag( "character", "toujou koneko" );
const auto tag_character_shrione = fixture.createTag( "character", "shrione" );
SECTION( "Basic mapping without idealization" )
{
fixture.createMapping( tag_toujou );
REQUIRE_MAPPING( tag_toujou );
// Cleanup
fixture.deleteMapping( tag_toujou );
}
SECTION( "Mapping with single alias idealization" )
{
fixture.createMapping( tag_toujou );
fixture.createAlias( tag_toujou, tag_character_toujou );
REQUIRE_MAPPING_IDEAL( tag_toujou, tag_character_toujou );
// fixture.verifyMappingIdealised( tag_toujou, tag_character_toujou );
// Remove alias
fixture.deleteAlias( tag_toujou, tag_character_toujou );
REQUIRE_MAPPING( tag_toujou );
// Cleanup
fixture.deleteMapping( tag_toujou );
}
SECTION( "Mapping with chained alias idealization" )
{
fixture.createMapping( tag_toujou );
fixture.createAlias( tag_toujou, tag_character_toujou );
fixture.createAlias( tag_character_toujou, tag_character_shrione );
REQUIRE_MAPPING_IDEAL( tag_toujou, tag_character_shrione );
// Remove first alias in chain
fixture.deleteAlias( tag_toujou, tag_character_toujou );
REQUIRE_MAPPING( tag_toujou );
// Cleanup
fixture.deleteMapping( tag_toujou );
}
}

View File

@@ -0,0 +1,54 @@
//
// Created by kj16609 on 8/18/25.
//
#include "MappingFixture.hpp"
void MappingFixture::createMapping( TagID tag_id )
{
pqxx::work tx { *conn };
tx.exec_params( "INSERT INTO tag_mappings (tag_id, record_id, tag_domain_id) VALUES ($1, $2, $3)", pqxx::params { tag_id, default_record_id, default_domain_id } );
tx.commit();
}
void MappingFixture::deleteMapping( TagID tag_id )
{
pqxx::work tx { *conn };
tx.exec_params( "DELETE FROM tag_mappings WHERE tag_id = $1 AND record_id = $2 AND tag_domain_id = $3", pqxx::params { tag_id, default_record_id, default_domain_id } );
tx.commit();
}
bool MappingFixture::mappingExists( TagID tag_id )
{
pqxx::work tx { *conn };
const auto result { tx.exec_params( "SELECT EXISTS(SELECT 1 FROM tag_mappings WHERE tag_id = $1 AND record_id = $2 AND tag_domain_id = $3)", pqxx::params { tag_id, default_record_id, default_domain_id } ) };
tx.commit();
return result[ 0 ][ 0 ].as< bool >();
}
RecordID MappingFixture::createRecord( const std::string_view data )
{
pqxx::work tx { *conn };
const auto result { tx.exec_params( "INSERT INTO records (sha256) VALUES (digest($1, 'sha256')) RETURNING record_id", pqxx::params { data } ) };
tx.commit();
if ( result.empty() ) throw std::runtime_error( "Failed to create record" );
return result[ 0 ][ 0 ].as< RecordID >();
}
void MappingFixture::SetUp()
{
ServerTagFixture::SetUp();
default_record_id = createRecord( "test" );
}

View File

@@ -0,0 +1,19 @@
//
// Created by kj16609 on 8/18/25.
//
#pragma once
#include "ServerTagFixture.hpp"
class MappingFixture : public ServerTagFixture
{
RecordID default_record_id { 0 };
protected:
void createMapping( TagID tag_id );
void deleteMapping( TagID tag_id );
bool mappingExists( TagID tag_id );
RecordID createRecord( const std::string_view data );
void SetUp() override;
};

View File

@@ -0,0 +1,37 @@
//
// Created by kj16609 on 8/18/25.
//
#include "ServerDBFixture.hpp"
#include <pqxx/connection>
#include <pqxx/nontransaction>
#include <memory>
#include "migrations.hpp"
void ServerDBFixture::SetUp()
{
conn = std::make_unique< pqxx::connection >(
"dbname=idhan-test "
"user=idhan "
"host=localhost "
"port=5432" );
pqxx::nontransaction tx { *conn };
tx.exec( "DROP SCHEMA IF EXISTS public CASCADE" );
tx.exec( "CREATE SCHEMA public" );
idhan::db::updateMigrations( tx, "public" );
}
void ServerDBFixture::TearDown()
{
if ( conn )
{
conn->close();
conn.reset();
}
}

View File

@@ -0,0 +1,18 @@
//
// Created by kj16609 on 8/18/25.
//
#pragma once
#include <gtest/gtest.h>
#include <pqxx/pqxx>
class ServerDBFixture : public testing::Test
{
protected:
std::unique_ptr< pqxx::connection > conn;
void SetUp() override;
void TearDown() override;
};

View File

@@ -0,0 +1,76 @@
//
// Created by kj16609 on 8/18/25.
//
#include "ServerTagFixture.hpp"
#include "ServerDBFixture.hpp"
#include "migrations.hpp"
#include "splitTag.hpp"
void ServerTagFixture::SetUp()
{
ServerDBFixture::SetUp();
default_domain_id = createDomain( "default" );
}
idhan::TagDomainID ServerTagFixture::createDomain( const std::string_view name ) const
{
if ( !conn ) throw std::runtime_error( "Connection was nullptr" );
pqxx::work tx { *conn };
const auto result { tx.exec_params( "INSERT INTO tag_domains (domain_name) VALUES ($1) ON CONFLICT DO NOTHING RETURNING tag_domain_id", pqxx::params { name } ) };
if ( result.empty() )
{
const auto search_result { tx.exec_params( "SELECT tag_domain_id FROM tag_domains WHERE domain_name = $1", pqxx::params { name } ) };
if ( search_result.empty() ) throw std::runtime_error( "Failed to create domain" );
return search_result[ 0 ][ 0 ].as< idhan::TagDomainID >();
}
return result[ 0 ][ 0 ].as< idhan::TagDomainID >();
}
idhan::TagID ServerTagFixture::createTag( const std::string_view text ) const
{
if ( !conn ) throw std::runtime_error( "Connection was nullptr" );
pqxx::work tx { *conn };
const auto [ namespace_text, subtag_text ] = idhan::splitTag( text );
std::vector< std::string > namespace_texts {};
namespace_texts.emplace_back( namespace_text );
std::vector< std::string > subtag_texts {};
subtag_texts.emplace_back( subtag_text );
const auto result { tx.exec_params( "SELECT tag_id FROM createBatchTags($1, $2) ", pqxx::params { namespace_texts, subtag_texts } ) };
tx.commit();
if ( result.empty() ) throw std::runtime_error( "No tags returned" );
return result[ 0 ][ 0 ].as< idhan::TagID >();
}
void ServerTagFixture::createAlias( const TagID aliased_id, const TagID alias_id )
{
if ( !conn ) throw std::runtime_error( "Connection was nullptr" );
pqxx::work tx { *conn };
tx.exec_params( "INSERT INTO tag_aliases (aliased_id, alias_id, tag_domain_id) VALUES ($1, $2, $3)", pqxx::params { aliased_id, alias_id, default_domain_id } );
tx.commit();
}
bool ServerTagFixture::aliasExists( const TagID aliased_id, const TagID alias_id )
{
if ( !conn ) throw std::runtime_error( "Connection was nullptr" );
pqxx::work tx { *conn };
const auto result { tx.exec_params( "SELECT EXISTS(SELECT 1 FROM tag_aliases WHERE aliased_id = $1 AND alias_id = $2 AND tag_domain_id = $3)", pqxx::params { aliased_id, alias_id, default_domain_id } ) };
tx.commit();
return result[ 0 ][ 0 ].as< bool >();
}

View File

@@ -0,0 +1,29 @@
//
// Created by kj16609 on 8/18/25.
//
#pragma once
#include <gtest/gtest.h>
#include <pqxx/pqxx>
#include "IDHANTypes.hpp"
#include "ServerDBFixture.hpp"
using namespace idhan;
class ServerTagFixture : public ServerDBFixture
{
protected:
void SetUp() override;
TagDomainID createDomain( std::string_view name ) const;
TagID createTag( std::string_view text ) const;
void createAlias( TagID aliased_id, TagID alias_id );
bool aliasExists( TagID aliased_id, TagID alias_id );
TagDomainID default_domain_id { 0 };
};

View File

@@ -0,0 +1,32 @@
//
// Created by kj16609 on 8/18/25.
//
#include "ServerTagFixture.hpp"
TEST_F( ServerTagFixture, TagAliasCreation )
{
const auto tag_1 { createTag( "tag:1" ) };
const auto tag_2 { createTag( "tag:2" ) };
createAlias( tag_1, tag_2 );
ASSERT_TRUE( aliasExists( tag_1, tag_2 ) );
SUCCEED();
}
TEST_F( ServerTagFixture, RecursiveProtection )
{
const auto tag_1 { createTag( "tag:1" ) };
const auto tag_2 { createTag( "tag:2" ) };
const auto tag_3 { createTag( "tag:3" ) };
ASSERT_NO_THROW( createAlias( tag_1, tag_2 ) );
ASSERT_NO_THROW( createAlias( tag_2, tag_3 ) );
ASSERT_ANY_THROW( createAlias( tag_2, tag_1 ) );
ASSERT_ANY_THROW( createAlias( tag_3, tag_1 ) );
SUCCEED();
}

View File

@@ -0,0 +1,19 @@
//
// Created by kj16609 on 8/18/25.
//
#include "MappingFixture.hpp"
TEST_F( MappingFixture, StorageMapping )
{
const auto tag_1 { createTag( "tag:1" ) };
const auto tag_2 { createTag( "tag:2" ) };
createMapping( tag_1 );
createMapping( tag_2 );
ASSERT_TRUE( mappingExists( tag_1 ) );
ASSERT_TRUE( mappingExists( tag_2 ) );
SUCCEED();
}