API migrations and docs

This commit is contained in:
2024-11-08 10:27:52 -05:00
parent 6d71341166
commit a2146f4335
41 changed files with 362 additions and 388 deletions

3
.gitmodules vendored
View File

@@ -37,3 +37,6 @@
[submodule "3rd-party/hydrus"]
path = 3rd-party/hydrus
url = https://github.com/hydrusnetwork/hydrus.git
[submodule "docs/doxygen-awesome-css"]
path = docs/doxygen-awesome-css
url = https://github.com/jothepro/doxygen-awesome-css.git

View File

@@ -6,16 +6,21 @@ project(IDHAN LANGUAGES CXX)
enable_testing()
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/fgl_cmake_modules)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/modules")
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(SPDLOG_USE_STD_FORMAT ON)
PreSetup()
option(BUILD_IDHAN_CLIENT "Builds the shared library for the IDHANClient" ON)
option(BUILD_IDHAN_TESTS "Builds all tests relating to IDHAN" ON)
option(BUILD_HYDRUS_IMPORTER "Builds the hydrus importer" ON)
set(SPDLOG_USE_STD_FORMAT ON)
if (BUILD_HYDRUS_IMPORTER)
set(BUILD_IDHAN_CLIENT ON)
endif ()
set(CMAKE_POSITION_INDEPENDENT_CODE ON)

View File

@@ -943,9 +943,9 @@ WARN_LOGFILE =
# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
# Note: If this tag is empty the current directory is searched.
INPUT = ./src/IDHAN \
./src/IDHANClient \
./src/IDHANServer \
INPUT = ./IDHAN \
./IDHANClient \
./IDHANServer \
./README.md
# This tag can be used to specify the character encoding of the source files

View File

@@ -0,0 +1,35 @@
//
// Created by kj16609 on 11/8/24.
//
#pragma once
#include <limits>
namespace idhan
{
//! Bit-flag permissions attached to an API access key.
//! Combine with bitwise OR; test membership with bitwise AND.
enum KeyPermissions
{
//! Allows for api access in general
eApiAccess = 1 << 0,
//! Allows for API access beyond localhost
eRemoteAccess = 1 << 1,
//! Allows for requesting thumbnails
eThumbnailAccess = 1 << 2,
//! Allows for retrieval of files
eFileAccess = 1 << 3,
//! Allows for importing files
eImportAccess = 1 << 4,
//! Allows for deleting files
eDeleteAccess = 1 << 5,
//! Allows for deleting records
eRecordDeleteAccess = 1 << 6,
//! Allow for making of new permissions (cannot give permissions it does not have)
eCreateAccessKey = 1 << 7,
//! Allows for creating session keys.
eIssueSessionKeys = 1 << 8,
//! Allows editing the tags attached to a file
eEditFileTags = 1 << 9,
//! Allows editing a file's metadata
eEditFileMetadata = 1 << 10,
//! Gives all API permissions (~0 sets every bit of the underlying int)
eAllPermissions = ~( 0 )
};
} // namespace idhan

View File

@@ -1,40 +1,20 @@
AddFGLExecutable(IDHANServer ${CMAKE_CURRENT_SOURCE_DIR}/src)
set(MIGRATION_SOURCE "${CMAKE_CURRENT_BINARY_DIR}/doMigration.cpp")
file(REMOVE ${MIGRATION_SOURCE})
target_sources(IDHANServer PRIVATE ${MIGRATION_SOURCE})
set(MIGRATION_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src/db/setup/migration)
# Process each migration file
file(GLOB_RECURSE MIGRATIONS "${MIGRATION_DIR}/*.sql")
file(GLOB_RECURSE MIGRATION_SQLS ${MIGRATION_DIR}/*.sql)
file(READ ${MIGRATION_DIR}/migration-template.cpp.unused TEMPLATE_CONTENT)
file(READ ${MIGRATION_DIR}/check-template.cpp.unused CHECK_TEMPLATE_CONTENT)
string(CONFIGURE "${CHECK_TEMPLATE_CONTENT}" CHECK_TEMPLATE_CONTENT)
message("\n\n${TEMPLATE_CONTENT}")
foreach (MIGRATION ${MIGRATIONS})
get_filename_component(FILENAME ${MIGRATION} NAME_WLE)
string(REGEX MATCH "^[0-9]+" MIGRATION_ID ${FILENAME})
string(REGEX MATCH "[A-Za-z].*$" MIGRATION_TABLE ${FILENAME})
string(REPLACE "NEXT_MIGRATION" "${CHECK_TEMPLATE_CONTENT}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
file(READ ${MIGRATION} FILE_CONTENT)
set(FILE_CONTENT "\n${FILE_CONTENT}")
string(REPLACE "\n" "\n\t\t\t" FILE_CONTENT "${FILE_CONTENT}")
set(FILE_CONTENT "${FILE_CONTENT}\n\t\t")
string(REPLACE "MIGRATION_TABLE" "${MIGRATION_TABLE}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
string(REPLACE "MIGRATION_TARGET_ID" "${MIGRATION_ID}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
string(REPLACE "MIGRATION_QUERY" "${FILE_CONTENT}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
endforeach ()
string(REPLACE "NEXT_MIGRATION" "\treturn migration_id;" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
file(WRITE ${MIGRATION_DIR}/doMigration.cpp "${TEMPLATE_CONTENT}")
AddFGLExecutable(IDHANServer ${CMAKE_CURRENT_SOURCE_DIR}/src)
add_custom_command(
OUTPUT ${MIGRATION_SOURCE}
DEPENDS ${MIGRATION_SQLS}
COMMAND ${CMAKE_COMMAND} -DMIGRATION_DIR=${MIGRATION_DIR} -DOUT=${MIGRATION_SOURCE} -P "${CMAKE_CURRENT_SOURCE_DIR}/modules/GenerateMigrations.cmake"
COMMENT "${CMAKE_CURRENT_SOURCE_DIR}/modules/GenerateMigrations.cmake: Generating doMigration.cpp")
target_link_libraries(IDHANServer PUBLIC spdlog fmt drogon)
target_link_libraries(IDHANServer PRIVATE pqxx sqlite3 Qt6::Core)

View File

@@ -0,0 +1,37 @@
# GenerateMigrations.cmake — script-mode (cmake -P) generator.
# Concatenates every numbered migration SQL file in MIGRATION_DIR into a single
# generated C++ source (OUT) by repeatedly expanding the check-template into the
# migration-template's NEXT_MIGRATION slot.
#
# Required definitions: -DMIGRATION_DIR=<dir> -DOUT=<file>
if(NOT DEFINED MIGRATION_DIR OR NOT DEFINED OUT)
	message(FATAL_ERROR "GenerateMigrations.cmake requires -DMIGRATION_DIR=<dir> and -DOUT=<file>")
endif()

message(STATUS "Processing migrations dir ${MIGRATION_DIR}")

# Collect every migration file
file(GLOB_RECURSE MIGRATIONS "${MIGRATION_DIR}/*.sql")

file(READ "${MIGRATION_DIR}/migration-template.cpp.unused" TEMPLATE_CONTENT)
file(READ "${MIGRATION_DIR}/check-template.cpp.unused" CHECK_TEMPLATE_CONTENT)
string(CONFIGURE "${CHECK_TEMPLATE_CONTENT}" CHECK_TEMPLATE_CONTENT)

# Migrations must be emitted in ascending numeric order (natural sort handles
# "2" < "10" correctly, unlike a plain lexicographic sort).
list(SORT MIGRATIONS COMPARE NATURAL)

foreach(MIGRATION IN LISTS MIGRATIONS)
	get_filename_component(FILENAME "${MIGRATION}" NAME_WLE)
	# Filename convention: <digits><table name>.sql — leading digits are the
	# migration id, the alphabetic tail is the target table name.
	string(REGEX MATCH "^[0-9]+" MIGRATION_ID "${FILENAME}")
	string(REGEX MATCH "[A-Za-z].*$" MIGRATION_TABLE "${FILENAME}")
	# Splice a fresh check-template block into the current NEXT_MIGRATION slot.
	string(REPLACE "NEXT_MIGRATION" "${CHECK_TEMPLATE_CONTENT}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
	file(READ "${MIGRATION}" FILE_CONTENT)
	# Re-indent the SQL so it nests readably inside the generated C++ source.
	set(FILE_CONTENT "\n${FILE_CONTENT}")
	string(REPLACE "\n" "\n\t\t\t" FILE_CONTENT "${FILE_CONTENT}")
	set(FILE_CONTENT "${FILE_CONTENT}\n\t\t")
	string(REPLACE "MIGRATION_TABLE" "${MIGRATION_TABLE}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
	string(REPLACE "MIGRATION_TARGET_ID" "${MIGRATION_ID}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
	string(REPLACE "MIGRATION_QUERY" "${FILE_CONTENT}" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")
endforeach()

# Terminate the chain: the final unexpanded slot simply returns the reached id.
string(REPLACE "NEXT_MIGRATION" "\treturn migration_id;" TEMPLATE_CONTENT "${TEMPLATE_CONTENT}")

message(STATUS "Writing generated migration source to ${OUT}")
file(WRITE "${OUT}" "${TEMPLATE_CONTENT}")

View File

@@ -7,7 +7,6 @@
#include <fixme.hpp>
#include "NET_CONSTANTS.hpp"
#include "api/api.hpp"
#include "core/Database.hpp"
#include "drogon/HttpAppFramework.h"
#include "logging/log.hpp"
@@ -53,7 +52,6 @@ namespace idhan
auto& app { drogon::app() };
spdlog::set_level( spdlog::level::debug );
log::trace( "Logging show trace" );
log::debug( "Logging show debug" );
log::info( "Logging show info" );
@@ -68,9 +66,7 @@ namespace idhan
setupCORSSupport();
api::registerApi();
log::server::info( "IDHAN initalization finished" );
log::server::info( "IDHAN initialization finished" );
}
void ServerContext::run()

View File

@@ -0,0 +1,24 @@
//
// Created by kj16609 on 11/8/24.
//
#pragma once
#include "drogon/HttpController.h"
#include "helpers/ResponseCallback.hpp"
namespace idhan::api
{
//! Drogon HTTP controller for IDHAN-native API endpoints.
class IDHANApi : public drogon::HttpController< IDHANApi >
{
//! Handler for "/version"; replies through `callback` (see IDHANApi.cpp).
void version( const drogon::HttpRequestPtr& request, ResponseFunction&& callback );
public:
METHOD_LIST_BEGIN
// Route registration: maps "/version" to IDHANApi::version.
ADD_METHOD_TO( IDHANApi::version, "/version" );
METHOD_LIST_END
};
} // namespace idhan::api

View File

@@ -0,0 +1,16 @@
//
// Created by kj16609 on 11/8/24.
//
#include "IDHANAuth.hpp"
namespace idhan::api
{
//! Middleware entry point. Currently a placeholder pass-through: no access-key
//! or session-key check is performed yet; every request is forwarded unchanged.
void IDHANAuth::invoke(
const drogon::HttpRequestPtr& req, drogon::MiddlewareNextCallback&& nextCb, drogon::MiddlewareCallback&& mcb )
{
// continue down the middleware chain unconditionally (auth check TODO)
nextCb( std::move( mcb ) );
}
} // namespace idhan::api

View File

@@ -0,0 +1,53 @@
//
// Created by kj16609 on 11/8/24.
//
#pragma once
#include "drogon/HttpMiddleware.h"
/**
* @page IDHANAuth IDHAN Authorization
* IDHAN uses a key system to authorize access to the API. There are various ways to do so however.
* First, some terminology.
* - `Access Key`: A key that is set and capable of being used until it's deleted. Permanent life.
* - `Session Key`: A key that is temporarily issued using an access key. Temporary life.
* - `Permissions`: The integer that represents various permissions (masked using `idhan::KeyPermissions`)
*/
/**
*
* @page IDHANAuth Auth Tables
* NOTE(review): this reuses the page label `IDHANAuth` from the block above —
* doxygen treats duplicate page labels as a conflict; consider a distinct label
* or folding this into the first page as a section.
* There are currently 4 tables that deal with authorization for the API:
* `access_keys`, `session_keys`, `hydrus_keys`, and `access_domains`
*
* @subpage access_keys "Access Keys Table"
* This table contains the access key, its internal id, and the @ref idhan::KeyPermissions "permissions" applied to it.
*
* Generation query:\n
* @code
CREATE TABLE access_keys
(
access_key_id SERIAL PRIMARY KEY,
access_key BYTEA UNIQUE NOT NULL,
permissions INT NOT NULL DEFAULT 0
);
* @endcode
*
*
*/
namespace idhan::api
{
//! Drogon middleware intended to enforce key-based authorization
//! (see invoke() in IDHANAuth.cpp — currently a pass-through).
class IDHANAuth : public drogon::HttpMiddleware< IDHANAuth >
{
public:
IDHANAuth() = default;
//! Inspects `req` and either forwards to `nextCb` or short-circuits via `mcb`.
void invoke(
const drogon::HttpRequestPtr& req,
drogon::MiddlewareNextCallback&& nextCb,
drogon::MiddlewareCallback&& mcb ) override;
};
} // namespace idhan::api

View File

@@ -1,28 +0,0 @@
//
// Created by kj16609 on 11/6/24.
//
#pragma once
#include "drogon/HttpRequest.h"
#include "helpers/ResponseCallback.hpp"
namespace idhan::api
{
//! /
void getIndex( const drogon::HttpRequestPtr& request, ResponseFunction&& callback );
//! GET /version
void getVersion( const drogon::HttpRequestPtr& request, ResponseFunction&& callback );
inline void registerApi()
{
auto& app { drogon::app() };
// free-access API.
// app.registerHandler( "/", &getIndex );
app.registerHandler( "/version", &getVersion );
//Anything past this point should be blocked unless an access key is given
}
} // namespace idhan::api

View File

@@ -1,68 +0,0 @@
//
// Created by kj16609 on 11/6/24.
//
#ifndef IDHAN_VERSION
#define IDHAN_VERSION "SOURCE TESTBUILD"
#endif
#ifndef IDHAN_API_VERSION
#define IDHAN_API_VERSION "SOURCE TESTBUILD"
#endif
#ifndef HYDRUS_API_VERSION
#define HYDRUS_API_VERSION "SOURCE TESTBUILD"
#endif
#include <filesystem>
#include <fstream>
#include "drogon/HttpRequest.h"
#include "helpers/ResponseCallback.hpp"
#include "logging/log.hpp"
namespace idhan::api
{
/*
void getIndex( const drogon::HttpRequestPtr& request, ResponseFunction&& callback )
{
log::debug( "/" );
const auto working_dir { std::filesystem::current_path() };
const auto filepath { working_dir / "pages" / "index.html" };
if ( auto ifs = std::ifstream( filepath ); ifs )
{
std::vector< char > buffer {};
buffer.resize( std::filesystem::file_size( filepath ) );
ifs.read( buffer.data(), buffer.size() );
auto response { drogon::HttpResponse::newHttpResponse() };
response->setStatusCode( drogon::k200OK );
response->setContentTypeCode( drogon::CT_TEXT_HTML );
std::string_view str { buffer.data(), buffer.size() };
response->setBody( std::string( str ) );
callback( response );
}
}
*/
void getVersion( const drogon::HttpRequestPtr& request, ResponseFunction&& callback )
{
log::debug( "/version" );
Json::Value json;
json[ "idhan_version" ] = IDHAN_VERSION;
json[ "idhan_api_version" ] = IDHAN_API_VERSION;
json[ "hydrus_api_version" ] = HYDRUS_API_VERSION;
callback( drogon::HttpResponse::newHttpJsonResponse( json ) );
}
} // namespace idhan::api

View File

@@ -0,0 +1,25 @@
//
// Created by kj16609 on 11/8/24.
//
#include "IDHANApi.hpp"
#include "hyapi/constants/hydrus_version.hpp"
#include "logging/log.hpp"
#include "versions.hpp"
namespace idhan::api
{
//! "/version" endpoint: responds with a JSON object holding the IDHAN build
//! version, the IDHAN API version, and the Hydrus API version IDHAN mimics.
void IDHANApi::version( const drogon::HttpRequestPtr& request, ResponseFunction&& callback )
{
log::debug( "/version" );
Json::Value json;
json[ "idhan_version" ] = IDHAN_VERSION;
json[ "idhan_api_version" ] = IDHAN_API_VERSION;
json[ "hydrus_api_version" ] = HYDRUS_MIMICED_API_VERSION;
callback( drogon::HttpResponse::newHttpJsonResponse( json ) );
}
} // namespace idhan::api

View File

@@ -15,8 +15,6 @@ namespace idhan
{
log::info( "Starting inital table setup" );
db::updateMigrations( tx );
tx.commit();
}
@@ -27,19 +25,12 @@ namespace idhan
{
log::info( "Postgres connection made: {}", connection.dbname() );
// Determine if we should do our initial setup (if the idhan_info table is missing then we should do our setup)
{
pqxx::nontransaction tx { connection };
pqxx::nontransaction tx { connection };
db::destroyTables( tx );
// This function does nothing if the proper define is not enabled.
// This should be used in order to make it easy to do fresh testing.
// This function is a NOOP unless a define is enabled for it by default.
db::destroyTables( tx );
if ( !db::tableExists( tx, "idhan_info" ) )
{
initalSetup( tx );
}
}
db::updateMigrations( tx );
log::info( "Database loading finished" );
}

View File

@@ -44,8 +44,8 @@ namespace idhan::db
tx.exec_params(
R"(
INSERT INTO idhan_info (table_name, last_migration_id, queries)
VALUES( $1, $2, $3 )
ON CONFLICT DO UPDATE SET
VALUES( $1, $2, ARRAY[$3] )
ON CONFLICT (table_name) DO UPDATE SET
queries = idhan_info.queries || EXCLUDED.queries,
last_migration_id = EXCLUDED.last_migration_id;)",
name,

View File

@@ -0,0 +1,5 @@
CREATE TABLE url_domains
(
url_domain_id SERIAL PRIMARY KEY,
url_domain TEXT UNIQUE NOT NULL
);

View File

@@ -0,0 +1,6 @@
CREATE TABLE urls
(
url_id SERIAL PRIMARY KEY,
url_domain_id INTEGER NOT NULL REFERENCES url_domains (url_domain_id),
url TEXT UNIQUE NOT NULL
);

View File

@@ -0,0 +1,6 @@
CREATE TABLE url_map
(
record_id INTEGER REFERENCES records (record_id),
url_id INTEGER REFERENCES urls (url_id),
UNIQUE (record_id, url_id) -- Simply to prevent duplicates of the same thing
);

View File

@@ -0,0 +1,7 @@
CREATE TABLE file_clusters
(
cluster_id SMALLSERIAL PRIMARY KEY,
cluster_path TEXT UNIQUE NOT NULL,
percentage_allowed SMALLINT NOT NULL,
readonly BOOLEAN NOT NULL
);

View File

@@ -0,0 +1,8 @@
CREATE TABLE file_metadata
(
cluster_id SMALLINT REFERENCES file_clusters (cluster_id),
record_id INTEGER REFERENCES records (record_id),
file_size BIGINT,
obtained TIMESTAMP WITHOUT TIME ZONE,
UNIQUE (record_id)
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE deleted_files
(
record_id INTEGER REFERENCES records (record_id),
deleted_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE file_notes
(
record_id SERIAL REFERENCES records (record_id),
note TEXT NOT NULL
);

View File

@@ -0,0 +1,6 @@
CREATE TABLE mime
(
mime_id SERIAL PRIMARY KEY,
http_mime TEXT UNIQUE NOT NULL,
best_extension TEXT NOT NULL
);

View File

@@ -0,0 +1,6 @@
CREATE TABLE access_keys
(
access_key_id SERIAL PRIMARY KEY,
access_key BYTEA UNIQUE NOT NULL,
permissions INT NOT NULL DEFAULT 0
);

View File

@@ -0,0 +1,6 @@
CREATE TABLE session_keys
(
issuer_id INTEGER REFERENCES access_keys (access_key_id) NOT NULL,
session_key BYTEA UNIQUE NOT NULL,
valid_until TIMESTAMP WITHOUT TIME ZONE NOT NULL
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE hydrus_keys
(
access_key_id INTEGER NOT NULL REFERENCES access_keys (access_key_id),
permissions JSON NOT NULL
);

View File

@@ -0,0 +1,6 @@
CREATE TABLE access_domains
(
access_key_id INTEGER NOT NULL REFERENCES access_keys (access_key_id),
tag_domains INTEGER [] NOT NULL,
file_domains INTEGER [] NOT NULL
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE tag_namespaces
(
namespace_id SERIAL PRIMARY KEY,
namespace_text TEXT UNIQUE NOT NULL
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE tag_subtags
(
subtag_id SERIAL PRIMARY KEY,
subtag_text TEXT UNIQUE NOT NULL
);

View File

@@ -0,0 +1,5 @@
CREATE TABLE tag_domains
(
tag_domain_id SMALLSERIAL PRIMARY KEY,
domain_name TEXT UNIQUE NOT NULL
);

View File

@@ -0,0 +1,7 @@
CREATE TABLE tags
(
tag_id BIGSERIAL PRIMARY KEY,
namespace_id INTEGER REFERENCES tag_namespaces (namespace_id),
subtag_id INTEGER REFERENCES tag_subtags (subtag_id),
UNIQUE (namespace_id, subtag_id)
);

View File

@@ -0,0 +1,7 @@
CREATE TABLE tag_aliases
(
alias_id INTEGER REFERENCES tags (tag_id),
aliased_id INTEGER REFERENCES tags (tag_id),
domain_id SMALLINT REFERENCES tag_domains (tag_domain_id),
UNIQUE (alias_id, aliased_id, domain_id)
);

View File

@@ -0,0 +1,7 @@
CREATE TABLE tag_parents
(
parent_id INTEGER REFERENCES tags (tag_id),
child_id INTEGER REFERENCES tags (tag_id),
domain_id SMALLINT REFERENCES tag_domains (tag_domain_id),
UNIQUE (parent_id, child_id, domain_id)
);

View File

@@ -0,0 +1,7 @@
CREATE TABLE tag_siblings
(
older_id INTEGER REFERENCES tags (tag_id),
younger_id INTEGER REFERENCES tags (tag_id),
domain_id SMALLINT REFERENCES tag_domains (tag_domain_id),
UNIQUE (older_id, younger_id, domain_id)
);

View File

@@ -1,7 +1,10 @@
if( migration_id == MIGRATION_TARGET_ID )
if( migration_id <= MIGRATION_TARGET_ID )
{
constexpr std::string_view query { R"(MIGRATION_QUERY)" };
log::debug("Performing migration {}->{}", migration_id, MIGRATION_TARGET_ID);
migration_id = MIGRATION_TARGET_ID;
tx.exec( query );
addTableToInfo(tx, "MIGRATION_TABLE", query, MIGRATION_TARGET_ID);
}

View File

@@ -7,6 +7,7 @@
#include <cstdint>
#include "db/setup/management.hpp"
#include "logging/log.hpp"
namespace idhan::db
{

View File

@@ -2,10 +2,15 @@
// Created by kj16609 on 11/7/24.
//
#include "migrations.hpp"
#include <pqxx/nontransaction>
#include <pqxx/pqxx>
#include <cstdint>
#include "db/setup/management.hpp"
namespace idhan::db
{
@@ -14,6 +19,17 @@ namespace idhan::db
std::size_t current_id { 0 };
// attempt to get the most recent update id
if ( tableExists( tx, "idhan_info" ) )
{
auto ret { tx.exec( "SELECT last_migration_id FROM idhan_info ORDER BY last_migration_id DESC limit 1" ) };
if ( ret.size() > 0 )
{
current_id = ret[ 0 ][ 0 ].as< std::uint32_t >() + 1;
}
}
doMigration( tx, current_id );
}
} // namespace idhan::db

View File

@@ -2,7 +2,7 @@
// Created by kj16609 on 11/7/24.
//
#pragma once
#include <pqxx/nontransaction.hxx>
#include <pqxx/nontransaction>
#include <cstddef>

View File

@@ -1,233 +0,0 @@
//
// Created by kj16609 on 9/8/24.
//
#include <pqxx/nontransaction>
#include <array>
#include <string_view>
#include <tuple>
#include "logging/log.hpp"
#include "management.hpp"
namespace idhan::db
{
// Inital tables
// clang-format off
constexpr std::array< std::tuple< std::string_view, std::string_view >, 20 > table_creation_sql {
{
{
"idhan_info",
R"(
CREATE TABLE idhan_info (
table_version INTEGER NOT NULL,
table_name TEXT UNIQUE NOT NULL,
creation_query TEXT NOT NULL
)
)"
},
{
"records",
R"(
CREATE TABLE records (
record_id SERIAL PRIMARY KEY,
sha256 BYTEA UNIQUE NOT NULL,
creation_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
)
)"
},
{
"file_domains",
R"(
CREATE TABLE file_domains (
)"
},
/*======================== TAGS =======================================*/
{
"tag_namespaces",
R"(
CREATE TABLE tag_namespaces (
namespace_id SERIAL PRIMARY KEY,
namespace_text TEXT UNIQUE NOT NULL
)
)"
},
{
"tag_subtags",
R"(
CREATE TABLE tag_subtags (
subtag_id SERIAL PRIMARY KEY,
subtag_text TEXT UNIQUE NOT NULL
)
)"
},
{
"tag_domains",
R"(
CREATE TABLE tag_domains (
tag_domain_id SMALLSERIAL PRIMARY KEY,
domain_name TEXT UNIQUE NOT NULL
)
)"
},
{
"tags",
R"(
CREATE TABLE tags (
tag_id BIGSERIAL PRIMARY KEY,
namespace_id INTEGER REFERENCES tag_namespaces(namespace_id),
subtag_id INTEGER REFERENCES tag_subtags(subtag_id),
UNIQUE(namespace_id, subtag_id)
)
)"
},
{
"tag_aliases",
R"(
CREATE TABLE tag_aliases (
alias_id INTEGER REFERENCES tags(tag_id),
aliased_id INTEGER REFERENCES tags(tag_id) UNIQUE,
domain_id SMALLINT REFERENCES tag_domains(tag_domain_id)
)
)"
},
{
"tag_parents",
R"(
CREATE TABLE tag_parents (
parent_id INTEGER REFERENCES tags(tag_id),
child_id INTEGER REFERENCES tags(tag_id),
domain_id SMALLINT REFERENCES tag_domains(tag_domain_id),
UNIQUE(domain_id, parent_id, child_id)
)
)"
},
{
"tag_siblings",
R"(
CREATE TABLE tag_siblings (
older_id INTEGER REFERENCES tags(tag_id),
younger_id INTEGER REFERENCES tags(tag_id),
domain_id SMALLINT REFERENCES tag_domains(tag_domain_id),
UNIQUE(domain_id, older_id, younger_id)
)
)"
},
{
"tag_mappings",
R"(
CREATE TABLE tag_mappings (
record_id INTEGER REFERENCES records(record_id),
tag_id INTEGER REFERENCES tags(tag_id),
domain_id SMALLINT REFERENCES tag_domains(tag_domain_id),
UNIQUE(domain_id, record_id, tag_id)
)
)"
},
/*========================== URLS =====================================*/
{
"url_domains",
R"(
CREATE TABLE url_domains (
url_domain_id SERIAL PRIMARY KEY,
url_domain TEXT UNIQUE NOT NULL
)
)"
},
{
"urls",
R"(
CREATE TABLE urls (
url_id SERIAL PRIMARY KEY,
url_domain_id INTEGER NOT NULL REFERENCES url_domains(url_domain_id),
url TEXT UNIQUE NOT NULL
)
)"
},
{
"url_map",
R"(
CREATE TABLE url_map (
record_id INTEGER REFERENCES records(record_id),
url_id INTEGER REFERENCES urls(url_id)
)
)"
},
/*========================== CLUSTERS ==================================*/
{
"file_clusters",
R"(
CREATE TABLE file_clusters (
cluster_id SMALLSERIAL PRIMARY KEY,
cluster_path TEXT UNIQUE NOT NULL
)
)"
},
{
"file_meta",
R"(
CREATE TABLE cluster_info (
cluster_id SMALLINT REFERENCES file_clusters(cluster_id),
record_id INTEGER REFERENCES records(record_id),
file_size BIGINT,
obtained TIMESTAMP WITHOUT TIME ZONE
)
)"
},
/*========================== META =====================================*/
{
"deleted_files",
R"(
CREATE TABLE deleted_files (
record_id INTEGER REFERENCES records(record_id),
deleted_time TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP
)
)"
},
{
"file_notes",
R"(
CREATE TABLE file_notes (
record_id SERIAL REFERENCES records(record_id),
note TEXT NOT NULL
)
)"
},
/*========================== MIME ======================================*/
{
"mime",
R"(
CREATE TABLE mime (
mime_id SERIAL PRIMARY KEY,
http_mime TEXT UNIQUE NOT NULL,
best_extension TEXT NOT NULL
)
)"
},
{
"record_mime",
R"(
CREATE TABLE record_mime (
record_id INTEGER REFERENCES records(record_id),
mime_id INTEGER REFERENCES mime(mime_id),
UNIQUE(record_id)
)
)"
}
}
};
// clang-format on
/*
constexpr std::array< std::string_view, 2 > table_creation_sql
{
// Tag creation/Selection
R"()",
R"()"
};
*/
} // namespace idhan::db

View File

@@ -6,9 +6,12 @@
#include "ConnectionArguments.hpp"
#include "ServerContext.hpp"
#include "logging/log.hpp"
int main( int argc, char** argv )
{
spdlog::set_level( spdlog::level::debug );
idhan::ConnectionArguments arguments {};
//arguments.hydrus_info.hydrus_db_path = "/home/kj16609/.local/share/hydrus/db/";
arguments.user = "idhan";