Labrinth ID cleanup (#3681)

* Put all ID types in the labrinth::models::ids module and reduce code duplication among them

* Rewrite labrinth::database::models::ids and rename most DB interface ID structs to be prefixed with DB

* Run sqlx prepare

---------

Co-authored-by: Alejandro González <7822554+AlexTMjugador@users.noreply.github.com>
Josiah Glosson 2025-05-22 03:34:36 -05:00 committed by GitHub
parent c6022ad977
commit 9e527ff141
111 changed files with 1477 additions and 1965 deletions
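
The core of the rename is that each ID now has a distinct name per layer: the API-facing types in labrinth::models::ids keep their plain names and wrap a u64, while the database-facing structs in labrinth::database::models::ids wrap an i64 (Postgres BIGINT) and are prefixed with DB (DBProjectId, DBUserId, and so on). Below is a minimal, illustrative sketch of one such pair and the conversions between them; the real structs also derive sqlx::Type, Serialize/Deserialize, and more.

```rust
// Illustrative stand-ins for models::ids::ProjectId (API layer) and
// database::models::ids::DBProjectId (DB layer); derives trimmed for brevity.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ProjectId(pub u64);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct DBProjectId(pub i64);

impl From<ProjectId> for DBProjectId {
    fn from(id: ProjectId) -> Self {
        DBProjectId(id.0 as i64)
    }
}

impl From<DBProjectId> for ProjectId {
    fn from(id: DBProjectId) -> Self {
        ProjectId(id.0 as u64)
    }
}

fn main() {
    // Route handlers work with the API-layer ID and convert at the database boundary.
    let api_id = ProjectId(7);
    let db_id: DBProjectId = api_id.into();
    assert_eq!(ProjectId::from(db_id), api_id);
}
```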

Cargo.lock (generated)

@ -4352,6 +4352,7 @@ dependencies = [
"maxminddb",
"meilisearch-sdk",
"murmur2",
"paste",
"prometheus",
"rand 0.8.5",
"rand_chacha 0.3.1",


@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id \n FROM versions v\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE\n WHERE v.id = $1\n ",
"query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id\n FROM versions v\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE\n WHERE v.id = $1\n ",
"describe": {
"columns": [
{
@ -79,5 +79,5 @@
false
]
},
"hash": "06bf1b34b70f5e61bf619c4d7706d07d6db413751ecab86896a708c8539e38b6"
"hash": "0fcbb92442bdab97fd1b44a0eecb28c05a4c6d482b3fa7874902ee8e846d20b0"
}


@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable \n FROM loader_field_enums lfe\n WHERE lfe.enum_name = $1\n ORDER BY lfe.ordering ASC\n ",
"query": "\n SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable\n FROM loader_field_enums lfe\n WHERE lfe.enum_name = $1\n ORDER BY lfe.ordering ASC\n ",
"describe": {
"columns": [
{
@ -36,5 +36,5 @@
false
]
},
"hash": "1243d13d622a9970240c8f26b5031b4c68d08607f7a0142b662b53eb05b4723a"
"hash": "282ec0ee4a4ef739fc24af40d2c9be2a335ea9a32a1d19a450aef02f1c58334e"
}


@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,\n accepted, payouts_split, role,\n ordering, user_id\n \n FROM team_members\n WHERE (team_id = $1 AND user_id = $2)\n ORDER BY ordering\n ",
"query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,\n accepted, payouts_split, role,\n ordering, user_id\n\n FROM team_members\n WHERE (team_id = $1 AND user_id = $2)\n ORDER BY ordering\n ",
"describe": {
"columns": [
{
@ -79,5 +79,5 @@
false
]
},
"hash": "9abdd9a2018e7bfe26836dd5463ba0923ef0a76c32ca258faf55fc3301c567bf"
"hash": "2d2cb03a63c4692080dd58f88f34ac63b20f89c9e7d6f520865133de60b9e911"
}


@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS(SELECT 1 FROM payouts WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
},
"hash": "46c32719ad68b667e816f58f55b5b4829bff8f0fbb039a0eda7bf08a430ba466"
}


@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT m.id AS pid, NULL AS oid\n FROM mods m\n WHERE m.team_id = $1\n \n UNION ALL\n \n SELECT NULL AS pid, o.id AS oid\n FROM organizations o\n WHERE o.team_id = $1 \n ",
"query": "\n SELECT m.id AS pid, NULL AS oid\n FROM mods m\n WHERE m.team_id = $1\n\n UNION ALL\n\n SELECT NULL AS pid, o.id AS oid\n FROM organizations o\n WHERE o.team_id = $1\n ",
"describe": {
"columns": [
{
@ -24,5 +24,5 @@
null
]
},
"hash": "061a3e43df9464263aaf1555a27c1f4b6a0f381282f4fa75cc13b1d354857578"
"hash": "a91d793778d92844cfe338007e8f25daca0d711e0569b99f14d820ddb1685405"
}


@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT l.id id, l.loader loader, l.icon icon, l.metadata metadata,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games\n FROM loaders l \n LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id\n LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id\n LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id\n LEFT OUTER JOIN games g ON lptg.game_id = g.id\n GROUP BY l.id;\n ",
"query": "\n SELECT l.id id, l.loader loader, l.icon icon, l.metadata metadata,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games\n FROM loaders l\n LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id\n LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id\n LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id\n LEFT OUTER JOIN games g ON lptg.game_id = g.id\n GROUP BY l.id;\n ",
"describe": {
"columns": [
{
@ -46,5 +46,5 @@
null
]
},
"hash": "cdb2f18f826097f0f17a1f7295d7c45eb1987b63c1a21666c6ca60c52217ba4d"
"hash": "ab9b4b383ce8431214eb26abde734d36eff6902ae55b7ed4d89a10c4d69506a9"
}


@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO collections (\n id, user_id, name, description, \n created, icon_url, raw_icon_url, status\n )\n VALUES (\n $1, $2, $3, $4, \n $5, $6, $7, $8\n )\n ",
"query": "\n INSERT INTO collections (\n id, user_id, name, description,\n created, icon_url, raw_icon_url, status\n )\n VALUES (\n $1, $2, $3, $4,\n $5, $6, $7, $8\n )\n ",
"describe": {
"columns": [],
"parameters": {
@ -17,5 +17,5 @@
},
"nullable": []
},
"hash": "8d7746fedec4c2339352a3acd934b13c351b8a1fdb05bf982bab1a5b7ed17f3b"
"hash": "e5c4275ebfa60938505ef8b32b54efa569c6b516b547faec9a10943ddef6266e"
}


@ -31,6 +31,7 @@ futures.workspace = true
futures-util.workspace = true
async-trait.workspace = true
dashmap.workspace = true
paste.workspace = true
meilisearch-sdk = { workspace = true, features = ["reqwest"] }
rust-s3.workspace = true


@ -90,7 +90,7 @@ pub async fn filter_visible_project_ids(
user_option: &Option<User>,
pool: &PgPool,
hide_unlisted: bool,
) -> Result<Vec<crate::database::models::ProjectId>, ApiError> {
) -> Result<Vec<crate::database::models::DBProjectId>, ApiError> {
let mut return_projects = Vec::new();
let mut check_projects = Vec::new();
@ -129,11 +129,11 @@ pub async fn filter_enlisted_projects_ids(
projects: Vec<&Project>,
user_option: &Option<User>,
pool: &PgPool,
) -> Result<Vec<crate::database::models::ProjectId>, ApiError> {
) -> Result<Vec<crate::database::models::DBProjectId>, ApiError> {
let mut return_projects = vec![];
if let Some(user) = user_option {
let user_id: models::ids::UserId = user.id.into();
let user_id: models::ids::DBUserId = user.id.into();
use futures::TryStreamExt;
@ -154,7 +154,7 @@ pub async fn filter_enlisted_projects_ids(
.iter()
.filter_map(|x| x.organization_id.map(|x| x.0))
.collect::<Vec<_>>(),
user_id as database::models::ids::UserId,
user_id as database::models::ids::DBUserId,
)
.fetch(pool)
.map_ok(|row| {
@ -236,7 +236,7 @@ pub async fn filter_visible_version_ids(
user_option: &Option<User>,
pool: &PgPool,
redis: &RedisPool,
) -> Result<Vec<crate::database::models::VersionId>, ApiError> {
) -> Result<Vec<crate::database::models::DBVersionId>, ApiError> {
let mut return_versions = Vec::new();
let mut check_versions = Vec::new();
@ -291,7 +291,7 @@ pub async fn filter_enlisted_version_ids(
user_option: &Option<User>,
pool: &PgPool,
redis: &RedisPool,
) -> Result<Vec<crate::database::models::VersionId>, ApiError> {
) -> Result<Vec<crate::database::models::DBVersionId>, ApiError> {
let mut return_versions = Vec::new();
// Get project ids of versions


@ -116,7 +116,7 @@ pub enum OAuthErrorType {
AuthenticationError(#[from] AuthenticationError),
#[error("Client {} has no redirect URIs specified", .client_id.0)]
ClientMissingRedirectURI {
client_id: crate::database::models::OAuthClientId,
client_id: crate::database::models::DBOAuthClientId,
},
#[error(
"The provided redirect URI did not match any configured in the client"
@ -133,7 +133,7 @@ pub enum OAuthErrorType {
#[error("The provided flow id was invalid")]
InvalidAcceptFlowId,
#[error("The provided client id was invalid")]
InvalidClientId(crate::database::models::OAuthClientId),
InvalidClientId(crate::database::models::DBOAuthClientId),
#[error("The provided ID could not be decoded: {0}")]
MalformedId(#[from] DecodingError),
#[error("Failed to authenticate client")]


@ -6,7 +6,7 @@ use crate::database::models::oauth_client_authorization_item::OAuthClientAuthori
use crate::database::models::oauth_client_item::OAuthClient as DBOAuthClient;
use crate::database::models::oauth_token_item::OAuthAccessToken;
use crate::database::models::{
OAuthClientAuthorizationId, generate_oauth_access_token_id,
DBOAuthClientAuthorizationId, generate_oauth_access_token_id,
generate_oauth_client_authorization_id,
};
use crate::database::redis::RedisPool;
@ -417,9 +417,9 @@ fn generate_access_token() -> String {
}
async fn init_oauth_code_flow(
user_id: crate::database::models::UserId,
user_id: crate::database::models::DBUserId,
client_id: OAuthClientId,
authorization_id: OAuthClientAuthorizationId,
authorization_id: DBOAuthClientAuthorizationId,
scopes: Scopes,
redirect_uris: OAuthRedirectUris,
state: Option<String>,


@ -1,6 +1,6 @@
use super::errors::OAuthError;
use crate::auth::oauth::OAuthErrorType;
use crate::database::models::OAuthClientId;
use crate::database::models::DBOAuthClientId;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
@ -16,7 +16,7 @@ impl ValidatedRedirectUri {
pub fn validate<'a>(
to_validate: &Option<String>,
validate_against: impl IntoIterator<Item = &'a str> + Clone,
client_id: OAuthClientId,
client_id: DBOAuthClientId,
) -> Result<Self, OAuthError> {
if let Some(first_client_redirect_uri) =
validate_against.clone().into_iter().next()
@ -61,7 +61,7 @@ mod tests {
let validated = ValidatedRedirectUri::validate(
&None,
validate_against.clone(),
OAuthClientId(0),
DBOAuthClientId(0),
)
.unwrap();
@ -82,7 +82,7 @@ mod tests {
let validated = ValidatedRedirectUri::validate(
&Some(to_validate.clone()),
validate_against,
OAuthClientId(0),
DBOAuthClientId(0),
)
.unwrap();
@ -97,7 +97,7 @@ mod tests {
let validated = ValidatedRedirectUri::validate(
&Some(to_validate),
validate_against,
OAuthClientId(0),
DBOAuthClientId(0),
);
assert!(validated.is_err_and(|e| matches!(


@ -1,5 +1,5 @@
use crate::database::models::{
ChargeId, DatabaseError, ProductPriceId, UserId, UserSubscriptionId,
DBChargeId, DBProductPriceId, DBUserId, DBUserSubscriptionId, DatabaseError,
};
use crate::models::billing::{
ChargeStatus, ChargeType, PaymentPlatform, PriceDuration,
@ -8,9 +8,9 @@ use chrono::{DateTime, Utc};
use std::convert::{TryFrom, TryInto};
pub struct ChargeItem {
pub id: ChargeId,
pub user_id: UserId,
pub price_id: ProductPriceId,
pub id: DBChargeId,
pub user_id: DBUserId,
pub price_id: DBProductPriceId,
pub amount: i64,
pub currency_code: String,
pub status: ChargeStatus,
@ -18,13 +18,13 @@ pub struct ChargeItem {
pub last_attempt: Option<DateTime<Utc>>,
pub type_: ChargeType,
pub subscription_id: Option<UserSubscriptionId>,
pub subscription_id: Option<DBUserSubscriptionId>,
pub subscription_interval: Option<PriceDuration>,
pub payment_platform: PaymentPlatform,
pub payment_platform_id: Option<String>,
pub parent_charge_id: Option<ChargeId>,
pub parent_charge_id: Option<DBChargeId>,
// Net is always in USD
pub net: Option<i64>,
@ -53,22 +53,22 @@ impl TryFrom<ChargeResult> for ChargeItem {
fn try_from(r: ChargeResult) -> Result<Self, Self::Error> {
Ok(ChargeItem {
id: ChargeId(r.id),
user_id: UserId(r.user_id),
price_id: ProductPriceId(r.price_id),
id: DBChargeId(r.id),
user_id: DBUserId(r.user_id),
price_id: DBProductPriceId(r.price_id),
amount: r.amount,
currency_code: r.currency_code,
status: ChargeStatus::from_string(&r.status),
due: r.due,
last_attempt: r.last_attempt,
type_: ChargeType::from_string(&r.charge_type),
subscription_id: r.subscription_id.map(UserSubscriptionId),
subscription_id: r.subscription_id.map(DBUserSubscriptionId),
subscription_interval: r
.subscription_interval
.map(|x| PriceDuration::from_string(&x)),
payment_platform: PaymentPlatform::from_string(&r.payment_platform),
payment_platform_id: r.payment_platform_id,
parent_charge_id: r.parent_charge_id.map(ChargeId),
parent_charge_id: r.parent_charge_id.map(DBChargeId),
net: r.net,
})
}
@ -100,7 +100,7 @@ impl ChargeItem {
pub async fn upsert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ChargeId, DatabaseError> {
) -> Result<DBChargeId, DatabaseError> {
sqlx::query!(
r#"
INSERT INTO charges (id, user_id, price_id, amount, currency_code, charge_type, status, due, last_attempt, subscription_id, subscription_interval, payment_platform, payment_platform_id, parent_charge_id, net)
@ -144,7 +144,7 @@ impl ChargeItem {
}
pub async fn get(
id: ChargeId,
id: DBChargeId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ChargeItem>, DatabaseError> {
let id = id.0;
@ -156,7 +156,7 @@ impl ChargeItem {
}
pub async fn get_from_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ChargeItem>, DatabaseError> {
let user_id = user_id.0;
@ -174,7 +174,7 @@ impl ChargeItem {
}
pub async fn get_children(
charge_id: ChargeId,
charge_id: DBChargeId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ChargeItem>, DatabaseError> {
let charge_id = charge_id.0;
@ -192,7 +192,7 @@ impl ChargeItem {
}
pub async fn get_open_subscription(
user_subscription_id: UserSubscriptionId,
user_subscription_id: DBUserSubscriptionId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ChargeItem>, DatabaseError> {
let user_subscription_id = user_subscription_id.0;
@ -255,7 +255,7 @@ impl ChargeItem {
}
pub async fn remove(
id: ChargeId,
id: DBChargeId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
sqlx::query!(


@ -12,19 +12,19 @@ const COLLECTIONS_NAMESPACE: &str = "collections";
#[derive(Clone)]
pub struct CollectionBuilder {
pub collection_id: CollectionId,
pub user_id: UserId,
pub collection_id: DBCollectionId,
pub user_id: DBUserId,
pub name: String,
pub description: Option<String>,
pub status: CollectionStatus,
pub projects: Vec<ProjectId>,
pub projects: Vec<DBProjectId>,
}
impl CollectionBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<CollectionId, DatabaseError> {
) -> Result<DBCollectionId, DatabaseError> {
let collection_struct = Collection {
id: self.collection_id,
name: self.name,
@ -45,8 +45,8 @@ impl CollectionBuilder {
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Collection {
pub id: CollectionId,
pub user_id: UserId,
pub id: DBCollectionId,
pub user_id: DBUserId,
pub name: String,
pub description: Option<String>,
pub created: DateTime<Utc>,
@ -55,7 +55,7 @@ pub struct Collection {
pub raw_icon_url: Option<String>,
pub color: Option<u32>,
pub status: CollectionStatus,
pub projects: Vec<ProjectId>,
pub projects: Vec<DBProjectId>,
}
impl Collection {
@ -66,16 +66,16 @@ impl Collection {
sqlx::query!(
"
INSERT INTO collections (
id, user_id, name, description,
id, user_id, name, description,
created, icon_url, raw_icon_url, status
)
VALUES (
$1, $2, $3, $4,
$1, $2, $3, $4,
$5, $6, $7, $8
)
",
self.id as CollectionId,
self.user_id as UserId,
self.id as DBCollectionId,
self.user_id as DBUserId,
&self.name,
self.description.as_ref(),
self.created,
@ -104,7 +104,7 @@ impl Collection {
}
pub async fn remove(
id: CollectionId,
id: DBCollectionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -116,7 +116,7 @@ impl Collection {
DELETE FROM collections_mods
WHERE collection_id = $1
",
id as CollectionId,
id as DBCollectionId,
)
.execute(&mut **transaction)
.await?;
@ -126,7 +126,7 @@ impl Collection {
DELETE FROM collections
WHERE id = $1
",
id as CollectionId,
id as DBCollectionId,
)
.execute(&mut **transaction)
.await?;
@ -140,7 +140,7 @@ impl Collection {
}
pub async fn get<'a, 'b, E>(
id: CollectionId,
id: DBCollectionId,
executor: E,
redis: &RedisPool,
) -> Result<Option<Collection>, DatabaseError>
@ -153,7 +153,7 @@ impl Collection {
}
pub async fn get_many<'a, E>(
collection_ids: &[CollectionId],
collection_ids: &[DBCollectionId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Collection>, DatabaseError>
@ -181,8 +181,8 @@ impl Collection {
.fetch(exec)
.try_fold(DashMap::new(), |acc, m| {
let collection = Collection {
id: CollectionId(m.id),
user_id: UserId(m.user_id),
id: DBCollectionId(m.id),
user_id: DBUserId(m.user_id),
name: m.name.clone(),
description: m.description.clone(),
icon_url: m.icon_url.clone(),
@ -195,7 +195,7 @@ impl Collection {
.mods
.unwrap_or_default()
.into_iter()
.map(ProjectId)
.map(DBProjectId)
.collect(),
};
@ -213,7 +213,7 @@ impl Collection {
}
pub async fn clear_cache(
id: CollectionId,
id: DBCollectionId,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;


@ -17,37 +17,37 @@ const FLOWS_NAMESPACE: &str = "flows";
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Flow {
OAuth {
user_id: Option<UserId>,
user_id: Option<DBUserId>,
url: String,
provider: AuthProvider,
},
Login2FA {
user_id: UserId,
user_id: DBUserId,
},
Initialize2FA {
user_id: UserId,
user_id: DBUserId,
secret: String,
},
ForgotPassword {
user_id: UserId,
user_id: DBUserId,
},
ConfirmEmail {
user_id: UserId,
user_id: DBUserId,
confirm_email: String,
},
MinecraftAuth,
InitOAuthAppApproval {
user_id: UserId,
client_id: OAuthClientId,
existing_authorization_id: Option<OAuthClientAuthorizationId>,
user_id: DBUserId,
client_id: DBOAuthClientId,
existing_authorization_id: Option<DBOAuthClientAuthorizationId>,
scopes: Scopes,
redirect_uris: OAuthRedirectUris,
state: Option<String>,
},
OAuthAuthorizationCodeSupplied {
user_id: UserId,
client_id: OAuthClientId,
authorization_id: OAuthClientAuthorizationId,
user_id: DBUserId,
client_id: DBOAuthClientId,
authorization_id: DBOAuthClientAuthorizationId,
scopes: Scopes,
original_redirect_uri: Option<String>, // Needed for https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3
},


@ -1,9 +1,9 @@
use crate::database::models::UserId;
use crate::database::models::DBUserId;
use chrono::{DateTime, Utc};
pub struct FriendItem {
pub user_id: UserId,
pub friend_id: UserId,
pub user_id: DBUserId,
pub friend_id: DBUserId,
pub created: DateTime<Utc>,
pub accepted: bool,
}
@ -30,8 +30,8 @@ impl FriendItem {
}
pub async fn get_friend<'a, E>(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
exec: E,
) -> Result<Option<FriendItem>, sqlx::Error>
where
@ -49,8 +49,8 @@ impl FriendItem {
.fetch_optional(exec)
.await?
.map(|row| FriendItem {
user_id: UserId(row.user_id),
friend_id: UserId(row.friend_id),
user_id: DBUserId(row.user_id),
friend_id: DBUserId(row.friend_id),
created: row.created,
accepted: row.accepted,
});
@ -59,8 +59,8 @@ impl FriendItem {
}
pub async fn update_friend(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
accepted: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::Error> {
@ -81,7 +81,7 @@ impl FriendItem {
}
pub async fn get_user_friends<'a, E>(
user_id: UserId,
user_id: DBUserId,
accepted: Option<bool>,
exec: E,
) -> Result<Vec<FriendItem>, sqlx::Error>
@ -100,8 +100,8 @@ impl FriendItem {
.await?
.into_iter()
.map(|row| FriendItem {
user_id: UserId(row.user_id),
friend_id: UserId(row.friend_id),
user_id: DBUserId(row.user_id),
friend_id: DBUserId(row.friend_id),
created: row.created,
accepted: row.accepted,
})
@ -112,8 +112,8 @@ impl FriendItem {
}
pub async fn remove(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::Error> {
sqlx::query!(


@ -1,7 +1,15 @@
use super::DatabaseError;
use crate::models::ids::{
ChargeId, CollectionId, FileId, ImageId, NotificationId,
OAuthAccessTokenId, OAuthClientAuthorizationId, OAuthClientId,
OAuthRedirectUriId, OrganizationId, PatId, PayoutId, ProductId,
ProductPriceId, ProjectId, ReportId, SessionId, TeamId, TeamMemberId,
ThreadId, ThreadMessageId, UserSubscriptionId, VersionId,
};
use ariadne::ids::base62_impl::to_base62;
use ariadne::ids::{random_base62_rng, random_base62_rng_range};
use ariadne::ids::{UserId, random_base62_rng, random_base62_rng_range};
use censor::Censor;
use paste::paste;
use rand::SeedableRng;
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
@ -10,12 +18,12 @@ use sqlx::sqlx_macros::Type;
const ID_RETRY_COUNT: usize = 20;
macro_rules! generate_ids {
($vis:vis $function_name:ident, $return_type:ty, $id_length:expr, $select_stmnt:literal, $id_function:expr) => {
$vis async fn $function_name(
($function_name:ident, $return_type:ident, $select_stmnt:expr) => {
pub async fn $function_name(
con: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<$return_type, DatabaseError> {
let mut rng = ChaCha20Rng::from_entropy();
let length = $id_length;
let length = 8;
let mut id = random_base62_rng(&mut rng, length);
let mut retry_count = 0;
let censor = Censor::Standard + Censor::Sex;
@ -26,7 +34,9 @@ macro_rules! generate_ids {
.fetch_one(&mut **con)
.await?;
if results.exists.unwrap_or(true) || censor.check(&*to_base62(id)) {
if results.exists.unwrap_or(true)
|| censor.check(&*to_base62(id))
{
id = random_base62_rng(&mut rng, length);
} else {
break;
@ -38,14 +48,14 @@ macro_rules! generate_ids {
}
}
Ok($id_function(id as i64))
Ok($return_type(id as i64))
}
};
}
macro_rules! generate_bulk_ids {
($vis:vis $function_name:ident, $return_type:ty, $select_stmnt:literal, $id_function:expr) => {
$vis async fn $function_name(
($function_name:ident, $return_type:ident, $select_stmnt:expr) => {
pub async fn $function_name(
count: usize,
con: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<$return_type>, DatabaseError> {
@ -55,14 +65,18 @@ macro_rules! generate_bulk_ids {
// Check if ID is unique
loop {
let base = random_base62_rng_range(&mut rng, 1, 10) as i64;
let ids = (0..count).map(|x| base + x as i64).collect::<Vec<_>>();
let ids =
(0..count).map(|x| base + x as i64).collect::<Vec<_>>();
let results = sqlx::query!($select_stmnt, &ids)
.fetch_one(&mut **con)
.await?;
if !results.exists.unwrap_or(true) {
return Ok(ids.into_iter().map(|x| $id_function(x)).collect());
return Ok(ids
.into_iter()
.map(|x| $return_type(x))
.collect());
}
retry_count += 1;
@ -74,589 +88,167 @@ macro_rules! generate_bulk_ids {
};
}
generate_ids!(
pub generate_project_id,
ProjectId,
8,
"SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)",
ProjectId
);
generate_ids!(
pub generate_version_id,
VersionId,
8,
"SELECT EXISTS(SELECT 1 FROM versions WHERE id=$1)",
VersionId
);
generate_ids!(
pub generate_team_id,
TeamId,
8,
"SELECT EXISTS(SELECT 1 FROM teams WHERE id=$1)",
TeamId
);
generate_ids!(
pub generate_organization_id,
OrganizationId,
8,
"SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1)",
OrganizationId
);
generate_ids!(
pub generate_collection_id,
CollectionId,
8,
"SELECT EXISTS(SELECT 1 FROM collections WHERE id=$1)",
CollectionId
);
generate_ids!(
pub generate_file_id,
FileId,
8,
"SELECT EXISTS(SELECT 1 FROM files WHERE id=$1)",
FileId
);
generate_ids!(
pub generate_team_member_id,
TeamMemberId,
8,
"SELECT EXISTS(SELECT 1 FROM team_members WHERE id=$1)",
TeamMemberId
);
generate_ids!(
pub generate_pat_id,
PatId,
8,
"SELECT EXISTS(SELECT 1 FROM pats WHERE id=$1)",
PatId
);
macro_rules! db_id_interface {
    ($id_struct:ident $(, generator: $generator_function:ident @ $db_table:expr, $(bulk_generator: $bulk_generator_function:ident,)?)?) => {
        paste! {
            #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
            #[sqlx(transparent)]
            pub struct [< DB $id_struct >](pub i64);
            impl From<$id_struct> for [< DB $id_struct >] {
                fn from(id: $id_struct) -> Self {
                    Self(id.0 as i64)
                }
            }
            impl From<[< DB $id_struct >]> for $id_struct {
                fn from(id: [< DB $id_struct >]) -> Self {
                    Self(id.0 as u64)
                }
            }
            $(
                generate_ids!(
                    $generator_function,
                    [< DB $id_struct >],
                    "SELECT EXISTS(SELECT 1 FROM " + $db_table + " WHERE id=$1)"
                );
                $(
                    generate_bulk_ids!(
                        $bulk_generator_function,
                        [< DB $id_struct >],
                        "SELECT EXISTS(SELECT 1 FROM " + $db_table + " WHERE id = ANY($1))"
                    );
                )?
            )?
        }
    };
}
generate_ids!(
    pub generate_user_id,
    UserId,
    8,
    "SELECT EXISTS(SELECT 1 FROM users WHERE id=$1)",
    UserId
);
generate_ids!(
    pub generate_report_id,
    ReportId,
    8,
    "SELECT EXISTS(SELECT 1 FROM reports WHERE id=$1)",
    ReportId
);
generate_ids!(
    pub generate_notification_id,
    NotificationId,
    8,
    "SELECT EXISTS(SELECT 1 FROM notifications WHERE id=$1)",
    NotificationId
);
generate_bulk_ids!(
    pub generate_many_notification_ids,
    NotificationId,
    "SELECT EXISTS(SELECT 1 FROM notifications WHERE id = ANY($1))",
    NotificationId
);
generate_ids!(
pub generate_thread_id,
ThreadId,
8,
"SELECT EXISTS(SELECT 1 FROM threads WHERE id=$1)",
ThreadId
);
generate_ids!(
pub generate_thread_message_id,
ThreadMessageId,
8,
"SELECT EXISTS(SELECT 1 FROM threads_messages WHERE id=$1)",
ThreadMessageId
);
macro_rules! short_id_type {
($name:ident) => {
#[derive(
Copy,
Clone,
Debug,
Type,
Serialize,
Deserialize,
Eq,
PartialEq,
Hash,
)]
#[sqlx(transparent)]
pub struct $name(pub i32);
};
}
generate_ids!(
pub generate_session_id,
SessionId,
8,
"SELECT EXISTS(SELECT 1 FROM sessions WHERE id=$1)",
SessionId
);
generate_ids!(
pub generate_image_id,
ImageId,
8,
"SELECT EXISTS(SELECT 1 FROM uploaded_images WHERE id=$1)",
ImageId
);
generate_ids!(
pub generate_oauth_client_authorization_id,
OAuthClientAuthorizationId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_client_authorizations WHERE id=$1)",
OAuthClientAuthorizationId
);
generate_ids!(
pub generate_oauth_client_id,
OAuthClientId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_clients WHERE id=$1)",
OAuthClientId
);
generate_ids!(
pub generate_oauth_redirect_id,
OAuthRedirectUriId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_client_redirect_uris WHERE id=$1)",
OAuthRedirectUriId
);
generate_ids!(
pub generate_oauth_access_token_id,
OAuthAccessTokenId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_access_tokens WHERE id=$1)",
OAuthAccessTokenId
);
generate_ids!(
pub generate_payout_id,
PayoutId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_access_tokens WHERE id=$1)",
PayoutId
);
generate_ids!(
pub generate_product_id,
ProductId,
8,
"SELECT EXISTS(SELECT 1 FROM products WHERE id=$1)",
ProductId
);
generate_ids!(
pub generate_product_price_id,
ProductPriceId,
8,
"SELECT EXISTS(SELECT 1 FROM products_prices WHERE id=$1)",
ProductPriceId
);
generate_ids!(
pub generate_user_subscription_id,
UserSubscriptionId,
8,
"SELECT EXISTS(SELECT 1 FROM users_subscriptions WHERE id=$1)",
UserSubscriptionId
);
generate_ids!(
    pub generate_charge_id,
    ChargeId,
    8,
    "SELECT EXISTS(SELECT 1 FROM charges WHERE id=$1)",
    ChargeId
);
db_id_interface!(
    ChargeId,
    generator: generate_charge_id @ "charges",
);
db_id_interface!(
CollectionId,
generator: generate_collection_id @ "collections",
);
db_id_interface!(
FileId,
generator: generate_file_id @ "files",
);
db_id_interface!(
ImageId,
generator: generate_image_id @ "uploaded_images",
);
db_id_interface!(
NotificationId,
generator: generate_notification_id @ "notifications",
bulk_generator: generate_many_notification_ids,
);
db_id_interface!(
OAuthAccessTokenId,
generator: generate_oauth_access_token_id @ "oauth_access_tokens",
);
db_id_interface!(
OAuthClientAuthorizationId,
generator: generate_oauth_client_authorization_id @ "oauth_client_authorizations",
);
db_id_interface!(
OAuthClientId,
generator: generate_oauth_client_id @ "oauth_clients",
);
db_id_interface!(
OAuthRedirectUriId,
generator: generate_oauth_redirect_id @ "oauth_client_redirect_uris",
);
db_id_interface!(
OrganizationId,
generator: generate_organization_id @ "organizations",
);
db_id_interface!(
PatId,
generator: generate_pat_id @ "pats",
);
db_id_interface!(
PayoutId,
generator: generate_payout_id @ "payouts",
);
db_id_interface!(
ProductId,
generator: generate_product_id @ "products",
);
db_id_interface!(
ProductPriceId,
generator: generate_product_price_id @ "products_prices",
);
db_id_interface!(
ProjectId,
generator: generate_project_id @ "mods",
);
db_id_interface!(
ReportId,
generator: generate_report_id @ "reports",
);
db_id_interface!(
SessionId,
generator: generate_session_id @ "sessions",
);
db_id_interface!(
TeamId,
generator: generate_team_id @ "teams",
);
db_id_interface!(
TeamMemberId,
generator: generate_team_member_id @ "team_members",
);
db_id_interface!(
ThreadId,
generator: generate_thread_id @ "threads",
);
db_id_interface!(
ThreadMessageId,
generator: generate_thread_message_id @ "threads_messages",
);
db_id_interface!(
UserId,
generator: generate_user_id @ "users",
);
db_id_interface!(
UserSubscriptionId,
generator: generate_user_subscription_id @ "users_subscriptions",
);
db_id_interface!(
VersionId,
generator: generate_version_id @ "versions",
);
#[derive(
Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct UserId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Eq, Hash, PartialEq, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct TeamId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct TeamMemberId(pub i64);
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct OrganizationId(pub i64);
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct ProjectId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct ProjectTypeId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct StatusId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct GameId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct LinkPlatformId(pub i32);
#[derive(
Copy,
Clone,
Debug,
Type,
PartialEq,
Eq,
Hash,
Serialize,
Deserialize,
PartialOrd,
Ord,
)]
#[sqlx(transparent)]
pub struct VersionId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct CategoryId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct CollectionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize, Serialize)]
#[sqlx(transparent)]
pub struct ReportId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct ReportTypeId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize, Serialize,
)]
#[sqlx(transparent)]
pub struct FileId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Deserialize, Serialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct PatId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationActionId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq)]
#[sqlx(transparent)]
pub struct ThreadId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ThreadMessageId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct SessionId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ImageId(pub i64);
#[derive(
Copy,
Clone,
Debug,
Type,
Serialize,
Deserialize,
Eq,
PartialEq,
Hash,
PartialOrd,
Ord,
)]
#[sqlx(transparent)]
pub struct LoaderFieldId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderFieldEnumId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderFieldEnumValueId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthClientId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthClientAuthorizationId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthRedirectUriId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthAccessTokenId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct PayoutId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ProductId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ProductPriceId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct UserSubscriptionId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ChargeId(pub i64);
use crate::models::ids;
impl From<ids::ProjectId> for ProjectId {
fn from(id: ids::ProjectId) -> Self {
ProjectId(id.0 as i64)
}
}
impl From<ProjectId> for ids::ProjectId {
fn from(id: ProjectId) -> Self {
ids::ProjectId(id.0 as u64)
}
}
impl From<ids::UserId> for UserId {
fn from(id: ids::UserId) -> Self {
UserId(id.0 as i64)
}
}
impl From<UserId> for ids::UserId {
fn from(id: UserId) -> Self {
ids::UserId(id.0 as u64)
}
}
impl From<ids::TeamId> for TeamId {
fn from(id: ids::TeamId) -> Self {
TeamId(id.0 as i64)
}
}
impl From<TeamId> for ids::TeamId {
fn from(id: TeamId) -> Self {
ids::TeamId(id.0 as u64)
}
}
impl From<ids::OrganizationId> for OrganizationId {
fn from(id: ids::OrganizationId) -> Self {
OrganizationId(id.0 as i64)
}
}
impl From<OrganizationId> for ids::OrganizationId {
fn from(id: OrganizationId) -> Self {
ids::OrganizationId(id.0 as u64)
}
}
impl From<ids::VersionId> for VersionId {
fn from(id: ids::VersionId) -> Self {
VersionId(id.0 as i64)
}
}
impl From<VersionId> for ids::VersionId {
fn from(id: VersionId) -> Self {
ids::VersionId(id.0 as u64)
}
}
impl From<ids::CollectionId> for CollectionId {
fn from(id: ids::CollectionId) -> Self {
CollectionId(id.0 as i64)
}
}
impl From<CollectionId> for ids::CollectionId {
fn from(id: CollectionId) -> Self {
ids::CollectionId(id.0 as u64)
}
}
impl From<ids::ReportId> for ReportId {
fn from(id: ids::ReportId) -> Self {
ReportId(id.0 as i64)
}
}
impl From<ReportId> for ids::ReportId {
fn from(id: ReportId) -> Self {
ids::ReportId(id.0 as u64)
}
}
impl From<ImageId> for ids::ImageId {
fn from(id: ImageId) -> Self {
ids::ImageId(id.0 as u64)
}
}
impl From<ids::ImageId> for ImageId {
fn from(id: ids::ImageId) -> Self {
ImageId(id.0 as i64)
}
}
impl From<ids::NotificationId> for NotificationId {
fn from(id: ids::NotificationId) -> Self {
NotificationId(id.0 as i64)
}
}
impl From<NotificationId> for ids::NotificationId {
fn from(id: NotificationId) -> Self {
ids::NotificationId(id.0 as u64)
}
}
impl From<ids::ThreadId> for ThreadId {
fn from(id: ids::ThreadId) -> Self {
ThreadId(id.0 as i64)
}
}
impl From<ThreadId> for ids::ThreadId {
fn from(id: ThreadId) -> Self {
ids::ThreadId(id.0 as u64)
}
}
impl From<ids::ThreadMessageId> for ThreadMessageId {
fn from(id: ids::ThreadMessageId) -> Self {
ThreadMessageId(id.0 as i64)
}
}
impl From<ThreadMessageId> for ids::ThreadMessageId {
fn from(id: ThreadMessageId) -> Self {
ids::ThreadMessageId(id.0 as u64)
}
}
impl From<SessionId> for ids::SessionId {
fn from(id: SessionId) -> Self {
ids::SessionId(id.0 as u64)
}
}
impl From<PatId> for ids::PatId {
fn from(id: PatId) -> Self {
ids::PatId(id.0 as u64)
}
}
impl From<OAuthClientId> for ids::OAuthClientId {
fn from(id: OAuthClientId) -> Self {
ids::OAuthClientId(id.0 as u64)
}
}
impl From<ids::OAuthClientId> for OAuthClientId {
fn from(id: ids::OAuthClientId) -> Self {
Self(id.0 as i64)
}
}
impl From<OAuthRedirectUriId> for ids::OAuthRedirectUriId {
fn from(id: OAuthRedirectUriId) -> Self {
ids::OAuthRedirectUriId(id.0 as u64)
}
}
impl From<OAuthClientAuthorizationId> for ids::OAuthClientAuthorizationId {
fn from(id: OAuthClientAuthorizationId) -> Self {
ids::OAuthClientAuthorizationId(id.0 as u64)
}
}
impl From<ids::PayoutId> for PayoutId {
fn from(id: ids::PayoutId) -> Self {
PayoutId(id.0 as i64)
}
}
impl From<PayoutId> for ids::PayoutId {
fn from(id: PayoutId) -> Self {
ids::PayoutId(id.0 as u64)
}
}
impl From<ids::ProductId> for ProductId {
fn from(id: ids::ProductId) -> Self {
ProductId(id.0 as i64)
}
}
impl From<ProductId> for ids::ProductId {
fn from(id: ProductId) -> Self {
ids::ProductId(id.0 as u64)
}
}
impl From<ids::ProductPriceId> for ProductPriceId {
fn from(id: ids::ProductPriceId) -> Self {
ProductPriceId(id.0 as i64)
}
}
impl From<ProductPriceId> for ids::ProductPriceId {
fn from(id: ProductPriceId) -> Self {
ids::ProductPriceId(id.0 as u64)
}
}
impl From<ids::UserSubscriptionId> for UserSubscriptionId {
fn from(id: ids::UserSubscriptionId) -> Self {
UserSubscriptionId(id.0 as i64)
}
}
impl From<UserSubscriptionId> for ids::UserSubscriptionId {
fn from(id: UserSubscriptionId) -> Self {
ids::UserSubscriptionId(id.0 as u64)
}
}
impl From<ids::ChargeId> for ChargeId {
fn from(id: ids::ChargeId) -> Self {
ChargeId(id.0 as i64)
}
}
impl From<ChargeId> for ids::ChargeId {
fn from(id: ChargeId) -> Self {
ids::ChargeId(id.0 as u64)
}
}
short_id_type!(CategoryId);
short_id_type!(GameId);
short_id_type!(LinkPlatformId);
short_id_type!(LoaderFieldEnumId);
short_id_type!(LoaderFieldEnumValueId);
short_id_type!(LoaderFieldId);
short_id_type!(LoaderId);
short_id_type!(NotificationActionId);
short_id_type!(ProjectTypeId);
short_id_type!(ReportTypeId);
short_id_type!(StatusId);

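The deduplication in database::models::ids above comes from the paste crate (the new dependency in Cargo.toml and Cargo.lock): db_id_interface! pastes the DB prefix onto the API type name and emits the newtype, both From conversions, and, when requested, the generate_*_id functions from a single invocation. A rough sketch of that token-pasting idea follows, using a hypothetical db_id_newtype! macro that covers only the struct-and-conversions part; the real macro also wires up generate_ids!/generate_bulk_ids! as shown above.

```rust
// Sketch of the paste-based pattern, not the exact Labrinth macro. Requires the
// `paste` crate; the macro stamps out a DB-prefixed i64 newtype plus conversions.
use paste::paste;

// Hypothetical API-layer ID standing in for labrinth::models::ids::UserId.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct UserId(pub u64);

macro_rules! db_id_newtype {
    ($id_struct:ident) => {
        paste! {
            // [< DB $id_struct >] glues the tokens into one identifier, e.g. DBUserId.
            #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
            pub struct [< DB $id_struct >](pub i64);

            impl From<$id_struct> for [< DB $id_struct >] {
                fn from(id: $id_struct) -> Self {
                    Self(id.0 as i64)
                }
            }

            impl From<[< DB $id_struct >]> for $id_struct {
                fn from(id: [< DB $id_struct >]) -> Self {
                    Self(id.0 as u64)
                }
            }
        }
    };
}

db_id_newtype!(UserId);

fn main() {
    let db_id: DBUserId = UserId(42).into();
    assert_eq!(UserId::from(db_id), UserId(42));
}
```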

@ -9,20 +9,20 @@ const IMAGES_NAMESPACE: &str = "images";
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Image {
pub id: ImageId,
pub id: DBImageId,
pub url: String,
pub raw_url: String,
pub size: u64,
pub created: DateTime<Utc>,
pub owner_id: UserId,
pub owner_id: DBUserId,
// context it is associated with
pub context: String,
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub thread_message_id: Option<ThreadMessageId>,
pub report_id: Option<ReportId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub thread_message_id: Option<DBThreadMessageId>,
pub report_id: Option<DBReportId>,
}
impl Image {
@ -39,12 +39,12 @@ impl Image {
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11
);
",
self.id as ImageId,
self.id as DBImageId,
self.url,
self.raw_url,
self.size as i64,
self.created,
self.owner_id as UserId,
self.owner_id as DBUserId,
self.context,
self.project_id.map(|x| x.0),
self.version_id.map(|x| x.0),
@ -58,7 +58,7 @@ impl Image {
}
pub async fn remove(
id: ImageId,
id: DBImageId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -70,7 +70,7 @@ impl Image {
DELETE FROM uploaded_images
WHERE id = $1
",
id as ImageId,
id as DBImageId,
)
.execute(&mut **transaction)
.await?;
@ -98,22 +98,22 @@ impl Image {
ImageContext::Project {
project_id: Some(id),
} => {
project_id = Some(ProjectId::from(id));
project_id = Some(DBProjectId::from(id));
}
ImageContext::Version {
version_id: Some(id),
} => {
version_id = Some(VersionId::from(id));
version_id = Some(DBVersionId::from(id));
}
ImageContext::ThreadMessage {
thread_message_id: Some(id),
} => {
thread_message_id = Some(ThreadMessageId::from(id));
thread_message_id = Some(DBThreadMessageId::from(id));
}
ImageContext::Report {
report_id: Some(id),
} => {
report_id = Some(ReportId::from(id));
report_id = Some(DBReportId::from(id));
}
_ => {}
}
@ -139,7 +139,7 @@ impl Image {
)
.fetch(&mut **transaction)
.map_ok(|row| {
let id = ImageId(row.id);
let id = DBImageId(row.id);
Image {
id,
@ -147,12 +147,12 @@ impl Image {
raw_url: row.raw_url,
size: row.size as u64,
created: row.created,
owner_id: UserId(row.owner_id),
owner_id: DBUserId(row.owner_id),
context: row.context,
project_id: row.mod_id.map(ProjectId),
version_id: row.version_id.map(VersionId),
thread_message_id: row.thread_message_id.map(ThreadMessageId),
report_id: row.report_id.map(ReportId),
project_id: row.mod_id.map(DBProjectId),
version_id: row.version_id.map(DBVersionId),
thread_message_id: row.thread_message_id.map(DBThreadMessageId),
report_id: row.report_id.map(DBReportId),
}
})
.try_collect::<Vec<Image>>()
@ -160,7 +160,7 @@ impl Image {
}
pub async fn get<'a, 'b, E>(
id: ImageId,
id: DBImageId,
executor: E,
redis: &RedisPool,
) -> Result<Option<Image>, DatabaseError>
@ -173,7 +173,7 @@ impl Image {
}
pub async fn get_many<'a, E>(
image_ids: &[ImageId],
image_ids: &[DBImageId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Image>, DatabaseError>
@ -198,17 +198,17 @@ impl Image {
.fetch(exec)
.try_fold(DashMap::new(), |acc, i| {
let img = Image {
id: ImageId(i.id),
id: DBImageId(i.id),
url: i.url,
raw_url: i.raw_url,
size: i.size as u64,
created: i.created,
owner_id: UserId(i.owner_id),
owner_id: DBUserId(i.owner_id),
context: i.context,
project_id: i.mod_id.map(ProjectId),
version_id: i.version_id.map(VersionId),
thread_message_id: i.thread_message_id.map(ThreadMessageId),
report_id: i.report_id.map(ReportId),
project_id: i.mod_id.map(DBProjectId),
version_id: i.version_id.map(DBVersionId),
thread_message_id: i.thread_message_id.map(DBThreadMessageId),
report_id: i.report_id.map(DBReportId),
};
acc.insert(i.id, img);
@ -224,7 +224,7 @@ impl Image {
}
pub async fn clear_cache(
id: ImageId,
id: DBImageId,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;


@ -153,7 +153,7 @@ impl Loader {
SELECT l.id id, l.loader loader, l.icon icon, l.metadata metadata,
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games
FROM loaders l
FROM loaders l
LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id
LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id
LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id
@ -293,7 +293,7 @@ impl std::hash::Hash for LoaderFieldEnumValue {
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq, Hash)]
pub struct VersionField {
pub version_id: VersionId,
pub version_id: DBVersionId,
pub field_id: LoaderFieldId,
pub field_name: String,
pub value: VersionFieldValue,
@ -312,7 +312,7 @@ pub enum VersionFieldValue {
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct QueryVersionField {
pub version_id: VersionId,
pub version_id: DBVersionId,
pub field_id: LoaderFieldId,
pub int_value: Option<i32>,
pub enum_value: Option<LoaderFieldEnumValueId>,
@ -524,7 +524,7 @@ impl LoaderFieldEnum {
let result = sqlx::query!(
"
SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable
SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable
FROM loader_field_enums lfe
WHERE lfe.enum_name = $1
ORDER BY lfe.ordering ASC
@ -781,7 +781,7 @@ impl VersionField {
}
pub fn check_parse(
version_id: VersionId,
version_id: DBVersionId,
loader_field: LoaderField,
value: serde_json::Value,
enum_variants: Vec<LoaderFieldEnumValue>,
@ -1032,7 +1032,7 @@ impl VersionFieldValue {
field_type: &LoaderFieldType,
qvfs: Vec<QueryVersionField>,
qlfev: &[QueryLoaderFieldEnumValue],
) -> Result<(VersionId, VersionFieldValue), DatabaseError> {
) -> Result<(DBVersionId, VersionFieldValue), DatabaseError> {
match field_type {
LoaderFieldType::Integer
| LoaderFieldType::Text
@ -1076,7 +1076,7 @@ impl VersionFieldValue {
field_type: &LoaderFieldType,
qvfs: Vec<QueryVersionField>,
qlfev: &[QueryLoaderFieldEnumValue],
) -> Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError> {
) -> Result<Vec<(DBVersionId, VersionFieldValue)>, DatabaseError> {
let field_name = field_type.to_str();
let did_not_exist_error = |field_name: &str, desired_field: &str| {
DatabaseError::SchemaError(format!(
@ -1093,7 +1093,8 @@ impl VersionFieldValue {
// If the field type is a non-array, then the reason for multiple version ids is that there are multiple versions being aggregated, and those version ids are contained within.
// If the field type is an array, then the reason for multiple version ids is that there are multiple values for a single version
// (or a greater aggregation between multiple arrays, in which case the per-field version is lost, so we just take the first one and use it for that)
let version_id = version_id.into_iter().next().unwrap_or(VersionId(0));
let version_id =
version_id.into_iter().next().unwrap_or(DBVersionId(0));
let field_id = qvfs
.iter()
@ -1106,12 +1107,11 @@ impl VersionFieldValue {
)));
}
let mut value =
match field_type {
// Singleton fields
// If there are multiple, we assume multiple versions are being concatenated
LoaderFieldType::Integer => qvfs
.into_iter()
let mut value = match field_type {
// Singleton fields
// If there are multiple, we assume multiple versions are being concatenated
LoaderFieldType::Integer => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@ -1121,11 +1121,12 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Text => qvfs
.into_iter()
>>()?
}
LoaderFieldType::Text => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@ -1135,11 +1136,12 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Boolean => qvfs
.into_iter()
>>()?
}
LoaderFieldType::Boolean => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@ -1152,11 +1154,12 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Enum(id) => qvfs
.into_iter()
>>()?
}
LoaderFieldType::Enum(id) => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@ -1189,90 +1192,86 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
>>()?
}
// Array fields
// We concatenate into one array
LoaderFieldType::ArrayInteger => vec![(
version_id,
VersionFieldValue::ArrayInteger(
qvfs.into_iter()
.map(|qvf| {
// Array fields
// We concatenate into one array
LoaderFieldType::ArrayInteger => vec![(
version_id,
VersionFieldValue::ArrayInteger(
qvfs.into_iter()
.map(|qvf| {
qvf.int_value.ok_or(did_not_exist_error(
field_name,
"int_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayText => vec![(
version_id,
VersionFieldValue::ArrayText(
qvfs.into_iter()
.map(|qvf| {
qvf.string_value.ok_or(did_not_exist_error(
field_name,
"string_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayBoolean => vec![(
version_id,
VersionFieldValue::ArrayBoolean(
qvfs.into_iter()
.map(|qvf| {
Ok::<bool, DatabaseError>(
qvf.int_value.ok_or(did_not_exist_error(
field_name,
"int_value",
))
))? != 0,
)
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayEnum(id) => vec![(
version_id,
VersionFieldValue::ArrayEnum(
*id,
qvfs.into_iter()
.map(|qvf| {
let enum_id = qvf.enum_value.ok_or(
did_not_exist_error(field_name, "enum_value"),
)?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(
field_name,
"enum_value",
))?;
Ok::<_, DatabaseError>(LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev
.metadata
.clone()
.unwrap_or_default(),
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayText => vec![(
version_id,
VersionFieldValue::ArrayText(
qvfs.into_iter()
.map(|qvf| {
qvf.string_value.ok_or(did_not_exist_error(
field_name,
"string_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayBoolean => vec![(
version_id,
VersionFieldValue::ArrayBoolean(
qvfs.into_iter()
.map(|qvf| {
Ok::<bool, DatabaseError>(
qvf.int_value.ok_or(
did_not_exist_error(
field_name,
"int_value",
),
)? != 0,
)
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayEnum(id) => vec![(
version_id,
VersionFieldValue::ArrayEnum(
*id,
qvfs.into_iter()
.map(|qvf| {
let enum_id = qvf.enum_value.ok_or(
did_not_exist_error(
field_name,
"enum_value",
),
)?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(
field_name,
"enum_value",
))?;
Ok::<_, DatabaseError>(LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev
.metadata
.clone()
.unwrap_or_default(),
})
})
.collect::<Result<_, _>>()?,
),
)],
};
})
.collect::<Result<_, _>>()?,
),
)],
};
// Sort arrayenums by ordering, then by created
for (_, v) in value.iter_mut() {


@ -13,8 +13,8 @@ pub struct NotificationBuilder {
#[derive(Serialize, Deserialize)]
pub struct Notification {
pub id: NotificationId,
pub user_id: UserId,
pub id: DBNotificationId,
pub user_id: DBUserId,
pub body: NotificationBody,
pub read: bool,
pub created: DateTime<Utc>,
@ -23,7 +23,7 @@ pub struct Notification {
#[derive(Serialize, Deserialize)]
pub struct NotificationAction {
pub id: NotificationActionId,
pub notification_id: NotificationId,
pub notification_id: DBNotificationId,
pub name: String,
pub action_route_method: String,
pub action_route: String,
@ -32,7 +32,7 @@ pub struct NotificationAction {
impl NotificationBuilder {
pub async fn insert(
&self,
user: UserId,
user: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
@ -41,7 +41,7 @@ impl NotificationBuilder {
pub async fn insert_many(
&self,
users: Vec<UserId>,
users: Vec<DBUserId>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
@ -80,7 +80,7 @@ impl NotificationBuilder {
impl Notification {
pub async fn get<'a, 'b, E>(
id: NotificationId,
id: DBNotificationId,
executor: E,
) -> Result<Option<Self>, sqlx::error::Error>
where
@ -92,7 +92,7 @@ impl Notification {
}
pub async fn get_many<'a, E>(
notification_ids: &[NotificationId],
notification_ids: &[DBNotificationId],
exec: E,
) -> Result<Vec<Notification>, sqlx::Error>
where
@ -114,11 +114,11 @@ impl Notification {
)
.fetch(exec)
.map_ok(|row| {
let id = NotificationId(row.id);
let id = DBNotificationId(row.id);
Notification {
id,
user_id: UserId(row.user_id),
user_id: DBUserId(row.user_id),
read: row.read,
created: row.created,
body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| {
@ -145,7 +145,7 @@ impl Notification {
}
pub async fn get_many_user<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<Notification>, DatabaseError>
@ -174,15 +174,15 @@ impl Notification {
WHERE n.user_id = $1
GROUP BY n.id, n.user_id;
",
user_id as UserId
user_id as DBUserId
)
.fetch(exec)
.map_ok(|row| {
let id = NotificationId(row.id);
let id = DBNotificationId(row.id);
Notification {
id,
user_id: UserId(row.user_id),
user_id: DBUserId(row.user_id),
read: row.read,
created: row.created,
body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| {
@ -220,7 +220,7 @@ impl Notification {
}
pub async fn read(
id: NotificationId,
id: DBNotificationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -228,7 +228,7 @@ impl Notification {
}
pub async fn read_many(
notification_ids: &[NotificationId],
notification_ids: &[DBNotificationId],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -245,7 +245,7 @@ impl Notification {
&notification_ids_parsed
)
.fetch(&mut **transaction)
.map_ok(|x| UserId(x.user_id))
.map_ok(|x| DBUserId(x.user_id))
.try_collect::<Vec<_>>()
.await?;
@ -259,7 +259,7 @@ impl Notification {
}
pub async fn remove(
id: NotificationId,
id: DBNotificationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -267,7 +267,7 @@ impl Notification {
}
pub async fn remove_many(
notification_ids: &[NotificationId],
notification_ids: &[DBNotificationId],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -293,7 +293,7 @@ impl Notification {
&notification_ids_parsed
)
.fetch(&mut **transaction)
.map_ok(|x| UserId(x.user_id))
.map_ok(|x| DBUserId(x.user_id))
.try_collect::<Vec<_>>()
.await?;
@ -307,7 +307,7 @@ impl Notification {
}
pub async fn clear_user_notifications_cache(
user_ids: impl IntoIterator<Item = &UserId>,
user_ids: impl IntoIterator<Item = &DBUserId>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;


@ -4,13 +4,15 @@ use serde::{Deserialize, Serialize};
use crate::models::pats::Scopes;
use super::{DatabaseError, OAuthClientAuthorizationId, OAuthClientId, UserId};
use super::{
DBOAuthClientAuthorizationId, DBOAuthClientId, DBUserId, DatabaseError,
};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthClientAuthorization {
pub id: OAuthClientAuthorizationId,
pub client_id: OAuthClientId,
pub user_id: UserId,
pub id: DBOAuthClientAuthorizationId,
pub client_id: DBOAuthClientId,
pub user_id: DBUserId,
pub scopes: Scopes,
pub created: DateTime<Utc>,
}
@ -26,9 +28,9 @@ struct AuthorizationQueryResult {
impl From<AuthorizationQueryResult> for OAuthClientAuthorization {
fn from(value: AuthorizationQueryResult) -> Self {
OAuthClientAuthorization {
id: OAuthClientAuthorizationId(value.id),
client_id: OAuthClientId(value.client_id),
user_id: UserId(value.user_id),
id: DBOAuthClientAuthorizationId(value.id),
client_id: DBOAuthClientId(value.client_id),
user_id: DBUserId(value.user_id),
scopes: Scopes::from_postgres(value.scopes),
created: value.created,
}
@ -37,8 +39,8 @@ impl From<AuthorizationQueryResult> for OAuthClientAuthorization {
impl OAuthClientAuthorization {
pub async fn get(
client_id: OAuthClientId,
user_id: UserId,
client_id: DBOAuthClientId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<OAuthClientAuthorization>, DatabaseError> {
let value = sqlx::query_as!(
@ -58,7 +60,7 @@ impl OAuthClientAuthorization {
}
pub async fn get_all_for_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClientAuthorization>, DatabaseError> {
let results = sqlx::query_as!(
@ -77,9 +79,9 @@ impl OAuthClientAuthorization {
}
pub async fn upsert(
id: OAuthClientAuthorizationId,
client_id: OAuthClientId,
user_id: UserId,
id: DBOAuthClientAuthorizationId,
client_id: DBOAuthClientId,
user_id: DBUserId,
scopes: Scopes,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
@ -106,8 +108,8 @@ impl OAuthClientAuthorization {
}
pub async fn remove(
client_id: OAuthClientId,
user_id: UserId,
client_id: DBOAuthClientId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<(), DatabaseError> {
sqlx::query!(


@ -3,19 +3,19 @@ use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sha2::Digest;
use super::{DatabaseError, OAuthClientId, OAuthRedirectUriId, UserId};
use super::{DBOAuthClientId, DBOAuthRedirectUriId, DBUserId, DatabaseError};
use crate::models::pats::Scopes;
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthRedirectUri {
pub id: OAuthRedirectUriId,
pub client_id: OAuthClientId,
pub id: DBOAuthRedirectUriId,
pub client_id: DBOAuthClientId,
pub uri: String,
}
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthClient {
pub id: OAuthClientId,
pub id: DBOAuthClientId,
pub name: String,
pub icon_url: Option<String>,
pub raw_icon_url: Option<String>,
@ -23,7 +23,7 @@ pub struct OAuthClient {
pub secret_hash: String,
pub redirect_uris: Vec<OAuthRedirectUri>,
pub created: DateTime<Utc>,
pub created_by: UserId,
pub created_by: DBUserId,
pub url: Option<String>,
pub description: Option<String>,
}
@ -79,14 +79,14 @@ macro_rules! select_clients_with_predicate {
impl OAuthClient {
pub async fn get(
id: OAuthClientId,
id: DBOAuthClientId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<OAuthClient>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[OAuthClientId],
ids: &[DBOAuthClientId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClient>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@ -102,7 +102,7 @@ impl OAuthClient {
}
pub async fn get_all_user_clients(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClient>, DatabaseError> {
let user_id_param = user_id.0;
@ -117,7 +117,7 @@ impl OAuthClient {
}
pub async fn remove(
id: OAuthClientId,
id: DBOAuthClientId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<(), DatabaseError> {
// Cascades to oauth_client_redirect_uris, oauth_client_authorizations
@ -189,7 +189,7 @@ impl OAuthClient {
}
pub async fn remove_redirect_uris(
ids: impl IntoIterator<Item = OAuthRedirectUriId>,
ids: impl IntoIterator<Item = DBOAuthRedirectUriId>,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let ids = ids.into_iter().map(|id| id.0).collect_vec();
@ -243,8 +243,8 @@ impl From<ClientQueryResult> for OAuthClient {
ids.iter()
.zip(uris.iter())
.map(|(id, uri)| OAuthRedirectUri {
id: OAuthRedirectUriId(*id),
client_id: OAuthClientId(r.id),
id: DBOAuthRedirectUriId(*id),
client_id: DBOAuthClientId(r.id),
uri: uri.to_string(),
})
.collect()
@ -253,7 +253,7 @@ impl From<ClientQueryResult> for OAuthClient {
};
OAuthClient {
id: OAuthClientId(r.id),
id: DBOAuthClientId(r.id),
name: r.name,
icon_url: r.icon_url,
raw_icon_url: r.raw_icon_url,
@ -261,7 +261,7 @@ impl From<ClientQueryResult> for OAuthClient {
secret_hash: r.secret_hash,
redirect_uris: redirects,
created: r.created,
created_by: UserId(r.created_by),
created_by: DBUserId(r.created_by),
url: r.url,
description: r.description,
}


@ -1,6 +1,6 @@
use super::{
DatabaseError, OAuthAccessTokenId, OAuthClientAuthorizationId,
OAuthClientId, UserId,
DBOAuthAccessTokenId, DBOAuthClientAuthorizationId, DBOAuthClientId,
DBUserId, DatabaseError,
};
use crate::models::pats::Scopes;
use chrono::{DateTime, Utc};
@ -9,8 +9,8 @@ use sha2::Digest;
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthAccessToken {
pub id: OAuthAccessTokenId,
pub authorization_id: OAuthClientAuthorizationId,
pub id: DBOAuthAccessTokenId,
pub authorization_id: DBOAuthClientAuthorizationId,
pub token_hash: String,
pub scopes: Scopes,
pub created: DateTime<Utc>,
@ -18,8 +18,8 @@ pub struct OAuthAccessToken {
pub last_used: Option<DateTime<Utc>>,
// Stored separately inside oauth_client_authorizations table
pub client_id: OAuthClientId,
pub user_id: UserId,
pub client_id: DBOAuthClientId,
pub user_id: DBUserId,
}
impl OAuthAccessToken {
@ -50,15 +50,15 @@ impl OAuthAccessToken {
.await?;
Ok(value.map(|r| OAuthAccessToken {
id: OAuthAccessTokenId(r.id),
authorization_id: OAuthClientAuthorizationId(r.authorization_id),
id: DBOAuthAccessTokenId(r.id),
authorization_id: DBOAuthClientAuthorizationId(r.authorization_id),
token_hash: r.token_hash,
scopes: Scopes::from_postgres(r.scopes),
created: r.created,
expires: r.expires,
last_used: r.last_used,
client_id: OAuthClientId(r.client_id),
user_id: UserId(r.user_id),
client_id: DBOAuthClientId(r.client_id),
user_id: DBUserId(r.user_id),
}))
}
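
The access-token lookup in this file matches on a stored token_hash rather than the raw token (note the `use sha2::Digest;` context line). A hedged sketch of that idea, assuming SHA-512 plus hex encoding via the sha2 and hex crates; the real hashing scheme may differ.

use sha2::{Digest, Sha512};

// Hash a bearer token before using it as a lookup key, so the raw token is
// never stored. Algorithm and encoding here are assumptions for illustration.
fn hash_token(token: &str) -> String {
    hex::encode(Sha512::digest(token.as_bytes()))
}

fn main() {
    let token_hash = hash_token("example-token");
    assert_eq!(token_hash.len(), 128); // 64 bytes of SHA-512, hex-encoded
    println!("{token_hash}");
}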


@ -15,7 +15,7 @@ const ORGANIZATIONS_TITLES_NAMESPACE: &str = "organizations_titles";
/// An organization of users who together control one or more projects and organizations.
pub struct Organization {
/// The id of the organization
pub id: OrganizationId,
pub id: DBOrganizationId,
/// The slug of the organization
pub slug: String,
@ -24,7 +24,7 @@ pub struct Organization {
pub name: String,
/// The associated team of the organization
pub team_id: TeamId,
pub team_id: DBTeamId,
/// The description of the organization
pub description: String,
@ -48,7 +48,7 @@ impl Organization {
self.id.0,
self.slug,
self.name,
self.team_id as TeamId,
self.team_id as DBTeamId,
self.description,
self.icon_url,
self.raw_icon_url,
@ -74,7 +74,7 @@ impl Organization {
}
pub async fn get_id<'a, 'b, E>(
id: OrganizationId,
id: DBOrganizationId,
exec: E,
redis: &RedisPool,
) -> Result<Option<Self>, super::DatabaseError>
@ -87,7 +87,7 @@ impl Organization {
}
pub async fn get_many_ids<'a, 'b, E>(
organization_ids: &[OrganizationId],
organization_ids: &[DBOrganizationId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Self>, super::DatabaseError>
@ -143,10 +143,10 @@ impl Organization {
.fetch(exec)
.try_fold(DashMap::new(), |acc, m| {
let org = Organization {
id: OrganizationId(m.id),
id: DBOrganizationId(m.id),
slug: m.slug.clone(),
name: m.name,
team_id: TeamId(m.team_id),
team_id: DBTeamId(m.team_id),
description: m.description,
icon_url: m.icon_url,
raw_icon_url: m.raw_icon_url,
@ -168,7 +168,7 @@ impl Organization {
// Gets organization associated with a project ID, if it exists and there is one
pub async fn get_associated_organization_project_id<'a, 'b, E>(
project_id: ProjectId,
project_id: DBProjectId,
exec: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@ -182,17 +182,17 @@ impl Organization {
WHERE m.id = $1
GROUP BY o.id;
",
project_id as ProjectId,
project_id as DBProjectId,
)
.fetch_optional(exec)
.await?;
if let Some(result) = result {
Ok(Some(Organization {
id: OrganizationId(result.id),
id: DBOrganizationId(result.id),
slug: result.slug,
name: result.name,
team_id: TeamId(result.team_id),
team_id: DBTeamId(result.team_id),
description: result.description,
icon_url: result.icon_url,
raw_icon_url: result.raw_icon_url,
@ -204,7 +204,7 @@ impl Organization {
}
pub async fn remove(
id: OrganizationId,
id: DBOrganizationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, super::DatabaseError> {
@ -216,7 +216,7 @@ impl Organization {
DELETE FROM organizations
WHERE id = $1
",
id as OrganizationId,
id as DBOrganizationId,
)
.execute(&mut **transaction)
.await?;
@ -228,7 +228,7 @@ impl Organization {
DELETE FROM team_members
WHERE team_id = $1
",
organization.team_id as TeamId,
organization.team_id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@ -238,7 +238,7 @@ impl Organization {
DELETE FROM teams
WHERE id = $1
",
organization.team_id as TeamId,
organization.team_id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@ -250,7 +250,7 @@ impl Organization {
}
pub async fn clear_cache(
id: OrganizationId,
id: DBOrganizationId,
slug: Option<String>,
redis: &RedisPool,
) -> Result<(), super::DatabaseError> {


@ -16,11 +16,11 @@ const PATS_USERS_NAMESPACE: &str = "pats_users";
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct PersonalAccessToken {
pub id: PatId,
pub id: DBPatId,
pub name: String,
pub access_token: String,
pub scopes: Scopes,
pub user_id: UserId,
pub user_id: DBUserId,
pub created: DateTime<Utc>,
pub expires: DateTime<Utc>,
pub last_used: Option<DateTime<Utc>>,
@ -42,11 +42,11 @@ impl PersonalAccessToken {
$6
)
",
self.id as PatId,
self.id as DBPatId,
self.name,
self.access_token,
self.scopes.bits() as i64,
self.user_id as UserId,
self.user_id as DBUserId,
self.expires
)
.execute(&mut **transaction)
@ -73,7 +73,7 @@ impl PersonalAccessToken {
}
pub async fn get_many_ids<'a, E>(
pat_ids: &[PatId],
pat_ids: &[DBPatId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<PersonalAccessToken>, DatabaseError>
@ -126,11 +126,11 @@ impl PersonalAccessToken {
.fetch(exec)
.try_fold(DashMap::new(), |acc, x| {
let pat = PersonalAccessToken {
id: PatId(x.id),
id: DBPatId(x.id),
name: x.name,
access_token: x.access_token.clone(),
scopes: Scopes::from_bits(x.scopes as u64).unwrap_or(Scopes::NONE),
user_id: UserId(x.user_id),
user_id: DBUserId(x.user_id),
created: x.created,
expires: x.expires,
last_used: x.last_used,
@ -149,10 +149,10 @@ impl PersonalAccessToken {
}
pub async fn get_user_pats<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<PatId>, DatabaseError>
) -> Result<Vec<DBPatId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -166,10 +166,10 @@ impl PersonalAccessToken {
.await?;
if let Some(res) = res {
return Ok(res.into_iter().map(PatId).collect());
return Ok(res.into_iter().map(DBPatId).collect());
}
let db_pats: Vec<PatId> = sqlx::query!(
let db_pats: Vec<DBPatId> = sqlx::query!(
"
SELECT id
FROM pats
@ -179,8 +179,8 @@ impl PersonalAccessToken {
user_id.0,
)
.fetch(exec)
.map_ok(|x| PatId(x.id))
.try_collect::<Vec<PatId>>()
.map_ok(|x| DBPatId(x.id))
.try_collect::<Vec<DBPatId>>()
.await?;
redis
@ -195,7 +195,7 @@ impl PersonalAccessToken {
}
pub async fn clear_cache(
clear_pats: Vec<(Option<PatId>, Option<String>, Option<UserId>)>,
clear_pats: Vec<(Option<DBPatId>, Option<String>, Option<DBUserId>)>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@ -223,14 +223,14 @@ impl PersonalAccessToken {
}
pub async fn remove(
id: PatId,
id: DBPatId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
sqlx::query!(
"
DELETE FROM pats WHERE id = $1
",
id as PatId,
id as DBPatId,
)
.execute(&mut **transaction)
.await?;
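
clear_cache above takes optional PAT ids, token strings, and user ids and drops every cache key that could reference them. A std-only sketch of that invalidation shape, with a HashMap standing in for Redis; only the pats_users namespace name comes from the diff, the other key layouts are assumptions, and the plain i64 aliases stand in for the newtype ID structs.

use std::collections::HashMap;

type DBPatId = i64;  // aliases for brevity; the real code uses newtype structs
type DBUserId = i64;

// Drop every cache entry that might mention the given PATs.
fn clear_pat_cache(
    cache: &mut HashMap<String, String>,
    clear_pats: Vec<(Option<DBPatId>, Option<String>, Option<DBUserId>)>,
) {
    for (pat_id, token, user_id) in clear_pats {
        if let Some(id) = pat_id {
            cache.remove(&format!("pats:{id}"));
        }
        if let Some(token) = token {
            cache.remove(&format!("pats_tokens:{token}"));
        }
        if let Some(user) = user_id {
            cache.remove(&format!("pats_users:{user}"));
        }
    }
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert("pats_users:7".to_string(), "[1,2,3]".to_string());
    clear_pat_cache(&mut cache, vec![(None, None, Some(7))]);
    assert!(cache.is_empty());
}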


@ -3,12 +3,12 @@ use chrono::{DateTime, Utc};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use super::{DatabaseError, PayoutId, UserId};
use super::{DBPayoutId, DBUserId, DatabaseError};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Payout {
pub id: PayoutId,
pub user_id: UserId,
pub id: DBPayoutId,
pub user_id: DBUserId,
pub created: DateTime<Utc>,
pub status: PayoutStatus,
pub amount: Decimal,
@ -49,7 +49,7 @@ impl Payout {
}
pub async fn get<'a, 'b, E>(
id: PayoutId,
id: DBPayoutId,
executor: E,
) -> Result<Option<Payout>, DatabaseError>
where
@ -61,7 +61,7 @@ impl Payout {
}
pub async fn get_many<'a, E>(
payout_ids: &[PayoutId],
payout_ids: &[DBPayoutId],
exec: E,
) -> Result<Vec<Payout>, DatabaseError>
where
@ -79,8 +79,8 @@ impl Payout {
)
.fetch(exec)
.map_ok(|r| Payout {
id: PayoutId(r.id),
user_id: UserId(r.user_id),
id: DBPayoutId(r.id),
user_id: DBUserId(r.user_id),
created: r.created,
status: PayoutStatus::from_string(&r.status),
amount: r.amount,
@ -96,9 +96,9 @@ impl Payout {
}
pub async fn get_all_for_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<PayoutId>, DatabaseError> {
) -> Result<Vec<DBPayoutId>, DatabaseError> {
let results = sqlx::query!(
"
SELECT id
@ -112,7 +112,7 @@ impl Payout {
Ok(results
.into_iter()
.map(|r| PayoutId(r.id))
.map(|r| DBPayoutId(r.id))
.collect::<Vec<_>>())
}
}


@ -1,5 +1,5 @@
use crate::database::models::{
DatabaseError, ProductId, ProductPriceId, product_item,
DBProductId, DBProductPriceId, DatabaseError, product_item,
};
use crate::database::redis::RedisPool;
use crate::models::billing::{Price, ProductMetadata};
@ -12,7 +12,7 @@ use std::convert::TryInto;
const PRODUCTS_NAMESPACE: &str = "products";
pub struct ProductItem {
pub id: ProductId,
pub id: DBProductId,
pub metadata: ProductMetadata,
pub unitary: bool,
}
@ -42,7 +42,7 @@ impl TryFrom<ProductResult> for ProductItem {
fn try_from(r: ProductResult) -> Result<Self, Self::Error> {
Ok(ProductItem {
id: ProductId(r.id),
id: DBProductId(r.id),
metadata: serde_json::from_value(r.metadata)?,
unitary: r.unitary,
})
@ -51,14 +51,14 @@ impl TryFrom<ProductResult> for ProductItem {
impl ProductItem {
pub async fn get(
id: ProductId,
id: DBProductId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ProductItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[ProductId],
ids: &[DBProductId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ProductItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@ -93,7 +93,7 @@ impl ProductItem {
#[derive(Deserialize, Serialize)]
pub struct QueryProduct {
pub id: ProductId,
pub id: DBProductId,
pub metadata: ProductMetadata,
pub unitary: bool,
pub prices: Vec<ProductPriceItem>,
@ -155,8 +155,8 @@ impl QueryProduct {
#[derive(Deserialize, Serialize)]
pub struct ProductPriceItem {
pub id: ProductPriceId,
pub product_id: ProductId,
pub id: DBProductPriceId,
pub product_id: DBProductId,
pub prices: Price,
pub currency_code: String,
}
@ -187,8 +187,8 @@ impl TryFrom<ProductPriceResult> for ProductPriceItem {
fn try_from(r: ProductPriceResult) -> Result<Self, Self::Error> {
Ok(ProductPriceItem {
id: ProductPriceId(r.id),
product_id: ProductId(r.product_id),
id: DBProductPriceId(r.id),
product_id: DBProductId(r.product_id),
prices: serde_json::from_value(r.prices)?,
currency_code: r.currency_code,
})
@ -197,14 +197,14 @@ impl TryFrom<ProductPriceResult> for ProductPriceItem {
impl ProductPriceItem {
pub async fn get(
id: ProductPriceId,
id: DBProductPriceId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ProductPriceItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[ProductPriceId],
ids: &[DBProductPriceId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ProductPriceItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@ -223,7 +223,7 @@ impl ProductPriceItem {
}
pub async fn get_all_product_prices(
product_id: ProductId,
product_id: DBProductId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ProductPriceItem>, DatabaseError> {
let res = Self::get_all_products_prices(&[product_id], exec).await?;
@ -232,9 +232,10 @@ impl ProductPriceItem {
}
pub async fn get_all_products_prices(
product_ids: &[ProductId],
product_ids: &[DBProductId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<DashMap<ProductId, Vec<ProductPriceItem>>, DatabaseError> {
) -> Result<DashMap<DBProductId, Vec<ProductPriceItem>>, DatabaseError>
{
let ids = product_ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
@ -246,7 +247,7 @@ impl ProductPriceItem {
.fetch(exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProductId, Vec<ProductPriceItem>>, x| {
|acc: DashMap<DBProductId, Vec<ProductPriceItem>>, x| {
if let Ok(item) = <ProductPriceResult as TryInto<
ProductPriceItem,
>>::try_into(x)
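
Both ProductItem and ProductPriceItem convert a raw query-result struct via TryFrom, deserializing the JSON metadata/prices columns along the way. A reduced sketch of that conversion using serde_json; the metadata field shown here is invented, not the crate's real ProductMetadata shape.

use serde::Deserialize;
use serde_json::json;

#[derive(Deserialize, Debug)]
struct ProductMetadata {
    name: String, // illustrative field only
}

struct ProductResult {
    id: i64,
    metadata: serde_json::Value,
    unitary: bool,
}

#[derive(Debug)]
struct ProductItem {
    id: i64,
    metadata: ProductMetadata,
    unitary: bool,
}

impl TryFrom<ProductResult> for ProductItem {
    type Error = serde_json::Error;

    fn try_from(r: ProductResult) -> Result<Self, Self::Error> {
        Ok(ProductItem {
            id: r.id,
            metadata: serde_json::from_value(r.metadata)?,
            unitary: r.unitary,
        })
    }
}

fn main() -> Result<(), serde_json::Error> {
    let row = ProductResult {
        id: 1,
        metadata: json!({ "name": "example" }),
        unitary: true,
    };
    let item = ProductItem::try_from(row)?;
    println!("{item:?}");
    Ok(())
}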


@ -31,7 +31,7 @@ pub struct LinkUrl {
impl LinkUrl {
pub async fn insert_many_projects(
links: Vec<Self>,
project_id: ProjectId,
project_id: DBProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
let (project_ids, platform_ids, urls): (Vec<_>, Vec<_>, Vec<_>) = links
@ -70,7 +70,7 @@ pub struct GalleryItem {
impl GalleryItem {
pub async fn insert_many(
items: Vec<Self>,
project_id: ProjectId,
project_id: DBProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
let (
@ -118,7 +118,7 @@ impl GalleryItem {
}
pub struct ModCategory {
pub project_id: ProjectId,
pub project_id: DBProjectId,
pub category_id: CategoryId,
pub is_additional: bool,
}
@ -154,9 +154,9 @@ impl ModCategory {
#[derive(Clone)]
pub struct ProjectBuilder {
pub project_id: ProjectId,
pub team_id: TeamId,
pub organization_id: Option<OrganizationId>,
pub project_id: DBProjectId,
pub team_id: DBTeamId,
pub organization_id: Option<DBOrganizationId>,
pub name: String,
pub summary: String,
pub description: String,
@ -180,7 +180,7 @@ impl ProjectBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ProjectId, DatabaseError> {
) -> Result<DBProjectId, DatabaseError> {
let project_struct = Project {
id: self.project_id,
team_id: self.team_id,
@ -264,9 +264,9 @@ impl ProjectBuilder {
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Project {
pub id: ProjectId,
pub team_id: TeamId,
pub organization_id: Option<OrganizationId>,
pub id: DBProjectId,
pub team_id: DBTeamId,
pub organization_id: Option<DBOrganizationId>,
pub name: String,
pub summary: String,
pub description: String,
@ -311,8 +311,8 @@ impl Project {
LOWER($14), $15, $16, $17
)
",
self.id as ProjectId,
self.team_id as TeamId,
self.id as DBProjectId,
self.team_id as DBTeamId,
&self.name,
&self.summary,
&self.description,
@ -336,7 +336,7 @@ impl Project {
}
pub async fn remove(
id: ProjectId,
id: DBProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -351,7 +351,7 @@ impl Project {
DELETE FROM mod_follows
WHERE mod_id = $1
",
id as ProjectId
id as DBProjectId
)
.execute(&mut **transaction)
.await?;
@ -361,7 +361,7 @@ impl Project {
DELETE FROM mods_gallery
WHERE mod_id = $1
",
id as ProjectId
id as DBProjectId
)
.execute(&mut **transaction)
.await?;
@ -371,7 +371,7 @@ impl Project {
DELETE FROM mod_follows
WHERE mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -384,7 +384,7 @@ impl Project {
SET mod_id = NULL
WHERE mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -394,7 +394,7 @@ impl Project {
DELETE FROM mods_categories
WHERE joining_mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -404,7 +404,7 @@ impl Project {
DELETE FROM mods_links
WHERE joining_mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -418,7 +418,7 @@ impl Project {
"
DELETE FROM dependencies WHERE mod_dependency_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -429,7 +429,7 @@ impl Project {
SET mod_id = NULL
WHERE (mod_id = $1)
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -439,7 +439,7 @@ impl Project {
DELETE FROM mods
WHERE id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -453,10 +453,10 @@ impl Project {
WHERE team_id = $1
RETURNING user_id
",
project.inner.team_id as TeamId,
project.inner.team_id as DBTeamId,
)
.fetch(&mut **transaction)
.map_ok(|x| UserId(x.user_id))
.map_ok(|x| DBUserId(x.user_id))
.try_collect::<Vec<_>>()
.await?;
@ -467,7 +467,7 @@ impl Project {
DELETE FROM teams
WHERE id = $1
",
project.inner.team_id as TeamId,
project.inner.team_id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@ -492,7 +492,7 @@ impl Project {
}
pub async fn get_id<'a, 'b, E>(
id: ProjectId,
id: DBProjectId,
executor: E,
redis: &RedisPool,
) -> Result<Option<QueryProject>, DatabaseError>
@ -509,7 +509,7 @@ impl Project {
}
pub async fn get_many_ids<'a, E>(
project_ids: &[ProjectId],
project_ids: &[DBProjectId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<QueryProject>, DatabaseError>
@ -553,7 +553,7 @@ impl Project {
.collect::<Vec<_>>();
let all_version_ids = DashSet::new();
let versions: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>> = sqlx::query!(
let versions: DashMap<DBProjectId, Vec<(DBVersionId, DateTime<Utc>)>> = sqlx::query!(
"
SELECT DISTINCT mod_id, v.id as id, date_published
FROM mods m
@ -570,11 +570,11 @@ impl Project {
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
let version_id = VersionId(m.id);
|acc: DashMap<DBProjectId, Vec<(DBVersionId, DateTime<Utc>)>>, m| {
let version_id = DBVersionId(m.id);
let date_published = m.date_published;
all_version_ids.insert(version_id);
acc.entry(ProjectId(m.mod_id))
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push((version_id, date_published));
async move { Ok(acc) }
@ -583,7 +583,7 @@ impl Project {
.await?;
let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<ProjectId, Vec<QueryVersionField>> = sqlx::query!(
let version_fields: DashMap<DBProjectId, Vec<QueryVersionField>> = sqlx::query!(
"
SELECT DISTINCT mod_id, version_id, field_id, int_value, enum_value, string_value
FROM versions v
@ -595,9 +595,9 @@ impl Project {
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<QueryVersionField>>, m| {
|acc: DashMap<DBProjectId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
version_id: DBVersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: if m.enum_value == -1 { None } else { Some(LoaderFieldEnumValueId(m.enum_value)) },
@ -608,7 +608,7 @@ impl Project {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(m.enum_value));
}
acc.entry(ProjectId(m.mod_id)).or_default().push(qvf);
acc.entry(DBProjectId(m.mod_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
@ -638,7 +638,7 @@ impl Project {
.try_collect()
.await?;
let mods_gallery: DashMap<ProjectId, Vec<GalleryItem>> = sqlx::query!(
let mods_gallery: DashMap<DBProjectId, Vec<GalleryItem>> = sqlx::query!(
"
SELECT DISTINCT mod_id, mg.image_url, mg.raw_image_url, mg.featured, mg.name, mg.description, mg.created, mg.ordering
FROM mods_gallery mg
@ -648,8 +648,8 @@ impl Project {
&project_ids_parsed,
&slugs
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<GalleryItem>>, m| {
acc.entry(ProjectId(m.mod_id))
.try_fold(DashMap::new(), |acc : DashMap<DBProjectId, Vec<GalleryItem>>, m| {
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push(GalleryItem {
image_url: m.image_url,
@ -664,7 +664,7 @@ impl Project {
}
).await?;
let links: DashMap<ProjectId, Vec<LinkUrl>> = sqlx::query!(
let links: DashMap<DBProjectId, Vec<LinkUrl>> = sqlx::query!(
"
SELECT DISTINCT joining_mod_id as mod_id, joining_platform_id as platform_id, lp.name as platform_name, url, lp.donation as donation
FROM mods_links ml
@ -675,8 +675,8 @@ impl Project {
&project_ids_parsed,
&slugs
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<LinkUrl>>, m| {
acc.entry(ProjectId(m.mod_id))
.try_fold(DashMap::new(), |acc : DashMap<DBProjectId, Vec<LinkUrl>>, m| {
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push(LinkUrl {
platform_id: LinkPlatformId(m.platform_id),
@ -697,7 +697,7 @@ impl Project {
}
let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<ProjectId, VersionLoaderData> = sqlx::query!(
let loaders_ptypes_games: DashMap<DBProjectId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT mod_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@ -718,7 +718,7 @@ impl Project {
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
).fetch(&mut *exec)
.map_ok(|m| {
let project_id = ProjectId(m.mod_id);
let project_id = DBProjectId(m.mod_id);
// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
@ -784,14 +784,14 @@ impl Project {
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc, m| {
let id = m.id;
let project_id = ProjectId(id);
let project_id = DBProjectId(id);
let VersionLoaderData {
loaders,
project_types,
games,
loader_loader_field_ids,
} = loaders_ptypes_games.remove(&project_id).map(|x|x.1).unwrap_or_default();
// Each version is a tuple of (VersionId, DateTime<Utc>)
// Each version is a tuple of (DBVersionId, DateTime<Utc>)
let mut versions = versions.remove(&project_id).map(|x| x.1).unwrap_or_default();
versions.sort_by(|a, b| a.1.cmp(&b.1));
let mut gallery = mods_gallery.remove(&project_id).map(|x| x.1).unwrap_or_default();
@ -804,9 +804,9 @@ impl Project {
let project = QueryProject {
inner: Project {
id: ProjectId(id),
team_id: TeamId(m.team_id),
organization_id: m.organization_id.map(OrganizationId),
id: DBProjectId(id),
team_id: DBTeamId(m.team_id),
organization_id: m.organization_id.map(DBOrganizationId),
name: m.name.clone(),
summary: m.summary.clone(),
downloads: m.downloads,
@ -847,7 +847,7 @@ impl Project {
},
urls,
aggregate_version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, true),
thread_id: ThreadId(m.thread_id),
thread_id: DBThreadId(m.thread_id),
};
acc.insert(m.id, (m.slug, project));
@ -863,18 +863,25 @@ impl Project {
}
pub async fn get_dependencies<'a, E>(
id: ProjectId,
id: DBProjectId,
exec: E,
redis: &RedisPool,
) -> Result<
Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>,
Vec<(
Option<DBVersionId>,
Option<DBProjectId>,
Option<DBProjectId>,
)>,
DatabaseError,
>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
type Dependencies =
Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>;
type Dependencies = Vec<(
Option<DBVersionId>,
Option<DBProjectId>,
Option<DBProjectId>,
)>;
let mut redis = redis.connect().await?;
@ -896,18 +903,18 @@ impl Project {
LEFT JOIN versions vd ON d.dependency_id = vd.id
WHERE v.mod_id = $1
",
id as ProjectId
id as DBProjectId
)
.fetch(exec)
.map_ok(|x| {
(
x.dependency_id.map(VersionId),
x.dependency_id.map(DBVersionId),
if x.mod_id == Some(0) {
None
} else {
x.mod_id.map(ProjectId)
x.mod_id.map(DBProjectId)
},
x.mod_dependency_id.map(ProjectId),
x.mod_dependency_id.map(DBProjectId),
)
})
.try_collect::<Dependencies>()
@ -925,7 +932,7 @@ impl Project {
}
pub async fn clear_cache(
id: ProjectId,
id: DBProjectId,
slug: Option<String>,
clear_dependencies: Option<bool>,
redis: &RedisPool,
@ -955,11 +962,11 @@ pub struct QueryProject {
pub inner: Project,
pub categories: Vec<String>,
pub additional_categories: Vec<String>,
pub versions: Vec<VersionId>,
pub versions: Vec<DBVersionId>,
pub project_types: Vec<String>,
pub games: Vec<String>,
pub urls: Vec<LinkUrl>,
pub gallery_items: Vec<GalleryItem>,
pub thread_id: ThreadId,
pub thread_id: DBThreadId,
pub aggregate_version_fields: Vec<VersionField>,
}
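
get_many_ids streams rows and folds them into DashMaps keyed by project, as the versions, version_fields, mods_gallery, and links queries above all do. A reduced sketch of that grouping step with the dashmap and futures crates, using fabricated in-memory rows instead of a real query stream; the i64 aliases stand in for the newtype ID structs.

use dashmap::DashMap;
use futures::{TryStreamExt, executor::block_on, stream};

type DBProjectId = i64;
type DBVersionId = i64;

fn main() {
    // Pretend these rows came out of the versions query: (mod_id, version id).
    let rows: Vec<Result<(DBProjectId, DBVersionId), ()>> =
        vec![Ok((1, 10)), Ok((1, 11)), Ok((2, 20))];

    let grouped: DashMap<DBProjectId, Vec<DBVersionId>> = block_on(
        stream::iter(rows).try_fold(DashMap::new(), |acc, (project, version)| {
            // Group each version under its project, then hand the map back.
            acc.entry(project).or_default().push(version);
            async move { Ok(acc) }
        }),
    )
    .expect("folding an in-memory stream cannot fail");

    assert_eq!(grouped.get(&1).map(|v| v.len()), Some(2));
}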


@ -2,28 +2,28 @@ use super::ids::*;
use chrono::{DateTime, Utc};
pub struct Report {
pub id: ReportId,
pub id: DBReportId,
pub report_type_id: ReportTypeId,
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub user_id: Option<UserId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub user_id: Option<DBUserId>,
pub body: String,
pub reporter: UserId,
pub reporter: DBUserId,
pub created: DateTime<Utc>,
pub closed: bool,
}
pub struct QueryReport {
pub id: ReportId,
pub id: DBReportId,
pub report_type: String,
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub user_id: Option<UserId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub user_id: Option<DBUserId>,
pub body: String,
pub reporter: UserId,
pub reporter: DBUserId,
pub created: DateTime<Utc>,
pub closed: bool,
pub thread_id: ThreadId,
pub thread_id: DBThreadId,
}
impl Report {
@ -42,13 +42,13 @@ impl Report {
$6, $7
)
",
self.id as ReportId,
self.id as DBReportId,
self.report_type_id as ReportTypeId,
self.project_id.map(|x| x.0 as i64),
self.version_id.map(|x| x.0 as i64),
self.user_id.map(|x| x.0 as i64),
self.body,
self.reporter as UserId
self.reporter as DBUserId
)
.execute(&mut **transaction)
.await?;
@ -57,7 +57,7 @@ impl Report {
}
pub async fn get<'a, E>(
id: ReportId,
id: DBReportId,
exec: E,
) -> Result<Option<QueryReport>, sqlx::Error>
where
@ -69,7 +69,7 @@ impl Report {
}
pub async fn get_many<'a, E>(
report_ids: &[ReportId],
report_ids: &[DBReportId],
exec: E,
) -> Result<Vec<QueryReport>, sqlx::Error>
where
@ -92,16 +92,16 @@ impl Report {
)
.fetch(exec)
.map_ok(|x| QueryReport {
id: ReportId(x.id),
id: DBReportId(x.id),
report_type: x.name,
project_id: x.mod_id.map(ProjectId),
version_id: x.version_id.map(VersionId),
user_id: x.user_id.map(UserId),
project_id: x.mod_id.map(DBProjectId),
version_id: x.version_id.map(DBVersionId),
user_id: x.user_id.map(DBUserId),
body: x.body,
reporter: UserId(x.reporter),
reporter: DBUserId(x.reporter),
created: x.created,
closed: x.closed,
thread_id: ThreadId(x.thread_id)
thread_id: DBThreadId(x.thread_id)
})
.try_collect::<Vec<QueryReport>>()
.await?;
@ -110,14 +110,14 @@ impl Report {
}
pub async fn remove_full(
id: ReportId,
id: DBReportId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
let result = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1)
",
id as ReportId
id as DBReportId
)
.fetch_one(&mut **transaction)
.await?;
@ -131,14 +131,14 @@ impl Report {
SELECT id FROM threads
WHERE report_id = $1
",
id as ReportId
id as DBReportId
)
.fetch_optional(&mut **transaction)
.await?;
if let Some(thread_id) = thread_id {
crate::database::models::Thread::remove_full(
ThreadId(thread_id.id),
DBThreadId(thread_id.id),
transaction,
)
.await?;
@ -148,7 +148,7 @@ impl Report {
"
DELETE FROM reports WHERE id = $1
",
id as ReportId,
id as DBReportId,
)
.execute(&mut **transaction)
.await?;
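
The `id as DBReportId` casts inside the query! invocations above are sqlx parameter type overrides; they require the newtype to encode like its inner i64. A hedged sketch of one way to get that behaviour, pairing sqlx's transparent Type derive with a runtime-checked query; the reports table name comes from the diff, everything else (including whether the crate actually derives sqlx::Type this way) is an assumption.

use sqlx::PgPool;

// Forwards Type/Encode/Decode to the inner i64, so the newtype can be bound
// directly as a query parameter.
#[derive(Debug, Clone, Copy, sqlx::Type)]
#[sqlx(transparent)]
pub struct DBReportId(pub i64);

pub async fn report_exists(
    id: DBReportId,
    pool: &PgPool,
) -> Result<bool, sqlx::Error> {
    // Runtime-checked variant of the compile-time query! used in the diff.
    let (exists,): (bool,) =
        sqlx::query_as("SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1)")
            .bind(id)
            .fetch_one(pool)
            .await?;
    Ok(exists)
}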


@ -14,7 +14,7 @@ const SESSIONS_USERS_NAMESPACE: &str = "sessions_users";
pub struct SessionBuilder {
pub session: String,
pub user_id: UserId,
pub user_id: DBUserId,
pub os: Option<String>,
pub platform: Option<String>,
@ -30,7 +30,7 @@ impl SessionBuilder {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<SessionId, DatabaseError> {
) -> Result<DBSessionId, DatabaseError> {
let id = generate_session_id(transaction).await?;
sqlx::query!(
@ -44,9 +44,9 @@ impl SessionBuilder {
$6, $7, $8, $9
)
",
id as SessionId,
id as DBSessionId,
self.session,
self.user_id as UserId,
self.user_id as DBUserId,
self.os,
self.platform,
self.city,
@ -63,9 +63,9 @@ impl SessionBuilder {
#[derive(Deserialize, Serialize)]
pub struct Session {
pub id: SessionId,
pub id: DBSessionId,
pub session: String,
pub user_id: UserId,
pub user_id: DBUserId,
pub created: DateTime<Utc>,
pub last_login: DateTime<Utc>,
@ -100,7 +100,7 @@ impl Session {
}
pub async fn get_id<'a, 'b, E>(
id: SessionId,
id: DBSessionId,
executor: E,
redis: &RedisPool,
) -> Result<Option<Session>, DatabaseError>
@ -117,7 +117,7 @@ impl Session {
}
pub async fn get_many_ids<'a, E>(
session_ids: &[SessionId],
session_ids: &[DBSessionId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Session>, DatabaseError>
@ -174,9 +174,9 @@ impl Session {
.fetch(exec)
.try_fold(DashMap::new(), |acc, x| {
let session = Session {
id: SessionId(x.id),
id: DBSessionId(x.id),
session: x.session.clone(),
user_id: UserId(x.user_id),
user_id: DBUserId(x.user_id),
created: x.created,
last_login: x.last_login,
expires: x.expires,
@ -202,10 +202,10 @@ impl Session {
}
pub async fn get_user_sessions<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<SessionId>, DatabaseError>
) -> Result<Vec<DBSessionId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -219,11 +219,11 @@ impl Session {
.await?;
if let Some(res) = res {
return Ok(res.into_iter().map(SessionId).collect());
return Ok(res.into_iter().map(DBSessionId).collect());
}
use futures::TryStreamExt;
let db_sessions: Vec<SessionId> = sqlx::query!(
let db_sessions: Vec<DBSessionId> = sqlx::query!(
"
SELECT id
FROM sessions
@ -233,8 +233,8 @@ impl Session {
user_id.0,
)
.fetch(exec)
.map_ok(|x| SessionId(x.id))
.try_collect::<Vec<SessionId>>()
.map_ok(|x| DBSessionId(x.id))
.try_collect::<Vec<DBSessionId>>()
.await?;
redis
@ -251,9 +251,9 @@ impl Session {
pub async fn clear_cache(
clear_sessions: Vec<(
Option<SessionId>,
Option<DBSessionId>,
Option<String>,
Option<UserId>,
Option<DBUserId>,
)>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
@ -281,14 +281,14 @@ impl Session {
}
pub async fn remove(
id: SessionId,
id: DBSessionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
sqlx::query!(
"
DELETE FROM sessions WHERE id = $1
",
id as SessionId,
id as DBSessionId,
)
.execute(&mut **transaction)
.await?;
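
get_user_sessions above is a cache-aside read: try Redis, fall back to the database, then write the result back under the sessions_users namespace. A std-only sketch of that flow with in-memory stand-ins for both stores; the key layout and the i64 aliases are assumptions for illustration.

use std::collections::HashMap;

type DBUserId = i64;
type DBSessionId = i64;

struct Stores {
    redis: HashMap<String, Vec<DBSessionId>>, // stand-in for the Redis cache
    db: HashMap<DBUserId, Vec<DBSessionId>>,  // stand-in for the sessions table
}

// Cache-aside: return cached session ids when present, otherwise read the
// "database" and populate the cache before returning.
fn get_user_sessions(stores: &mut Stores, user_id: DBUserId) -> Vec<DBSessionId> {
    let key = format!("sessions_users:{user_id}");
    if let Some(hit) = stores.redis.get(&key) {
        return hit.clone();
    }
    let fresh = stores.db.get(&user_id).cloned().unwrap_or_default();
    stores.redis.insert(key, fresh.clone());
    fresh
}

fn main() {
    let mut stores = Stores {
        redis: HashMap::new(),
        db: HashMap::from([(7, vec![100, 101])]),
    };
    assert_eq!(get_user_sessions(&mut stores, 7), vec![100, 101]);
    assert!(stores.redis.contains_key("sessions_users:7")); // now cached
}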


@ -15,7 +15,7 @@ pub struct TeamBuilder {
pub members: Vec<TeamMemberBuilder>,
}
pub struct TeamMemberBuilder {
pub user_id: UserId,
pub user_id: DBUserId,
pub role: String,
pub is_owner: bool,
pub permissions: ProjectPermissions,
@ -29,7 +29,7 @@ impl TeamBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<TeamId, super::DatabaseError> {
) -> Result<DBTeamId, super::DatabaseError> {
let team_id = generate_team_id(transaction).await?;
let team = Team { id: team_id };
@ -39,7 +39,7 @@ impl TeamBuilder {
INSERT INTO teams (id)
VALUES ($1)
",
team.id as TeamId,
team.id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@ -111,18 +111,18 @@ impl TeamBuilder {
/// A team of users who control a project
pub struct Team {
/// The id of the team
pub id: TeamId,
pub id: DBTeamId,
}
#[derive(Deserialize, Serialize, Clone, Debug, Copy)]
pub enum TeamAssociationId {
Project(ProjectId),
Organization(OrganizationId),
Project(DBProjectId),
Organization(DBOrganizationId),
}
impl Team {
pub async fn get_association<'a, 'b, E>(
id: TeamId,
id: DBTeamId,
executor: E,
) -> Result<Option<TeamAssociationId>, super::DatabaseError>
where
@ -133,14 +133,14 @@ impl Team {
SELECT m.id AS pid, NULL AS oid
FROM mods m
WHERE m.team_id = $1
UNION ALL
SELECT NULL AS pid, o.id AS oid
FROM organizations o
WHERE o.team_id = $1
WHERE o.team_id = $1
",
id as TeamId
id as DBTeamId
)
.fetch_optional(executor)
.await?;
@ -150,11 +150,12 @@ impl Team {
let mut team_association_id = None;
if let Some(pid) = t.pid {
team_association_id =
Some(TeamAssociationId::Project(ProjectId(pid)));
Some(TeamAssociationId::Project(DBProjectId(pid)));
}
if let Some(oid) = t.oid {
team_association_id =
Some(TeamAssociationId::Organization(OrganizationId(oid)));
team_association_id = Some(TeamAssociationId::Organization(
DBOrganizationId(oid),
));
}
return Ok(team_association_id);
}
@ -165,11 +166,11 @@ impl Team {
/// A member of a team
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct TeamMember {
pub id: TeamMemberId,
pub team_id: TeamId,
pub id: DBTeamMemberId,
pub team_id: DBTeamId,
/// The ID of the user associated with the member
pub user_id: UserId,
pub user_id: DBUserId,
pub role: String,
pub is_owner: bool,
@ -189,7 +190,7 @@ pub struct TeamMember {
impl TeamMember {
// Lists the full members of a team
pub async fn get_from_team_full<'a, 'b, E>(
id: TeamId,
id: DBTeamId,
executor: E,
redis: &RedisPool,
) -> Result<Vec<TeamMember>, super::DatabaseError>
@ -200,7 +201,7 @@ impl TeamMember {
}
pub async fn get_from_team_full_many<'a, E>(
team_ids: &[TeamId],
team_ids: &[DBTeamId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<TeamMember>, super::DatabaseError>
@ -229,8 +230,8 @@ impl TeamMember {
.fetch(exec)
.try_fold(DashMap::new(), |acc: DashMap<i64, Vec<TeamMember>>, m| {
let member = TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
role: m.member_role,
is_owner: m.is_owner,
permissions: ProjectPermissions::from_bits(m.permissions as u64)
@ -239,7 +240,7 @@ impl TeamMember {
.organization_permissions
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
accepted: m.accepted,
user_id: UserId(m.user_id),
user_id: DBUserId(m.user_id),
payouts_split: m.payouts_split,
ordering: m.ordering,
};
@ -260,7 +261,7 @@ impl TeamMember {
}
pub async fn clear_cache(
id: TeamId,
id: DBTeamId,
redis: &RedisPool,
) -> Result<(), super::DatabaseError> {
let mut redis = redis.connect().await?;
@ -270,8 +271,8 @@ impl TeamMember {
/// Gets a team member from a user id and team id. Does not return pending members.
pub async fn get_from_user_id<'a, 'b, E>(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@ -284,8 +285,8 @@ impl TeamMember {
/// Gets team members from user ids and team ids. Does not return pending members.
pub async fn get_from_user_id_many<'a, 'b, E>(
team_ids: &[TeamId],
user_id: UserId,
team_ids: &[DBTeamId],
user_id: DBUserId,
executor: E,
) -> Result<Vec<Self>, super::DatabaseError>
where
@ -303,12 +304,12 @@ impl TeamMember {
ORDER BY ordering
",
&team_ids_parsed,
user_id as UserId
user_id as DBUserId
)
.fetch(executor)
.map_ok(|m| TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@ -329,8 +330,8 @@ impl TeamMember {
/// Gets a team member from a user id and team id, including pending members.
pub async fn get_from_user_id_pending<'a, 'b, E>(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@ -341,20 +342,20 @@ impl TeamMember {
SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,
accepted, payouts_split, role,
ordering, user_id
FROM team_members
WHERE (team_id = $1 AND user_id = $2)
ORDER BY ordering
",
id as TeamId,
user_id as UserId
id as DBTeamId,
user_id as DBUserId
)
.fetch_optional(executor)
.await?;
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
id: DBTeamMemberId(m.id),
team_id: id,
user_id,
role: m.role,
@ -389,9 +390,9 @@ impl TeamMember {
$1, $2, $3, $4, $5, $6, $7, $8, $9
)
",
self.id as TeamMemberId,
self.team_id as TeamId,
self.user_id as UserId,
self.id as DBTeamMemberId,
self.team_id as DBTeamId,
self.user_id as DBUserId,
self.role,
self.permissions.bits() as i64,
self.organization_permissions.map(|p| p.bits() as i64),
@ -406,8 +407,8 @@ impl TeamMember {
}
pub async fn delete(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), super::DatabaseError> {
sqlx::query!(
@ -415,8 +416,8 @@ impl TeamMember {
DELETE FROM team_members
WHERE (team_id = $1 AND user_id = $2 AND NOT is_owner = TRUE)
",
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -426,8 +427,8 @@ impl TeamMember {
#[allow(clippy::too_many_arguments)]
pub async fn edit_team_member(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
new_permissions: Option<ProjectPermissions>,
new_organization_permissions: Option<OrganizationPermissions>,
new_role: Option<String>,
@ -445,8 +446,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
permissions.bits() as i64,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -460,8 +461,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
organization_permissions.bits() as i64,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -475,8 +476,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
role,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -490,8 +491,8 @@ impl TeamMember {
SET accepted = TRUE
WHERE (team_id = $1 AND user_id = $2)
",
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -506,8 +507,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
payouts_split,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -521,8 +522,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
ordering,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -536,8 +537,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
is_owner,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -547,8 +548,8 @@ impl TeamMember {
}
pub async fn get_from_user_id_project<'a, 'b, E>(
id: ProjectId,
user_id: UserId,
id: DBProjectId,
user_id: DBUserId,
allow_pending: bool,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
@ -568,8 +569,8 @@ impl TeamMember {
INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 AND accepted = ANY($3)
WHERE m.id = $1
",
id as ProjectId,
user_id as UserId,
id as DBProjectId,
user_id as DBUserId,
&accepted
)
.fetch_optional(executor)
@ -577,8 +578,8 @@ impl TeamMember {
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@ -600,8 +601,8 @@ impl TeamMember {
}
pub async fn get_from_user_id_organization<'a, 'b, E>(
id: OrganizationId,
user_id: UserId,
id: DBOrganizationId,
user_id: DBUserId,
allow_pending: bool,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
@ -620,8 +621,8 @@ impl TeamMember {
INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2 AND accepted = ANY($3)
WHERE o.id = $1
",
id as OrganizationId,
user_id as UserId,
id as DBOrganizationId,
user_id as DBUserId,
&accepted
)
.fetch_optional(executor)
@ -629,8 +630,8 @@ impl TeamMember {
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@ -652,8 +653,8 @@ impl TeamMember {
}
pub async fn get_from_user_id_version<'a, 'b, E>(
id: VersionId,
user_id: UserId,
id: DBVersionId,
user_id: DBUserId,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@ -661,22 +662,22 @@ impl TeamMember {
{
let result = sqlx::query!(
"
SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id
SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id
FROM versions v
INNER JOIN mods m ON m.id = v.mod_id
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE
WHERE v.id = $1
",
id as VersionId,
user_id as UserId
id as DBVersionId,
user_id as DBUserId
)
.fetch_optional(executor)
.await?;
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@ -702,7 +703,7 @@ impl TeamMember {
// - organization team member (a user's membership to a given organization that owns a given project)
pub async fn get_for_project_permissions<'a, 'b, E>(
project: &Project,
user_id: UserId,
user_id: DBUserId,
executor: E,
) -> Result<(Option<Self>, Option<Self>), super::DatabaseError>
where


@ -6,35 +6,35 @@ use serde::{Deserialize, Serialize};
pub struct ThreadBuilder {
pub type_: ThreadType,
pub members: Vec<UserId>,
pub project_id: Option<ProjectId>,
pub report_id: Option<ReportId>,
pub members: Vec<DBUserId>,
pub project_id: Option<DBProjectId>,
pub report_id: Option<DBReportId>,
}
#[derive(Clone, Serialize)]
pub struct Thread {
pub id: ThreadId,
pub id: DBThreadId,
pub project_id: Option<ProjectId>,
pub report_id: Option<ReportId>,
pub project_id: Option<DBProjectId>,
pub report_id: Option<DBReportId>,
pub type_: ThreadType,
pub messages: Vec<ThreadMessage>,
pub members: Vec<UserId>,
pub members: Vec<DBUserId>,
}
pub struct ThreadMessageBuilder {
pub author_id: Option<UserId>,
pub author_id: Option<DBUserId>,
pub body: MessageBody,
pub thread_id: ThreadId,
pub thread_id: DBThreadId,
pub hide_identity: bool,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct ThreadMessage {
pub id: ThreadMessageId,
pub thread_id: ThreadId,
pub author_id: Option<UserId>,
pub id: DBThreadMessageId,
pub thread_id: DBThreadId,
pub author_id: Option<DBUserId>,
pub body: MessageBody,
pub created: DateTime<Utc>,
pub hide_identity: bool,
@ -44,7 +44,7 @@ impl ThreadMessageBuilder {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ThreadMessageId, DatabaseError> {
) -> Result<DBThreadMessageId, DatabaseError> {
let thread_message_id = generate_thread_message_id(transaction).await?;
sqlx::query!(
@ -56,10 +56,10 @@ impl ThreadMessageBuilder {
$1, $2, $3, $4, $5
)
",
thread_message_id as ThreadMessageId,
thread_message_id as DBThreadMessageId,
self.author_id.map(|x| x.0),
serde_json::value::to_value(self.body.clone())?,
self.thread_id as ThreadId,
self.thread_id as DBThreadId,
self.hide_identity
)
.execute(&mut **transaction)
@ -73,7 +73,7 @@ impl ThreadBuilder {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ThreadId, DatabaseError> {
) -> Result<DBThreadId, DatabaseError> {
let thread_id = generate_thread_id(&mut *transaction).await?;
sqlx::query!(
"
@ -84,7 +84,7 @@ impl ThreadBuilder {
$1, $2, $3, $4
)
",
thread_id as ThreadId,
thread_id as DBThreadId,
self.type_.as_str(),
self.project_id.map(|x| x.0),
self.report_id.map(|x| x.0),
@ -113,7 +113,7 @@ impl ThreadBuilder {
impl Thread {
pub async fn get<'a, E>(
id: ThreadId,
id: DBThreadId,
exec: E,
) -> Result<Option<Thread>, sqlx::Error>
where
@ -125,7 +125,7 @@ impl Thread {
}
pub async fn get_many<'a, E>(
thread_ids: &[ThreadId],
thread_ids: &[DBThreadId],
exec: E,
) -> Result<Vec<Thread>, sqlx::Error>
where
@ -150,9 +150,9 @@ impl Thread {
)
.fetch(exec)
.map_ok(|x| Thread {
id: ThreadId(x.id),
project_id: x.mod_id.map(ProjectId),
report_id: x.report_id.map(ReportId),
id: DBThreadId(x.id),
project_id: x.mod_id.map(DBProjectId),
report_id: x.report_id.map(DBReportId),
type_: ThreadType::from_string(&x.thread_type),
messages: {
let mut messages: Vec<ThreadMessage> = serde_json::from_value(
@ -163,7 +163,7 @@ impl Thread {
messages.sort_by(|a, b| a.created.cmp(&b.created));
messages
},
members: x.members.unwrap_or_default().into_iter().map(UserId).collect(),
members: x.members.unwrap_or_default().into_iter().map(DBUserId).collect(),
})
.try_collect::<Vec<Thread>>()
.await?;
@ -172,7 +172,7 @@ impl Thread {
}
pub async fn remove_full(
id: ThreadId,
id: DBThreadId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
sqlx::query!(
@ -180,7 +180,7 @@ impl Thread {
DELETE FROM threads_messages
WHERE thread_id = $1
",
id as ThreadId,
id as DBThreadId,
)
.execute(&mut **transaction)
.await?;
@ -189,7 +189,7 @@ impl Thread {
DELETE FROM threads_members
WHERE thread_id = $1
",
id as ThreadId
id as DBThreadId
)
.execute(&mut **transaction)
.await?;
@ -198,7 +198,7 @@ impl Thread {
DELETE FROM threads
WHERE id = $1
",
id as ThreadId,
id as DBThreadId,
)
.execute(&mut **transaction)
.await?;
@ -209,7 +209,7 @@ impl Thread {
impl ThreadMessage {
pub async fn get<'a, E>(
id: ThreadMessageId,
id: DBThreadMessageId,
exec: E,
) -> Result<Option<ThreadMessage>, sqlx::Error>
where
@ -221,7 +221,7 @@ impl ThreadMessage {
}
pub async fn get_many<'a, E>(
message_ids: &[ThreadMessageId],
message_ids: &[DBThreadMessageId],
exec: E,
) -> Result<Vec<ThreadMessage>, sqlx::Error>
where
@ -241,9 +241,9 @@ impl ThreadMessage {
)
.fetch(exec)
.map_ok(|x| ThreadMessage {
id: ThreadMessageId(x.id),
thread_id: ThreadId(x.thread_id),
author_id: x.author_id.map(UserId),
id: DBThreadMessageId(x.id),
thread_id: DBThreadId(x.thread_id),
author_id: x.author_id.map(DBUserId),
body: serde_json::from_value(x.body).unwrap_or(MessageBody::Deleted { private: false }),
created: x.created,
hide_identity: x.hide_identity,
@ -255,7 +255,7 @@ impl ThreadMessage {
}
pub async fn remove_full(
id: ThreadMessageId,
id: DBThreadMessageId,
private: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
@ -265,7 +265,7 @@ impl ThreadMessage {
SET body = $2
WHERE id = $1
",
id as ThreadMessageId,
id as DBThreadMessageId,
serde_json::to_value(MessageBody::Deleted { private })
.unwrap_or(serde_json::json!({}))
)
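
Deleted thread messages are never dropped; their body is overwritten with a deleted payload, and bodies that fail to decode fall back to MessageBody::Deleted in get_many above. A reduced sketch of that tagged-enum round trip, assuming serde's internal tagging; the real MessageBody has more variants and fields than shown here.

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
enum MessageBody {
    Text { body: String },     // illustrative subset of the real variants
    Deleted { private: bool },
}

fn main() -> Result<(), serde_json::Error> {
    let soft_deleted = serde_json::to_value(MessageBody::Deleted { private: false })?;
    assert_eq!(soft_deleted["type"], "deleted");

    // Unknown or malformed bodies decode to a deleted placeholder, much like
    // the unwrap_or(...) in the hunk above.
    let body: MessageBody = serde_json::from_value(soft_deleted)
        .unwrap_or(MessageBody::Deleted { private: false });
    assert_eq!(body, MessageBody::Deleted { private: false });
    Ok(())
}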


@ -1,9 +1,9 @@
use super::ids::{ProjectId, UserId};
use super::{CollectionId, ReportId, ThreadId};
use super::ids::{DBProjectId, DBUserId};
use super::{DBCollectionId, DBReportId, DBThreadId};
use crate::database::models;
use crate::database::models::charge_item::ChargeItem;
use crate::database::models::user_subscription_item::UserSubscriptionItem;
use crate::database::models::{DatabaseError, OrganizationId};
use crate::database::models::{DBOrganizationId, DatabaseError};
use crate::database::redis::RedisPool;
use crate::models::billing::ChargeStatus;
use crate::models::users::Badges;
@ -20,7 +20,7 @@ const USERS_PROJECTS_NAMESPACE: &str = "users_projects";
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct User {
pub id: UserId,
pub id: DBUserId,
pub github_id: Option<i64>,
pub discord_id: Option<i64>,
@ -72,7 +72,7 @@ impl User {
$14, $15, $16, $17, $18, $19, $20, $21
)
",
self.id as UserId,
self.id as DBUserId,
&self.username,
self.email.as_ref(),
self.avatar_url.as_ref(),
@ -114,20 +114,20 @@ impl User {
}
pub async fn get_id<'a, 'b, E>(
id: UserId,
id: DBUserId,
executor: E,
redis: &RedisPool,
) -> Result<Option<User>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
User::get_many(&[crate::models::ids::UserId::from(id)], executor, redis)
User::get_many(&[ariadne::ids::UserId::from(id)], executor, redis)
.await
.map(|x| x.into_iter().next())
}
pub async fn get_many_ids<'a, E>(
user_ids: &[UserId],
user_ids: &[DBUserId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<User>, DatabaseError>
@ -136,7 +136,7 @@ impl User {
{
let ids = user_ids
.iter()
.map(|x| crate::models::ids::UserId::from(*x))
.map(|x| ariadne::ids::UserId::from(*x))
.collect::<Vec<_>>();
User::get_many(&ids, exec, redis).await
}
@ -188,7 +188,7 @@ impl User {
.fetch(exec)
.try_fold(DashMap::new(), |acc, u| {
let user = User {
id: UserId(u.id),
id: DBUserId(u.id),
github_id: u.github_id,
discord_id: u.discord_id,
gitlab_id: u.gitlab_id,
@ -227,7 +227,7 @@ impl User {
pub async fn get_email<'a, E>(
email: &str,
exec: E,
) -> Result<Option<UserId>, sqlx::Error>
) -> Result<Option<DBUserId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -241,14 +241,14 @@ impl User {
.fetch_optional(exec)
.await?;
Ok(user_pass.map(|x| UserId(x.id)))
Ok(user_pass.map(|x| DBUserId(x.id)))
}
pub async fn get_projects<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<ProjectId>, DatabaseError>
) -> Result<Vec<DBProjectId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -257,7 +257,7 @@ impl User {
let mut redis = redis.connect().await?;
let cached_projects = redis
.get_deserialized_from_json::<Vec<ProjectId>>(
.get_deserialized_from_json::<Vec<DBProjectId>>(
USERS_PROJECTS_NAMESPACE,
&user_id.0.to_string(),
)
@ -274,11 +274,11 @@ impl User {
WHERE tm.user_id = $1
ORDER BY m.downloads DESC
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| ProjectId(m.id))
.try_collect::<Vec<ProjectId>>()
.map_ok(|m| DBProjectId(m.id))
.try_collect::<Vec<DBProjectId>>()
.await?;
redis
@ -294,9 +294,9 @@ impl User {
}
pub async fn get_organizations<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<OrganizationId>, sqlx::Error>
) -> Result<Vec<DBOrganizationId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -308,20 +308,20 @@ impl User {
INNER JOIN team_members tm ON tm.team_id = o.team_id AND tm.accepted = TRUE
WHERE tm.user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| OrganizationId(m.id))
.try_collect::<Vec<OrganizationId>>()
.map_ok(|m| DBOrganizationId(m.id))
.try_collect::<Vec<DBOrganizationId>>()
.await?;
Ok(orgs)
}
pub async fn get_collections<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<CollectionId>, sqlx::Error>
) -> Result<Vec<DBCollectionId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -332,20 +332,20 @@ impl User {
SELECT c.id FROM collections c
WHERE c.user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| CollectionId(m.id))
.try_collect::<Vec<CollectionId>>()
.map_ok(|m| DBCollectionId(m.id))
.try_collect::<Vec<DBCollectionId>>()
.await?;
Ok(projects)
}
pub async fn get_follows<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<ProjectId>, sqlx::Error>
) -> Result<Vec<DBProjectId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -356,20 +356,20 @@ impl User {
SELECT mf.mod_id FROM mod_follows mf
WHERE mf.follower_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| ProjectId(m.mod_id))
.try_collect::<Vec<ProjectId>>()
.map_ok(|m| DBProjectId(m.mod_id))
.try_collect::<Vec<DBProjectId>>()
.await?;
Ok(projects)
}
pub async fn get_reports<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<ReportId>, sqlx::Error>
) -> Result<Vec<DBReportId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@ -380,18 +380,18 @@ impl User {
SELECT r.id FROM reports r
WHERE r.user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| ReportId(m.id))
.try_collect::<Vec<ReportId>>()
.map_ok(|m| DBReportId(m.id))
.try_collect::<Vec<DBReportId>>()
.await?;
Ok(reports)
}
pub async fn get_backup_codes<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<String>, sqlx::Error>
where
@ -404,7 +404,7 @@ impl User {
SELECT code FROM user_backup_codes
WHERE user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| to_base62(m.code as u64))
@ -415,7 +415,7 @@ impl User {
}
pub async fn clear_caches(
user_ids: &[(UserId, Option<String>)],
user_ids: &[(DBUserId, Option<String>)],
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@ -435,7 +435,7 @@ impl User {
}
pub async fn clear_project_cache(
user_ids: &[UserId],
user_ids: &[DBUserId],
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@ -452,7 +452,7 @@ impl User {
}
pub async fn remove(
id: UserId,
id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@ -462,7 +462,7 @@ impl User {
User::clear_caches(&[(id, Some(delete_user.username))], redis)
.await?;
let deleted_user: UserId =
let deleted_user: DBUserId =
crate::models::users::DELETED_USER.into();
sqlx::query!(
@ -471,8 +471,8 @@ impl User {
SET user_id = $1
WHERE (user_id = $2 AND is_owner = TRUE)
",
deleted_user as UserId,
id as UserId,
deleted_user as DBUserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -483,8 +483,8 @@ impl User {
SET author_id = $1
WHERE (author_id = $2)
",
deleted_user as UserId,
id as UserId,
deleted_user as DBUserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -495,7 +495,7 @@ impl User {
SELECT n.id FROM notifications n
WHERE n.user_id = $1
",
id as UserId,
id as DBUserId,
)
.fetch(&mut **transaction)
.map_ok(|m| m.id)
@ -507,7 +507,7 @@ impl User {
DELETE FROM notifications
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -528,10 +528,10 @@ impl User {
FROM collections
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.fetch(&mut **transaction)
.map_ok(|x| CollectionId(x.id))
.map_ok(|x| DBCollectionId(x.id))
.try_collect::<Vec<_>>()
.await?;
@ -547,10 +547,10 @@ impl User {
INNER JOIN reports r ON t.report_id = r.id AND (r.user_id = $1 OR r.reporter = $1)
WHERE report_id IS NOT NULL
",
id as UserId,
id as DBUserId,
)
.fetch(&mut **transaction)
.map_ok(|x| ThreadId(x.id))
.map_ok(|x| DBThreadId(x.id))
.try_collect::<Vec<_>>()
.await?;
@ -563,7 +563,7 @@ impl User {
DELETE FROM reports
WHERE user_id = $1 OR reporter = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -573,7 +573,7 @@ impl User {
DELETE FROM mod_follows
WHERE follower_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -583,7 +583,7 @@ impl User {
DELETE FROM team_members
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -593,7 +593,7 @@ impl User {
DELETE FROM payouts_values
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -603,7 +603,7 @@ impl User {
DELETE FROM payouts
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -614,8 +614,8 @@ impl User {
SET body = '{"type": "deleted"}', author_id = $2
WHERE author_id = $1
"#,
id as UserId,
deleted_user as UserId,
id as DBUserId,
deleted_user as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -625,7 +625,7 @@ impl User {
DELETE FROM threads_members
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -635,7 +635,7 @@ impl User {
DELETE FROM sessions
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -645,7 +645,7 @@ impl User {
DELETE FROM pats
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -655,7 +655,7 @@ impl User {
DELETE FROM friends
WHERE user_id = $1 OR friend_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -683,8 +683,8 @@ impl User {
SET user_id = $1
WHERE user_id = $2
",
deleted_user as UserId,
id as UserId,
deleted_user as DBUserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -694,7 +694,7 @@ impl User {
DELETE FROM user_backup_codes
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -704,7 +704,7 @@ impl User {
DELETE FROM users
WHERE id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;

View File

@ -1,5 +1,5 @@
use crate::database::models::{
DatabaseError, ProductPriceId, UserId, UserSubscriptionId,
DBProductPriceId, DBUserId, DBUserSubscriptionId, DatabaseError,
};
use crate::models::billing::{
PriceDuration, SubscriptionMetadata, SubscriptionStatus,
@ -9,9 +9,9 @@ use itertools::Itertools;
use std::convert::{TryFrom, TryInto};
pub struct UserSubscriptionItem {
pub id: UserSubscriptionId,
pub user_id: UserId,
pub price_id: ProductPriceId,
pub id: DBUserSubscriptionId,
pub user_id: DBUserId,
pub price_id: DBProductPriceId,
pub interval: PriceDuration,
pub created: DateTime<Utc>,
pub status: SubscriptionStatus,
@ -48,9 +48,9 @@ impl TryFrom<UserSubscriptionResult> for UserSubscriptionItem {
fn try_from(r: UserSubscriptionResult) -> Result<Self, Self::Error> {
Ok(UserSubscriptionItem {
id: UserSubscriptionId(r.id),
user_id: UserId(r.user_id),
price_id: ProductPriceId(r.price_id),
id: DBUserSubscriptionId(r.id),
user_id: DBUserId(r.user_id),
price_id: DBProductPriceId(r.price_id),
interval: PriceDuration::from_string(&r.interval),
created: r.created,
status: SubscriptionStatus::from_string(&r.status),
@ -61,14 +61,14 @@ impl TryFrom<UserSubscriptionResult> for UserSubscriptionItem {
impl UserSubscriptionItem {
pub async fn get(
id: UserSubscriptionId,
id: DBUserSubscriptionId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<UserSubscriptionItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[UserSubscriptionId],
ids: &[DBUserSubscriptionId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@ -87,7 +87,7 @@ impl UserSubscriptionItem {
}
pub async fn get_all_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let user_id = user_id.0;

View File

@ -20,9 +20,9 @@ const VERSION_FILES_NAMESPACE: &str = "versions_files";
#[derive(Clone)]
pub struct VersionBuilder {
pub version_id: VersionId,
pub project_id: ProjectId,
pub author_id: UserId,
pub version_id: DBVersionId,
pub project_id: DBProjectId,
pub author_id: DBUserId,
pub name: String,
pub version_number: String,
pub changelog: String,
@ -39,8 +39,8 @@ pub struct VersionBuilder {
#[derive(Clone)]
pub struct DependencyBuilder {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub file_name: Option<String>,
pub dependency_type: String,
}
@ -48,7 +48,7 @@ pub struct DependencyBuilder {
impl DependencyBuilder {
pub async fn insert_many(
builders: Vec<Self>,
version_id: VersionId,
version_id: DBVersionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let mut project_ids = Vec::new();
@ -97,7 +97,7 @@ impl DependencyBuilder {
async fn try_get_project_id(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<ProjectId>, DatabaseError> {
) -> Result<Option<DBProjectId>, DatabaseError> {
Ok(if let Some(project_id) = self.project_id {
Some(project_id)
} else if let Some(version_id) = self.version_id {
@ -105,11 +105,11 @@ impl DependencyBuilder {
"
SELECT mod_id FROM versions WHERE id = $1
",
version_id as VersionId,
version_id as DBVersionId,
)
.fetch_optional(&mut **transaction)
.await?
.map(|x| ProjectId(x.mod_id))
.map(|x| DBProjectId(x.mod_id))
} else {
None
})
@ -129,9 +129,9 @@ pub struct VersionFileBuilder {
impl VersionFileBuilder {
pub async fn insert(
self,
version_id: VersionId,
version_id: DBVersionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<FileId, DatabaseError> {
) -> Result<DBFileId, DatabaseError> {
let file_id = generate_file_id(&mut *transaction).await?;
sqlx::query!(
@ -139,8 +139,8 @@ impl VersionFileBuilder {
INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type)
VALUES ($1, $2, $3, $4, $5, $6, $7)
",
file_id as FileId,
version_id as VersionId,
file_id as DBFileId,
version_id as DBVersionId,
self.url,
self.filename,
self.primary,
@ -156,7 +156,7 @@ impl VersionFileBuilder {
INSERT INTO hashes (file_id, algorithm, hash)
VALUES ($1, $2, $3)
",
file_id as FileId,
file_id as DBFileId,
hash.algorithm,
hash.hash,
)
@ -178,7 +178,7 @@ impl VersionBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<VersionId, DatabaseError> {
) -> Result<DBVersionId, DatabaseError> {
let version = Version {
id: self.version_id,
project_id: self.project_id,
@ -203,7 +203,7 @@ impl VersionBuilder {
SET updated = NOW()
WHERE id = $1
",
self.project_id as ProjectId,
self.project_id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@ -245,7 +245,7 @@ impl VersionBuilder {
#[derive(Serialize, Deserialize)]
pub struct LoaderVersion {
pub loader_id: LoaderId,
pub version_id: VersionId,
pub version_id: DBVersionId,
}
impl LoaderVersion {
@ -274,9 +274,9 @@ impl LoaderVersion {
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct Version {
pub id: VersionId,
pub project_id: ProjectId,
pub author_id: UserId,
pub id: DBVersionId,
pub project_id: DBProjectId,
pub author_id: DBUserId,
pub name: String,
pub version_number: String,
pub changelog: String,
@ -307,9 +307,9 @@ impl Version {
$9, $10, $11, $12
)
",
self.id as VersionId,
self.project_id as ProjectId,
self.author_id as UserId,
self.id as DBVersionId,
self.project_id as DBProjectId,
self.author_id as DBUserId,
&self.name,
&self.version_number,
self.changelog,
@ -327,7 +327,7 @@ impl Version {
}
pub async fn remove_full(
id: VersionId,
id: DBVersionId,
redis: &RedisPool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, DatabaseError> {
@ -347,7 +347,7 @@ impl Version {
SET version_id = NULL
WHERE version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@ -357,7 +357,7 @@ impl Version {
DELETE FROM version_fields vf
WHERE vf.version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@ -367,7 +367,7 @@ impl Version {
DELETE FROM loaders_versions
WHERE loaders_versions.version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@ -381,7 +381,7 @@ impl Version {
(hashes.file_id = files.id)
)
",
id as VersionId
id as DBVersionId
)
.execute(&mut **transaction)
.await?;
@ -391,7 +391,7 @@ impl Version {
DELETE FROM files
WHERE files.version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@ -402,7 +402,7 @@ impl Version {
"
SELECT mod_id FROM versions WHERE id = $1
",
id as VersionId,
id as DBVersionId,
)
.fetch_one(&mut **transaction)
.await?;
@ -413,7 +413,7 @@ impl Version {
SET dependency_id = NULL, mod_dependency_id = $2
WHERE dependency_id = $1
",
id as VersionId,
id as DBVersionId,
project_id.mod_id,
)
.execute(&mut **transaction)
@ -431,7 +431,7 @@ impl Version {
"
DELETE FROM dependencies WHERE dependent_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@ -442,13 +442,13 @@ impl Version {
"
DELETE FROM versions WHERE id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
crate::database::models::Project::clear_cache(
ProjectId(project_id.mod_id),
DBProjectId(project_id.mod_id),
None,
None,
redis,
@ -459,7 +459,7 @@ impl Version {
}
pub async fn get<'a, 'b, E>(
id: VersionId,
id: DBVersionId,
executor: E,
redis: &RedisPool,
) -> Result<Option<QueryVersion>, DatabaseError>
@ -472,7 +472,7 @@ impl Version {
}
pub async fn get_many<'a, E>(
version_ids: &[VersionId],
version_ids: &[DBVersionId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<QueryVersion>, DatabaseError>
@ -486,7 +486,7 @@ impl Version {
let mut exec = exec.acquire().await?;
let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<VersionId, Vec<QueryVersionField>> = sqlx::query!(
let version_fields: DashMap<DBVersionId, Vec<QueryVersionField>> = sqlx::query!(
"
SELECT version_id, field_id, int_value, enum_value, string_value
FROM version_fields
@ -497,9 +497,9 @@ impl Version {
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<VersionId, Vec<QueryVersionField>>, m| {
|acc: DashMap<DBVersionId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
version_id: DBVersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: if m.enum_value == -1 { None } else { Some(LoaderFieldEnumValueId(m.enum_value)) },
@ -510,7 +510,7 @@ impl Version {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(m.enum_value));
}
acc.entry(VersionId(m.version_id)).or_default().push(qvf);
acc.entry(DBVersionId(m.version_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
@ -525,7 +525,7 @@ impl Version {
}
let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<VersionId, VersionLoaderData> = sqlx::query!(
let loaders_ptypes_games: DashMap<DBVersionId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT version_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@ -546,7 +546,7 @@ impl Version {
&version_ids
).fetch(&mut *exec)
.map_ok(|m| {
let version_id = VersionId(m.version_id);
let version_id = DBVersionId(m.version_id);
// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
@ -614,14 +614,14 @@ impl Version {
#[derive(Deserialize)]
struct Hash {
pub file_id: FileId,
pub file_id: DBFileId,
pub algorithm: String,
pub hash: String,
}
#[derive(Deserialize)]
struct File {
pub id: FileId,
pub id: DBFileId,
pub url: String,
pub filename: String,
pub primary: bool,
@ -631,7 +631,7 @@ impl Version {
let file_ids = DashSet::new();
let reverse_file_map = DashMap::new();
let files : DashMap<VersionId, Vec<File>> = sqlx::query!(
let files : DashMap<DBVersionId, Vec<File>> = sqlx::query!(
"
SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type
FROM files f
@ -639,9 +639,9 @@ impl Version {
",
&version_ids
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<File>>, m| {
.try_fold(DashMap::new(), |acc : DashMap<DBVersionId, Vec<File>>, m| {
let file = File {
id: FileId(m.id),
id: DBFileId(m.id),
url: m.url,
filename: m.filename,
primary: m.is_primary,
@ -649,17 +649,17 @@ impl Version {
file_type: m.file_type.map(|x| FileType::from_string(&x)),
};
file_ids.insert(FileId(m.id));
reverse_file_map.insert(FileId(m.id), VersionId(m.version_id));
file_ids.insert(DBFileId(m.id));
reverse_file_map.insert(DBFileId(m.id), DBVersionId(m.version_id));
acc.entry(VersionId(m.version_id))
acc.entry(DBVersionId(m.version_id))
.or_default()
.push(file);
async move { Ok(acc) }
}
).await?;
let hashes: DashMap<VersionId, Vec<Hash>> = sqlx::query!(
let hashes: DashMap<DBVersionId, Vec<Hash>> = sqlx::query!(
"
SELECT DISTINCT file_id, algorithm, encode(hash, 'escape') hash
FROM hashes
@ -668,15 +668,15 @@ impl Version {
&file_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc: DashMap<VersionId, Vec<Hash>>, m| {
.try_fold(DashMap::new(), |acc: DashMap<DBVersionId, Vec<Hash>>, m| {
if let Some(found_hash) = m.hash {
let hash = Hash {
file_id: FileId(m.file_id),
file_id: DBFileId(m.file_id),
algorithm: m.algorithm,
hash: found_hash,
};
if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) {
if let Some(version_id) = reverse_file_map.get(&DBFileId(m.file_id)) {
acc.entry(*version_id).or_default().push(hash);
}
}
@ -684,7 +684,7 @@ impl Version {
})
.await?;
let dependencies : DashMap<VersionId, Vec<QueryDependency>> = sqlx::query!(
let dependencies : DashMap<DBVersionId, Vec<QueryDependency>> = sqlx::query!(
"
SELECT DISTINCT dependent_id as version_id, d.mod_dependency_id as dependency_project_id, d.dependency_id as dependency_version_id, d.dependency_file_name as file_name, d.dependency_type as dependency_type
FROM dependencies d
@ -694,13 +694,13 @@ impl Version {
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<QueryDependency>>, m| {
let dependency = QueryDependency {
project_id: m.dependency_project_id.map(ProjectId),
version_id: m.dependency_version_id.map(VersionId),
project_id: m.dependency_project_id.map(DBProjectId),
version_id: m.dependency_version_id.map(DBVersionId),
file_name: m.file_name,
dependency_type: m.dependency_type,
};
acc.entry(VersionId(m.version_id))
acc.entry(DBVersionId(m.version_id))
.or_default()
.push(dependency);
async move { Ok(acc) }
@ -719,7 +719,7 @@ impl Version {
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc, v| {
let version_id = VersionId(v.id);
let version_id = DBVersionId(v.id);
let VersionLoaderData {
loaders,
project_types,
@ -737,9 +737,9 @@ impl Version {
let query_version = QueryVersion {
inner: Version {
id: VersionId(v.id),
project_id: ProjectId(v.mod_id),
author_id: UserId(v.author_id),
id: DBVersionId(v.id),
project_id: DBProjectId(v.mod_id),
author_id: DBUserId(v.author_id),
name: v.version_name,
version_number: v.version_number,
changelog: v.changelog,
@ -812,7 +812,7 @@ impl Version {
pub async fn get_file_from_hash<'a, 'b, E>(
algo: String,
hash: String,
version_id: Option<VersionId>,
version_id: Option<DBVersionId>,
executor: E,
redis: &RedisPool,
) -> Result<Option<SingleFile>, DatabaseError>
@ -873,9 +873,9 @@ impl Version {
let key = format!("{algorithm}_{hash}");
let file = SingleFile {
id: FileId(f.id),
version_id: VersionId(f.version_id),
project_id: ProjectId(f.mod_id),
id: DBFileId(f.id),
version_id: DBVersionId(f.version_id),
project_id: DBProjectId(f.mod_id),
url: f.url,
filename: f.filename,
hashes,
@ -940,15 +940,15 @@ pub struct QueryVersion {
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct QueryDependency {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub file_name: Option<String>,
pub dependency_type: String,
}
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct QueryFile {
pub id: FileId,
pub id: DBFileId,
pub url: String,
pub filename: String,
pub hashes: HashMap<String, String>,
@ -959,9 +959,9 @@ pub struct QueryFile {
#[derive(Clone, Deserialize, Serialize)]
pub struct SingleFile {
pub id: FileId,
pub version_id: VersionId,
pub project_id: ProjectId,
pub id: DBFileId,
pub version_id: DBVersionId,
pub project_id: DBProjectId,
pub url: String,
pub filename: String,
pub hashes: HashMap<String, String>,
@ -1037,11 +1037,11 @@ mod tests {
date_published: DateTime<Utc>,
) -> Version {
Version {
id: VersionId(id),
id: DBVersionId(id),
ordering,
date_published,
project_id: ProjectId(0),
author_id: UserId(0),
project_id: DBProjectId(0),
author_id: DBUserId(0),
name: Default::default(),
version_number: Default::default(),
changelog: Default::default(),

View File

@ -1,14 +1,14 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use crate::models::ids::{ThreadMessageId, VersionId};
use crate::models::{
ids::{
NotificationId, OrganizationId, ProjectId, ReportId, TeamId, ThreadId,
ThreadMessageId, UserId, VersionId,
},
notifications::{Notification, NotificationAction, NotificationBody},
projects::ProjectStatus,
};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
pub struct LegacyNotification {

View File

@ -3,17 +3,15 @@ use std::convert::TryFrom;
use std::collections::HashMap;
use super::super::ids::OrganizationId;
use super::super::teams::TeamId;
use super::super::users::UserId;
use crate::database::models::{DatabaseError, version_item};
use crate::database::redis::RedisPool;
use crate::models::ids::{ProjectId, VersionId};
use crate::models::ids::{ProjectId, TeamId, ThreadId, VersionId};
use crate::models::projects::{
Dependency, License, Link, Loader, ModeratorMessage, MonetizationStatus,
Project, ProjectStatus, Version, VersionFile, VersionStatus, VersionType,
};
use crate::models::threads::ThreadId;
use crate::routes::v2_reroute::{self, capitalize_first};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use itertools::Itertools;
use serde::{Deserialize, Serialize};

View File

@ -1,5 +1,6 @@
use crate::models::ids::{ReportId, ThreadId, UserId};
use crate::models::ids::{ReportId, ThreadId};
use crate::models::reports::{ItemType, Report};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

View File

@ -2,7 +2,8 @@ use crate::models::ids::{
ImageId, ProjectId, ReportId, ThreadId, ThreadMessageId,
};
use crate::models::projects::ProjectStatus;
use crate::models::users::{User, UserId};
use crate::models::users::User;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

View File

@ -1,10 +1,8 @@
use crate::{
auth::AuthProvider,
models::{
ids::UserId,
users::{Badges, Role, UserPayoutData},
},
models::users::{Badges, Role, UserPayoutData},
};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

View File

@ -1,14 +1,11 @@
use crate::models::ids::Base62Id;
use crate::models::ids::UserId;
use crate::models::ids::{
ChargeId, ProductId, ProductPriceId, UserSubscriptionId,
};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ProductId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct Product {
pub id: ProductId,
@ -29,11 +26,6 @@ pub enum ProductMetadata {
},
}
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ProductPriceId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct ProductPrice {
pub id: ProductPriceId,
@ -87,11 +79,6 @@ impl PriceDuration {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct UserSubscriptionId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct UserSubscription {
pub id: UserSubscriptionId,
@ -151,11 +138,6 @@ pub enum SubscriptionMetadata {
Pyro { id: String },
}
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ChargeId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct Charge {
pub id: ChargeId,

View File

@ -1,17 +1,10 @@
use super::{
ids::{Base62Id, ProjectId},
users::UserId,
};
use super::ids::ProjectId;
use crate::database;
use crate::models::ids::CollectionId;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// The ID of a specific collection, encoded as base62 for usage in the API
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct CollectionId(pub u64);
/// A collection returned from the API
#[derive(Serialize, Deserialize, Clone)]
pub struct Collection {

View File

@ -1,41 +1,25 @@
pub use super::collections::CollectionId;
pub use super::images::ImageId;
pub use super::notifications::NotificationId;
pub use super::oauth_clients::OAuthClientAuthorizationId;
pub use super::oauth_clients::{OAuthClientId, OAuthRedirectUriId};
pub use super::organizations::OrganizationId;
pub use super::pats::PatId;
pub use super::payouts::PayoutId;
pub use super::projects::{ProjectId, VersionId};
pub use super::reports::ReportId;
pub use super::sessions::SessionId;
pub use super::teams::TeamId;
pub use super::threads::ThreadId;
pub use super::threads::ThreadMessageId;
pub use crate::models::billing::{
ChargeId, ProductId, ProductPriceId, UserSubscriptionId,
};
pub use ariadne::ids::Base62Id;
use ariadne::ids::base62_id_impl;
pub use ariadne::users::UserId;
use ariadne::ids::base62_id;
base62_id_impl!(ProjectId, ProjectId);
base62_id_impl!(VersionId, VersionId);
base62_id_impl!(CollectionId, CollectionId);
base62_id_impl!(TeamId, TeamId);
base62_id_impl!(OrganizationId, OrganizationId);
base62_id_impl!(ReportId, ReportId);
base62_id_impl!(NotificationId, NotificationId);
base62_id_impl!(ThreadId, ThreadId);
base62_id_impl!(ThreadMessageId, ThreadMessageId);
base62_id_impl!(SessionId, SessionId);
base62_id_impl!(PatId, PatId);
base62_id_impl!(ImageId, ImageId);
base62_id_impl!(OAuthClientId, OAuthClientId);
base62_id_impl!(OAuthRedirectUriId, OAuthRedirectUriId);
base62_id_impl!(OAuthClientAuthorizationId, OAuthClientAuthorizationId);
base62_id_impl!(PayoutId, PayoutId);
base62_id_impl!(ProductId, ProductId);
base62_id_impl!(ProductPriceId, ProductPriceId);
base62_id_impl!(UserSubscriptionId, UserSubscriptionId);
base62_id_impl!(ChargeId, ChargeId);
base62_id!(ChargeId);
base62_id!(CollectionId);
base62_id!(FileId);
base62_id!(ImageId);
base62_id!(NotificationId);
base62_id!(OAuthAccessTokenId);
base62_id!(OAuthClientAuthorizationId);
base62_id!(OAuthClientId);
base62_id!(OAuthRedirectUriId);
base62_id!(OrganizationId);
base62_id!(PatId);
base62_id!(PayoutId);
base62_id!(ProductId);
base62_id!(ProductPriceId);
base62_id!(ProjectId);
base62_id!(ReportId);
base62_id!(SessionId);
base62_id!(TeamId);
base62_id!(TeamMemberId);
base62_id!(ThreadId);
base62_id!(ThreadMessageId);
base62_id!(UserSubscriptionId);
base62_id!(VersionId);
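For context on the hunk above: a minimal, illustrative sketch of what a base62_id!-style macro could expand to for these API-facing ID newtypes. This is an assumption for illustration, not the actual macro from the ariadne crate; its Base62Id stand-in skips the real base62 string encoding so the sketch stays self-contained, but it mirrors the per-type serde(from/into = "Base62Id") boilerplate this commit removes.

// Illustrative sketch only: the real base62_id! macro lives in the ariadne crate,
// and its Base62Id helper serializes IDs as base62 strings. Here Base62Id is a
// simplified stand-in so the example compiles on its own (requires serde with the
// "derive" feature).
use serde::{Deserialize, Serialize};

#[derive(Copy, Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct Base62Id(pub u64);

macro_rules! base62_id {
    ($name:ident) => {
        // Mirrors the per-type boilerplate removed by this commit: a Copy newtype
        // over u64 that round-trips through Base62Id when (de)serialized.
        #[derive(
            Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize,
        )]
        #[serde(from = "Base62Id", into = "Base62Id")]
        pub struct $name(pub u64);

        impl From<Base62Id> for $name {
            fn from(id: Base62Id) -> Self {
                Self(id.0)
            }
        }

        impl From<$name> for Base62Id {
            fn from(id: $name) -> Self {
                Base62Id(id.0)
            }
        }
    };
}

base62_id!(ProjectId);
base62_id!(VersionId);

fn main() {
    // Round-trip a ProjectId through the Base62Id conversion layer.
    let id = ProjectId(1234);
    let as_base62: Base62Id = id.into();
    assert_eq!(ProjectId::from(as_base62), id);
}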

View File

@ -1,18 +1,13 @@
use super::{
ids::{Base62Id, ProjectId, ThreadMessageId, VersionId},
ids::{ProjectId, ThreadMessageId, VersionId},
pats::Scopes,
reports::ReportId,
users::UserId,
};
use crate::database::models::image_item::Image as DBImage;
use crate::models::ids::{ImageId, ReportId};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ImageId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct Image {
pub id: ImageId,

View File

@ -1,20 +1,15 @@
use super::ids::Base62Id;
use super::ids::OrganizationId;
use super::users::UserId;
use crate::database::models::notification_item::Notification as DBNotification;
use crate::database::models::notification_item::NotificationAction as DBNotificationAction;
use crate::models::ids::{
ProjectId, ReportId, TeamId, ThreadId, ThreadMessageId, VersionId,
NotificationId, ProjectId, ReportId, TeamId, ThreadId, ThreadMessageId,
VersionId,
};
use crate::models::projects::ProjectStatus;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct NotificationId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct Notification {
pub id: NotificationId,

View File

@ -1,29 +1,14 @@
use super::{
ids::{Base62Id, UserId},
pats::Scopes,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use super::pats::Scopes;
use crate::database::models::oauth_client_authorization_item::OAuthClientAuthorization as DBOAuthClientAuthorization;
use crate::database::models::oauth_client_item::OAuthClient as DBOAuthClient;
use crate::database::models::oauth_client_item::OAuthRedirectUri as DBOAuthRedirectUri;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct OAuthClientId(pub u64);
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct OAuthClientAuthorizationId(pub u64);
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct OAuthRedirectUriId(pub u64);
use crate::models::ids::{
OAuthClientAuthorizationId, OAuthClientId, OAuthRedirectUriId,
};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
#[derive(Deserialize, Serialize)]
pub struct OAuthRedirectUri {

View File

@ -1,15 +1,7 @@
use super::{
ids::{Base62Id, TeamId},
teams::TeamMember,
};
use super::teams::TeamMember;
use crate::models::ids::{OrganizationId, TeamId};
use serde::{Deserialize, Serialize};
/// The ID of a team
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct OrganizationId(pub u64);
/// An organization of users who control a project
#[derive(Serialize, Deserialize)]
pub struct Organization {

View File

@ -1,15 +1,9 @@
use super::ids::Base62Id;
use crate::bitflags_serde_impl;
use crate::models::ids::UserId;
use crate::models::ids::PatId;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// The ID of a team
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct PatId(pub u64);
bitflags::bitflags! {
#[derive(Copy, Clone, Debug)]
pub struct Scopes: u64 {

View File

@ -1,13 +1,9 @@
use crate::models::ids::{Base62Id, UserId};
use crate::models::ids::PayoutId;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use rust_decimal::Decimal;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct PayoutId(pub u64);
#[derive(Serialize, Deserialize, Clone)]
pub struct Payout {
pub id: PayoutId,

View File

@ -1,28 +1,16 @@
use std::collections::{HashMap, HashSet};
use super::ids::{Base62Id, OrganizationId};
use super::teams::TeamId;
use super::users::UserId;
use crate::database::models::loader_fields::VersionField;
use crate::database::models::project_item::{LinkUrl, QueryProject};
use crate::database::models::version_item::QueryVersion;
use crate::models::threads::ThreadId;
use crate::models::ids::{
OrganizationId, ProjectId, TeamId, ThreadId, VersionId,
};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use validator::Validate;
/// The ID of a specific project, encoded as base62 for usage in the API
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ProjectId(pub u64);
/// The ID of a specific version of a project
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct VersionId(pub u64);
/// A project returned from the API
#[derive(Serialize, Deserialize, Clone)]
pub struct Project {

View File

@ -1,14 +1,9 @@
use super::ids::Base62Id;
use crate::database::models::report_item::QueryReport as DBReport;
use crate::models::ids::{ProjectId, ThreadId, UserId, VersionId};
use crate::models::ids::{ProjectId, ReportId, ThreadId, VersionId};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ReportId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct Report {
pub id: ReportId,

View File

@ -1,13 +1,8 @@
use super::ids::Base62Id;
use crate::models::users::UserId;
use crate::models::ids::SessionId;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct SessionId(pub u64);
#[derive(Serialize, Deserialize, Clone)]
pub struct Session {
pub id: SessionId,

View File

@ -1,15 +1,9 @@
use super::ids::Base62Id;
use crate::bitflags_serde_impl;
use crate::models::ids::TeamId;
use crate::models::users::User;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
/// The ID of a team
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct TeamId(pub u64);
pub const DEFAULT_ROLE: &str = "Member";
/// A team of users who control a project

View File

@ -1,20 +1,12 @@
use super::ids::{Base62Id, ImageId};
use crate::models::ids::{ProjectId, ReportId};
use crate::models::ids::{
ImageId, ProjectId, ReportId, ThreadId, ThreadMessageId,
};
use crate::models::projects::ProjectStatus;
use crate::models::users::{User, UserId};
use crate::models::users::User;
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ThreadId(pub u64);
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ThreadMessageId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct Thread {
pub id: ThreadId,

View File

@ -1,5 +1,6 @@
use crate::{auth::AuthProvider, bitflags_serde_impl};
pub use ariadne::users::{UserId, UserStatus};
use ariadne::ids::UserId;
pub use ariadne::users::UserStatus;
use chrono::{DateTime, Utc};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};

View File

@ -622,12 +622,12 @@ impl AutomatedModerationQueue {
if !mod_messages.is_empty() {
let first_time = database::models::Thread::get(project.thread_id, &pool).await?
.map(|x| x.messages.iter().all(|x| x.author_id == Some(database::models::UserId(AUTOMOD_ID)) || x.hide_identity))
.map(|x| x.messages.iter().all(|x| x.author_id == Some(database::models::DBUserId(AUTOMOD_ID)) || x.hide_identity))
.unwrap_or(true);
let mut transaction = pool.begin().await?;
let id = ThreadMessageBuilder {
author_id: Some(database::models::UserId(AUTOMOD_ID)),
author_id: Some(database::models::DBUserId(AUTOMOD_ID)),
body: MessageBody::Text {
body: mod_messages.markdown(true),
private: false,
@ -649,7 +649,7 @@ impl AutomatedModerationQueue {
if mod_messages.should_reject(first_time) {
ThreadMessageBuilder {
author_id: Some(database::models::UserId(AUTOMOD_ID)),
author_id: Some(database::models::DBUserId(AUTOMOD_ID)),
body: MessageBody::StatusChange {
new_status: ProjectStatus::Rejected,
old_status: project.inner.status,
@ -740,7 +740,7 @@ impl AutomatedModerationQueue {
let mut transaction = pool.begin().await?;
ThreadMessageBuilder {
author_id: Some(database::models::UserId(AUTOMOD_ID)),
author_id: Some(database::models::DBUserId(AUTOMOD_ID)),
body: MessageBody::Text {
body: str,
private: true,

View File

@ -1,7 +1,7 @@
use crate::database::models::pat_item::PersonalAccessToken;
use crate::database::models::session_item::Session;
use crate::database::models::{
DatabaseError, OAuthAccessTokenId, PatId, SessionId, UserId,
DBOAuthAccessTokenId, DBPatId, DBSessionId, DBUserId, DatabaseError,
};
use crate::database::redis::RedisPool;
use crate::routes::internal::session::SessionMetadata;
@ -12,9 +12,9 @@ use std::collections::{HashMap, HashSet};
use tokio::sync::Mutex;
pub struct AuthQueue {
session_queue: Mutex<HashMap<SessionId, SessionMetadata>>,
pat_queue: Mutex<HashSet<PatId>>,
oauth_access_token_queue: Mutex<HashSet<OAuthAccessTokenId>>,
session_queue: Mutex<HashMap<DBSessionId, SessionMetadata>>,
pat_queue: Mutex<HashSet<DBPatId>>,
oauth_access_token_queue: Mutex<HashSet<DBOAuthAccessTokenId>>,
}
impl Default for AuthQueue {
@ -32,22 +32,26 @@ impl AuthQueue {
oauth_access_token_queue: Mutex::new(HashSet::with_capacity(1000)),
}
}
pub async fn add_session(&self, id: SessionId, metadata: SessionMetadata) {
pub async fn add_session(
&self,
id: DBSessionId,
metadata: SessionMetadata,
) {
self.session_queue.lock().await.insert(id, metadata);
}
pub async fn add_pat(&self, id: PatId) {
pub async fn add_pat(&self, id: DBPatId) {
self.pat_queue.lock().await.insert(id);
}
pub async fn add_oauth_access_token(
&self,
id: crate::database::models::OAuthAccessTokenId,
id: crate::database::models::DBOAuthAccessTokenId,
) {
self.oauth_access_token_queue.lock().await.insert(id);
}
pub async fn take_sessions(&self) -> HashMap<SessionId, SessionMetadata> {
pub async fn take_sessions(&self) -> HashMap<DBSessionId, SessionMetadata> {
let mut queue = self.session_queue.lock().await;
let len = queue.len();
@ -87,7 +91,7 @@ impl AuthQueue {
SET last_login = $2, city = $3, country = $4, ip = $5, os = $6, platform = $7, user_agent = $8
WHERE (id = $1)
",
id as SessionId,
id as DBSessionId,
Utc::now(),
metadata.city,
metadata.country,
@ -109,8 +113,8 @@ impl AuthQueue {
"
)
.fetch(&mut *transaction)
.map_ok(|x| (SessionId(x.id), x.session, UserId(x.user_id)))
.try_collect::<Vec<(SessionId, String, UserId)>>()
.map_ok(|x| (DBSessionId(x.id), x.session, DBUserId(x.user_id)))
.try_collect::<Vec<(DBSessionId, String, DBUserId)>>()
.await?;
for (id, session, user_id) in expired_ids {
@ -157,7 +161,7 @@ impl AuthQueue {
}
async fn update_oauth_access_token_last_used(
oauth_access_token_queue: HashSet<OAuthAccessTokenId>,
oauth_access_token_queue: HashSet<DBOAuthAccessTokenId>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let ids = oauth_access_token_queue.iter().map(|id| id.0).collect_vec();

View File

@ -1,7 +1,8 @@
//! "Database" for Hydra
use crate::models::users::{UserId, UserStatus};
use crate::models::users::UserStatus;
use actix_ws::Session;
use ariadne::ids::UserId;
use dashmap::{DashMap, DashSet};
use std::sync::atomic::AtomicU32;
use uuid::Uuid;

View File

@ -71,7 +71,7 @@ pub async fn count_download(
.ok()
.flatten();
let project_id: crate::database::models::ids::ProjectId =
let project_id: crate::database::models::ids::DBProjectId =
download_body.project_id.into();
let id_option =
@ -97,7 +97,7 @@ pub async fn count_download(
WHERE ((version_number = $1 OR id = $3) AND mod_id = $2)
",
download_body.version_name,
project_id as crate::database::models::ids::ProjectId,
project_id as crate::database::models::ids::DBProjectId,
id_option
)
.fetch_optional(pool.as_ref())
@ -255,7 +255,7 @@ pub async fn delphi_result_ingest(
let mut transaction = pool.begin().await?;
ThreadMessageBuilder {
author_id: Some(crate::database::models::UserId(AUTOMOD_ID)),
author_id: Some(crate::database::models::DBUserId(AUTOMOD_ID)),
body: MessageBody::Text {
body: thread_header,
private: true,

View File

@ -85,7 +85,7 @@ pub async fn products(
#[derive(Deserialize)]
struct SubscriptionsQuery {
pub user_id: Option<crate::models::ids::UserId>,
pub user_id: Option<ariadne::ids::UserId>,
}
#[get("subscriptions")]
@ -600,7 +600,7 @@ pub async fn user_customer(
#[derive(Deserialize)]
pub struct ChargesQuery {
pub user_id: Option<crate::models::ids::UserId>,
pub user_id: Option<ariadne::ids::UserId>,
}
#[get("payments")]
@ -944,7 +944,7 @@ pub async fn active_servers(
#[derive(Serialize)]
struct ActiveServer {
pub user_id: crate::models::ids::UserId,
pub user_id: ariadne::ids::UserId,
pub server_id: String,
pub price_id: crate::models::ids::ProductPriceId,
pub interval: PriceDuration,
@ -1439,7 +1439,7 @@ pub async fn stripe_webhook(
let user_id = if let Some(user_id) = metadata
.get("modrinth_user_id")
.and_then(|x| parse_base62(x).ok())
.map(|x| crate::database::models::ids::UserId(x as i64))
.map(|x| crate::database::models::ids::DBUserId(x as i64))
{
user_id
} else {
@ -1464,7 +1464,7 @@ pub async fn stripe_webhook(
let charge_id = if let Some(charge_id) = metadata
.get("modrinth_charge_id")
.and_then(|x| parse_base62(x).ok())
.map(|x| crate::database::models::ids::ChargeId(x as i64))
.map(|x| crate::database::models::ids::DBChargeId(x as i64))
{
charge_id
} else {
@ -1557,7 +1557,7 @@ pub async fn stripe_webhook(
.get("modrinth_price_id")
.and_then(|x| parse_base62(x).ok())
.map(|x| {
crate::database::models::ids::ProductPriceId(
crate::database::models::ids::DBProductPriceId(
x as i64,
)
}) {
@ -1601,7 +1601,7 @@ pub async fn stripe_webhook(
.get("modrinth_subscription_id")
.and_then(|x| parse_base62(x).ok())
.map(|x| {
crate::database::models::ids::UserSubscriptionId(x as i64)
crate::database::models::ids::DBUserSubscriptionId(x as i64)
}) {
subscription_id
} else {
@ -1736,7 +1736,7 @@ pub async fn stripe_webhook(
",
badges.bits() as i64,
metadata.user_item.id
as crate::database::models::ids::UserId,
as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -2061,7 +2061,7 @@ pub async fn stripe_webhook(
}
async fn get_or_create_customer(
user_id: crate::models::ids::UserId,
user_id: ariadne::ids::UserId,
stripe_customer_id: Option<&str>,
user_email: Option<&str>,
client: &stripe::Client,
@ -2212,7 +2212,7 @@ pub async fn index_subscriptions(pool: PgPool, redis: RedisPool) {
WHERE (id = $2)
",
badges.bits() as i64,
user.id as crate::database::models::ids::UserId,
user.id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;

View File

@ -74,7 +74,7 @@ impl TempUser {
client: &PgPool,
file_host: &Arc<dyn FileHost + Send + Sync>,
redis: &RedisPool,
) -> Result<crate::database::models::UserId, AuthenticationError> {
) -> Result<crate::database::models::DBUserId, AuthenticationError> {
if let Some(email) = &self.email {
if crate::database::models::User::get_email(email, client)
.await?
@ -115,47 +115,45 @@ impl TempUser {
}
}
let (avatar_url, raw_avatar_url) =
if let Some(avatar_url) = self.avatar_url {
let res = reqwest::get(&avatar_url).await?;
let headers = res.headers().clone();
let (avatar_url, raw_avatar_url) = if let Some(avatar_url) =
self.avatar_url
{
let res = reqwest::get(&avatar_url).await?;
let headers = res.headers().clone();
let img_data = if let Some(content_type) = headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
{
get_image_ext(content_type)
} else {
avatar_url.rsplit('.').next()
};
let img_data = if let Some(content_type) = headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
{
get_image_ext(content_type)
} else {
avatar_url.rsplit('.').next()
};
if let Some(ext) = img_data {
let bytes = res.bytes().await?;
if let Some(ext) = img_data {
let bytes = res.bytes().await?;
let upload_result = upload_image_optimized(
&format!(
"user/{}",
crate::models::users::UserId::from(user_id)
),
bytes,
ext,
Some(96),
Some(1.0),
&**file_host,
)
.await;
let upload_result = upload_image_optimized(
&format!("user/{}", ariadne::ids::UserId::from(user_id)),
bytes,
ext,
Some(96),
Some(1.0),
&**file_host,
)
.await;
if let Ok(upload_result) = upload_result {
(Some(upload_result.url), Some(upload_result.raw_url))
} else {
(None, None)
}
if let Ok(upload_result) = upload_result {
(Some(upload_result.url), Some(upload_result.raw_url))
} else {
(None, None)
}
} else {
(None, None)
};
}
} else {
(None, None)
};
if let Some(username) = username {
crate::database::models::User {
@ -823,7 +821,7 @@ impl AuthProvider {
&self,
id: &str,
executor: E,
) -> Result<Option<crate::database::models::UserId>, AuthenticationError>
) -> Result<Option<crate::database::models::DBUserId>, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@ -837,7 +835,7 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
AuthProvider::Discord => {
let value = sqlx::query!(
@ -848,7 +846,7 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
AuthProvider::Microsoft => {
let value = sqlx::query!(
@ -858,7 +856,7 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
AuthProvider::GitLab => {
let value = sqlx::query!(
@ -869,7 +867,7 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
AuthProvider::Google => {
let value = sqlx::query!(
@ -879,7 +877,7 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
AuthProvider::Steam => {
let value = sqlx::query!(
@ -890,7 +888,7 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
AuthProvider::PayPal => {
let value = sqlx::query!(
@ -900,14 +898,14 @@ impl AuthProvider {
.fetch_optional(executor)
.await?;
value.map(|x| crate::database::models::UserId(x.id))
value.map(|x| crate::database::models::DBUserId(x.id))
}
})
}
pub async fn update_user_id(
&self,
user_id: crate::database::models::UserId,
user_id: crate::database::models::DBUserId,
id: Option<&str>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), AuthenticationError> {
@ -919,7 +917,7 @@ impl AuthProvider {
SET github_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id.and_then(|x| x.parse::<i64>().ok())
)
.execute(&mut **transaction)
@ -932,7 +930,7 @@ impl AuthProvider {
SET discord_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id.and_then(|x| x.parse::<i64>().ok())
)
.execute(&mut **transaction)
@ -945,7 +943,7 @@ impl AuthProvider {
SET microsoft_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id,
)
.execute(&mut **transaction)
@ -958,7 +956,7 @@ impl AuthProvider {
SET gitlab_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id.and_then(|x| x.parse::<i64>().ok())
)
.execute(&mut **transaction)
@ -971,7 +969,7 @@ impl AuthProvider {
SET google_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id,
)
.execute(&mut **transaction)
@ -984,7 +982,7 @@ impl AuthProvider {
SET steam_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id.and_then(|x| x.parse::<i64>().ok())
)
.execute(&mut **transaction)
@ -998,7 +996,7 @@ impl AuthProvider {
SET paypal_country = NULL, paypal_email = NULL, paypal_id = NULL
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
)
.execute(&mut **transaction)
.await?;
@ -1009,7 +1007,7 @@ impl AuthProvider {
SET paypal_id = $2
WHERE (id = $1)
",
user_id as crate::database::models::UserId,
user_id as crate::database::models::DBUserId,
id,
)
.execute(&mut **transaction)
@ -1152,7 +1150,7 @@ pub async fn auth_callback(
oauth_user.country,
oauth_user.email,
oauth_user.id,
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -1527,7 +1525,7 @@ async fn validate_2fa_code(
input: String,
secret: String,
allow_backup: bool,
user_id: crate::database::models::UserId,
user_id: crate::database::models::DBUserId,
redis: &RedisPool,
pool: &PgPool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
@ -1583,7 +1581,7 @@ async fn validate_2fa_code(
DELETE FROM user_backup_codes
WHERE user_id = $1 AND code = $2
",
user_id as crate::database::models::ids::UserId,
user_id as crate::database::models::ids::DBUserId,
code as i64,
)
.execute(&mut **transaction)
@ -1746,7 +1744,7 @@ pub async fn finish_2fa_flow(
WHERE (id = $2)
",
secret,
user_id as crate::database::models::ids::UserId,
user_id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -1756,7 +1754,7 @@ pub async fn finish_2fa_flow(
DELETE FROM user_backup_codes
WHERE user_id = $1
",
user_id as crate::database::models::ids::UserId,
user_id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -1776,7 +1774,7 @@ pub async fn finish_2fa_flow(
$1, $2
)
",
user_id as crate::database::models::ids::UserId,
user_id as crate::database::models::ids::DBUserId,
val as i64,
)
.execute(&mut *transaction)
@ -1869,7 +1867,7 @@ pub async fn remove_2fa(
SET totp_secret = NULL
WHERE (id = $1)
",
user.id as crate::database::models::ids::UserId,
user.id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -1879,7 +1877,7 @@ pub async fn remove_2fa(
DELETE FROM user_backup_codes
WHERE user_id = $1
",
user.id as crate::database::models::ids::UserId,
user.id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -2081,7 +2079,7 @@ pub async fn change_password(
WHERE (id = $2)
",
update_password,
user.id as crate::database::models::ids::UserId,
user.id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -2291,7 +2289,7 @@ pub async fn verify_email(
SET email_verified = TRUE
WHERE (id = $1)
",
user.id as crate::database::models::ids::UserId,
user.id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;

View File

@ -143,7 +143,7 @@ pub async fn export(
.fetch_all(pool.as_ref())
.await?
.into_iter()
.map(|x| crate::database::models::ids::ThreadMessageId(x.id))
.map(|x| crate::database::models::ids::DBThreadMessageId(x.id))
.collect::<Vec<_>>();
let messages =
@ -163,7 +163,7 @@ pub async fn export(
.fetch_all(pool.as_ref())
.await?
.into_iter()
.map(|x| crate::database::models::ids::ImageId(x.id))
.map(|x| crate::database::models::ids::DBImageId(x.id))
.collect::<Vec<_>>();
let uploaded_images = crate::database::models::image_item::Image::get_many(

View File

@ -56,8 +56,8 @@ pub async fn get_projects(
count.count as i64
)
.fetch(&**pool)
.map_ok(|m| database::models::ProjectId(m.id))
.try_collect::<Vec<database::models::ProjectId>>()
.map_ok(|m| database::models::DBProjectId(m.id))
.try_collect::<Vec<database::models::DBProjectId>>()
.await?;
let projects: Vec<_> =

View File

@ -1,5 +1,5 @@
use crate::auth::{AuthenticationError, get_user_from_headers};
use crate::database::models::UserId;
use crate::database::models::DBUserId;
use crate::database::models::session_item::Session as DBSession;
use crate::database::models::session_item::SessionBuilder;
use crate::database::redis::RedisPool;
@ -85,7 +85,7 @@ pub async fn get_session_metadata(
pub async fn issue_session(
req: HttpRequest,
user_id: UserId,
user_id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<DBSession, AuthenticationError> {

View File

@ -4,8 +4,8 @@ use crate::database::models::loader_fields::Loader;
use crate::database::models::project_item::QueryProject;
use crate::database::models::version_item::{QueryFile, QueryVersion};
use crate::database::redis::RedisPool;
use crate::models::ids::{ProjectId, VersionId};
use crate::models::pats::Scopes;
use crate::models::projects::{ProjectId, VersionId};
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::{auth::get_user_from_headers, database};
@ -103,7 +103,7 @@ pub async fn maven_metadata(
WHERE mod_id = $1 AND status = ANY($2)
ORDER BY ordering ASC NULLS LAST, date_published ASC
",
project.inner.id as database::models::ids::ProjectId,
project.inner.id as database::models::ids::DBProjectId,
&*crate::models::projects::VersionStatus::iterator()
.filter(|x| x.is_listed())
.map(|x| x.to_string())

View File

@ -126,7 +126,7 @@ pub async fn report_get(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
info: web::Path<(crate::models::ids::ReportId,)>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response =
@ -156,7 +156,7 @@ pub async fn report_edit(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
info: web::Path<(crate::models::ids::ReportId,)>,
session_queue: web::Data<AuthQueue>,
edit_report: web::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
@ -181,7 +181,7 @@ pub async fn report_edit(
pub async fn report_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
info: web::Path<(crate::models::ids::ReportId,)>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {

View File

@ -1,12 +1,13 @@
use crate::database::redis::RedisPool;
use crate::models::ids::TeamId;
use crate::models::teams::{
OrganizationPermissions, ProjectPermissions, TeamId, TeamMember,
OrganizationPermissions, ProjectPermissions, TeamMember,
};
use crate::models::users::UserId;
use crate::models::v2::teams::LegacyTeamMember;
use crate::queue::session::AuthQueue;
use crate::routes::{ApiError, v2_reroute, v3};
use actix_web::{HttpRequest, HttpResponse, delete, get, patch, post, web};
use ariadne::ids::UserId;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;

View File

@ -2,8 +2,8 @@ use std::sync::Arc;
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::ThreadMessageId;
use crate::models::threads::{MessageBody, Thread, ThreadId};
use crate::models::ids::{ThreadId, ThreadMessageId};
use crate::models::threads::{MessageBody, Thread};
use crate::models::v2::threads::LegacyThread;
use crate::queue::session::AuthQueue;
use crate::routes::{ApiError, v2_reroute, v3};

View File

@ -2,10 +2,9 @@ use crate::database::models::loader_fields::VersionField;
use crate::database::models::{project_item, version_item};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::ImageId;
use crate::models::ids::{ImageId, ProjectId, VersionId};
use crate::models::projects::{
Dependency, FileType, Loader, ProjectId, Version, VersionId, VersionStatus,
VersionType,
Dependency, FileType, Loader, Version, VersionStatus, VersionType,
};
use crate::models::v2::projects::LegacyVersion;
use crate::queue::moderation::AutomatedModerationQueue;

View File

@ -599,7 +599,7 @@ async fn filter_allowed_ids(
let team_ids = projects_data
.iter()
.map(|x| x.inner.team_id)
.collect::<Vec<database::models::TeamId>>();
.collect::<Vec<database::models::DBTeamId>>();
let team_members =
database::models::TeamMember::get_from_team_full_many(
&team_ids, &***pool, redis,
@ -609,7 +609,7 @@ async fn filter_allowed_ids(
let organization_ids = projects_data
.iter()
.filter_map(|x| x.inner.organization_id)
.collect::<Vec<database::models::OrganizationId>>();
.collect::<Vec<database::models::DBOrganizationId>>();
let organizations = database::models::Organization::get_many_ids(
&organization_ids,
&***pool,
@ -620,7 +620,7 @@ async fn filter_allowed_ids(
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<database::models::TeamId>>();
.collect::<Vec<database::models::DBTeamId>>();
let organization_team_members =
database::models::TeamMember::get_from_team_full_many(
&organization_team_ids,

View File

@ -144,7 +144,7 @@ pub async fn collections_get(
let ids = ids
.into_iter()
.map(|x| {
parse_base62(x).map(|x| database::models::CollectionId(x as i64))
parse_base62(x).map(|x| database::models::DBCollectionId(x as i64))
})
.collect::<Result<Vec<_>, _>>()?;
@ -177,7 +177,7 @@ pub async fn collection_get(
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let id = database::models::DBCollectionId(parse_base62(&string)? as i64);
let collection_data =
database::models::Collection::get(id, &**pool, &redis).await?;
let user_option = get_user_from_headers(
@ -241,7 +241,7 @@ pub async fn collection_edit(
})?;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let id = database::models::DBCollectionId(parse_base62(&string)? as i64);
let result = database::models::Collection::get(id, &**pool, &redis).await?;
if let Some(collection_item) = result {
@ -261,7 +261,7 @@ pub async fn collection_edit(
WHERE (id = $2)
",
name.trim(),
id as database::models::ids::CollectionId,
id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -275,7 +275,7 @@ pub async fn collection_edit(
WHERE (id = $2)
",
description.as_ref(),
id as database::models::ids::CollectionId,
id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -298,7 +298,7 @@ pub async fn collection_edit(
WHERE (id = $2)
",
status.to_string(),
id as database::models::ids::CollectionId,
id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -311,7 +311,7 @@ pub async fn collection_edit(
DELETE FROM collections_mods
WHERE collection_id = $1
",
collection_item.id as database::models::ids::CollectionId,
collection_item.id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -352,7 +352,7 @@ pub async fn collection_edit(
SET updated = NOW()
WHERE id = $1
",
collection_item.id as database::models::ids::CollectionId,
collection_item.id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -395,7 +395,7 @@ pub async fn collection_icon_edit(
.1;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let id = database::models::DBCollectionId(parse_base62(&string)? as i64);
let collection_item =
database::models::Collection::get(id, &**pool, &redis)
.await?
@ -445,7 +445,7 @@ pub async fn collection_icon_edit(
upload_result.url,
upload_result.raw_url,
upload_result.color.map(|x| x as i32),
collection_item.id as database::models::ids::CollectionId,
collection_item.id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -476,7 +476,7 @@ pub async fn delete_collection_icon(
.1;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let id = database::models::DBCollectionId(parse_base62(&string)? as i64);
let collection_item =
database::models::Collection::get(id, &**pool, &redis)
.await?
@ -503,7 +503,7 @@ pub async fn delete_collection_icon(
SET icon_url = NULL, raw_icon_url = NULL, color = NULL
WHERE (id = $1)
",
collection_item.id as database::models::ids::CollectionId,
collection_item.id as database::models::ids::DBCollectionId,
)
.execute(&mut *transaction)
.await?;
@ -533,7 +533,7 @@ pub async fn collection_delete(
.1;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let id = database::models::DBCollectionId(parse_base62(&string)? as i64);
let collection = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {

View File

@ -1,5 +1,5 @@
use crate::auth::get_user_from_headers;
use crate::database::models::UserId;
use crate::database::models::DBUserId;
use crate::database::redis::RedisPool;
use crate::models::pats::Scopes;
use crate::models::users::UserFriend;
@ -77,8 +77,8 @@ pub async fn add_friend(
.await?;
async fn send_friend_status(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
sockets: &ActiveSockets,
redis: &RedisPool,
) -> Result<(), ApiError> {

View File

@ -9,9 +9,8 @@ use crate::database::models::{
};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::{ThreadMessageId, VersionId};
use crate::models::ids::{ReportId, ThreadMessageId, VersionId};
use crate::models::images::{Image, ImageContext};
use crate::models::reports::ReportId;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::img::upload_image_optimized;
@ -205,13 +204,13 @@ pub async fn images_add(
raw_url: upload_result.raw_url,
size: content_length as u64,
created: chrono::Utc::now(),
owner_id: database::models::UserId::from(user.id),
owner_id: database::models::DBUserId::from(user.id),
context: context.context_as_str().to_string(),
project_id: if let ImageContext::Project {
project_id: Some(id),
} = context
{
Some(crate::database::models::ProjectId::from(id))
Some(crate::database::models::DBProjectId::from(id))
} else {
None
},
@ -219,7 +218,7 @@ pub async fn images_add(
version_id: Some(id),
} = context
{
Some(database::models::VersionId::from(id))
Some(database::models::DBVersionId::from(id))
} else {
None
},
@ -227,7 +226,7 @@ pub async fn images_add(
thread_message_id: Some(id),
} = context
{
Some(database::models::ThreadMessageId::from(id))
Some(database::models::DBThreadMessageId::from(id))
} else {
None
},
@ -235,7 +234,7 @@ pub async fn images_add(
report_id: Some(id),
} = context
{
Some(database::models::ReportId::from(id))
Some(database::models::DBReportId::from(id))
} else {
None
},

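As a hypothetical simplification of the hunks above, the sketch below restates the context-to-foreign-key mapping as a single match: each image context fills at most one of the nullable columns. The enum shape and raw i64 fields are stand-ins; the real ImageContext carries typed API IDs that are converted through From impls such as DBProjectId::from(id).

// Hypothetical, simplified mapping from an image's context to the nullable
// foreign-key columns populated above. Raw i64 values stand in for the typed
// DB ID wrappers used by labrinth.
#[derive(Copy, Clone)]
enum ImageContext {
    Project { project_id: Option<i64> },
    Version { version_id: Option<i64> },
    ThreadMessage { thread_message_id: Option<i64> },
    Report { report_id: Option<i64> },
}

fn foreign_keys(
    context: ImageContext,
) -> (Option<i64>, Option<i64>, Option<i64>, Option<i64>) {
    match context {
        ImageContext::Project { project_id } => (project_id, None, None, None),
        ImageContext::Version { version_id } => (None, version_id, None, None),
        ImageContext::ThreadMessage { thread_message_id } => {
            (None, None, thread_message_id, None)
        }
        ImageContext::Report { report_id } => (None, None, None, report_id),
    }
}

fn main() {
    let ctx = ImageContext::Report { report_id: Some(7) };
    let expected: (Option<i64>, Option<i64>, Option<i64>, Option<i64>) =
        (None, None, None, Some(7));
    assert_eq!(foreign_keys(ctx), expected);
}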

@ -45,7 +45,7 @@ pub async fn notifications_get(
.await?
.1;
use database::models::NotificationId as DBNotificationId;
use database::models::DBNotificationId;
use database::models::notification_item::Notification as DBNotification;
let notification_ids: Vec<DBNotificationId> =
@ -240,7 +240,7 @@ pub async fn notifications_read(
)
.await?;
let mut notifications: Vec<database::models::ids::NotificationId> =
let mut notifications: Vec<database::models::ids::DBNotificationId> =
Vec::new();
for notification in notifications_data {
@ -293,7 +293,7 @@ pub async fn notifications_delete(
)
.await?;
let mut notifications: Vec<database::models::ids::NotificationId> =
let mut notifications: Vec<database::models::ids::DBNotificationId> =
Vec::new();
for notification in notifications_data {


@ -5,7 +5,7 @@ use crate::{
auth::{checks::ValidateAuthorized, get_user_from_headers},
database::{
models::{
DatabaseError, OAuthClientId, User, generate_oauth_client_id,
DBOAuthClientId, DatabaseError, User, generate_oauth_client_id,
generate_oauth_redirect_id,
oauth_client_authorization_item::OAuthClientAuthorization,
oauth_client_item::{OAuthClient, OAuthRedirectUri},
@ -39,7 +39,7 @@ use sqlx::PgPool;
use validator::Validate;
use crate::database::models::oauth_client_item::OAuthClient as DBOAuthClient;
use crate::models::ids::OAuthClientId as ApiOAuthClientId;
use crate::models::ids::OAuthClientId;
use crate::util::img::{delete_old_images, upload_image_optimized};
pub fn config(cfg: &mut web::ServiceConfig) {
@ -102,7 +102,7 @@ pub async fn get_user_clients(
#[get("app/{id}")]
pub async fn get_client(
id: web::Path<ApiOAuthClientId>,
id: web::Path<OAuthClientId>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
let clients = get_clients_inner(&[id.into_inner()], pool).await?;
@ -121,7 +121,7 @@ pub async fn get_clients(
let ids: Vec<_> = info
.ids
.iter()
.map(|id| parse_base62(id).map(ApiOAuthClientId))
.map(|id| parse_base62(id).map(OAuthClientId))
.collect::<Result<_, _>>()?;
let clients = get_clients_inner(&ids, pool).await?;
@ -218,7 +218,7 @@ pub async fn oauth_client_create(
#[delete("app/{id}")]
pub async fn oauth_client_delete(
req: HttpRequest,
client_id: web::Path<ApiOAuthClientId>,
client_id: web::Path<OAuthClientId>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
@ -274,7 +274,7 @@ pub struct OAuthClientEdit {
#[patch("app/{id}")]
pub async fn oauth_client_edit(
req: HttpRequest,
client_id: web::Path<ApiOAuthClientId>,
client_id: web::Path<OAuthClientId>,
client_updates: web::Json<OAuthClientEdit>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
@ -351,7 +351,7 @@ pub struct Extension {
pub async fn oauth_client_icon_edit(
web::Query(ext): web::Query<Extension>,
req: HttpRequest,
client_id: web::Path<ApiOAuthClientId>,
client_id: web::Path<OAuthClientId>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
@ -419,7 +419,7 @@ pub async fn oauth_client_icon_edit(
#[delete("app/{id}/icon")]
pub async fn oauth_client_icon_delete(
req: HttpRequest,
client_id: web::Path<ApiOAuthClientId>,
client_id: web::Path<OAuthClientId>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
@ -532,7 +532,7 @@ fn generate_oauth_client_secret() -> String {
async fn create_redirect_uris(
uri_strings: impl IntoIterator<Item = impl Display>,
client_id: OAuthClientId,
client_id: DBOAuthClientId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<OAuthRedirectUri>, DatabaseError> {
let mut redirect_uris = vec![];
@ -581,10 +581,10 @@ async fn edit_redirects(
}
pub async fn get_clients_inner(
ids: &[ApiOAuthClientId],
ids: &[OAuthClientId],
pool: web::Data<PgPool>,
) -> Result<Vec<models::oauth_clients::OAuthClient>, ApiError> {
let ids: Vec<OAuthClientId> = ids.iter().map(|i| (*i).into()).collect();
let ids: Vec<DBOAuthClientId> = ids.iter().map(|i| (*i).into()).collect();
let clients = OAuthClient::get_many(&ids, &**pool).await?;
Ok(clients.into_iter().map(|c| c.into()).collect_vec())

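The routes above turn base62 path and query strings into typed IDs through ariadne's parse_base62. Below is a rough, self-contained illustration of that style of decoding; the function name, digit ordering, and error handling here are assumptions for the example and do not reflect ariadne's actual implementation.

// Illustrative base62 decoder, not the ariadne implementation; the real
// alphabet ordering and error type may differ.
fn decode_base62(input: &str) -> Option<u64> {
    const ALPHABET: &[u8] =
        b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
    let mut value: u64 = 0;
    for &byte in input.as_bytes() {
        let digit = ALPHABET.iter().position(|&c| c == byte)? as u64;
        value = value.checked_mul(62)?.checked_add(digit)?;
    }
    Some(value)
}

fn main() {
    // With the alphabet above, "1F" decodes to 1 * 62 + 15 = 77.
    assert_eq!(decode_base62("1F"), Some(77));
}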

@ -9,8 +9,7 @@ use crate::database::models::{
};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::UserId;
use crate::models::organizations::OrganizationId;
use crate::models::ids::OrganizationId;
use crate::models::pats::Scopes;
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
use crate::queue::session::AuthQueue;
@ -20,6 +19,7 @@ use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use crate::{database, models};
use actix_web::{HttpRequest, HttpResponse, web};
use ariadne::ids::UserId;
use ariadne::ids::base62_impl::parse_base62;
use futures::TryStreamExt;
use rust_decimal::Decimal;
@ -77,10 +77,10 @@ pub async fn organization_projects_get(
INNER JOIN mods m ON m.organization_id = o.id
WHERE o.id = $1
",
organization.id as database::models::ids::OrganizationId
organization.id as database::models::ids::DBOrganizationId
)
.fetch(&**pool)
.map_ok(|m| database::models::ProjectId(m.id))
.map_ok(|m| database::models::DBProjectId(m.id))
.try_collect::<Vec<_>>()
.await?;
@ -256,7 +256,7 @@ pub async fn organization_get(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| {
.map(|y: crate::database::models::DBUserId| {
y == x.user_id
})
.unwrap_or(false)
@ -344,7 +344,7 @@ pub async fn organizations_get(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| {
.map(|y: crate::database::models::DBUserId| {
y == x.user_id
})
.unwrap_or(false)
@ -437,7 +437,7 @@ pub async fn organizations_edit(
WHERE (id = $2)
",
description,
id as database::models::ids::OrganizationId,
id as database::models::ids::DBOrganizationId,
)
.execute(&mut *transaction)
.await?;
@ -457,7 +457,7 @@ pub async fn organizations_edit(
WHERE (id = $2)
",
name,
id as database::models::ids::OrganizationId,
id as database::models::ids::DBOrganizationId,
)
.execute(&mut *transaction)
.await?;
@ -519,7 +519,7 @@ pub async fn organizations_edit(
WHERE (id = $2)
",
Some(slug),
id as database::models::ids::OrganizationId,
id as database::models::ids::DBOrganizationId,
)
.execute(&mut *transaction)
.await?;
@ -607,12 +607,12 @@ pub async fn organization_delete(
SELECT user_id FROM team_members
WHERE team_id = $1 AND is_owner = TRUE
",
organization.team_id as database::models::ids::TeamId
organization.team_id as database::models::ids::DBTeamId
)
.fetch_one(&**pool)
.await?
.user_id;
let owner_id = database::models::ids::UserId(owner_id);
let owner_id = database::models::ids::DBUserId(owner_id);
let mut transaction = pool.begin().await?;
@ -626,10 +626,10 @@ pub async fn organization_delete(
INNER JOIN teams t ON t.id = m.team_id
WHERE o.id = $1 AND $1 IS NOT NULL
",
organization.id as database::models::ids::OrganizationId
organization.id as database::models::ids::DBOrganizationId
)
.fetch(&mut *transaction)
.map_ok(|c| database::models::TeamId(c.id))
.map_ok(|c| database::models::DBTeamId(c.id))
.try_collect::<Vec<_>>()
.await?;
@ -777,8 +777,8 @@ pub async fn organization_projects_add(
SET organization_id = $1
WHERE (id = $2)
",
organization.id as database::models::OrganizationId,
project_item.inner.id as database::models::ids::ProjectId
organization.id as database::models::DBOrganizationId,
project_item.inner.id as database::models::ids::DBProjectId
)
.execute(&mut *transaction)
.await?;
@ -794,20 +794,20 @@ pub async fn organization_projects_add(
INNER JOIN users u ON u.id = team_members.user_id
WHERE team_id = $1 AND is_owner = TRUE
",
organization.team_id as database::models::ids::TeamId
organization.team_id as database::models::ids::DBTeamId
)
.fetch_one(&mut *transaction)
.await?;
let organization_owner_user_id =
database::models::ids::UserId(organization_owner_user_id.id);
database::models::ids::DBUserId(organization_owner_user_id.id);
sqlx::query!(
"
DELETE FROM team_members
WHERE team_id = $1 AND (is_owner = TRUE OR user_id = $2)
",
project_item.inner.team_id as database::models::ids::TeamId,
organization_owner_user_id as database::models::ids::UserId,
project_item.inner.team_id as database::models::ids::DBTeamId,
organization_owner_user_id as database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -980,7 +980,7 @@ pub async fn organization_projects_remove(
role = 'Inherited Owner'
WHERE (id = $1)
",
new_owner.id as database::models::ids::TeamMemberId,
new_owner.id as database::models::ids::DBTeamMemberId,
ProjectPermissions::all().bits() as i64
)
.execute(&mut *transaction)
@ -992,7 +992,7 @@ pub async fn organization_projects_remove(
SET organization_id = NULL
WHERE (id = $1)
",
project_item.inner.id as database::models::ids::ProjectId
project_item.inner.id as database::models::ids::DBProjectId
)
.execute(&mut *transaction)
.await?;
@ -1119,7 +1119,7 @@ pub async fn organization_icon_edit(
upload_result.url,
upload_result.raw_url,
upload_result.color.map(|x| x as i32),
organization_item.id as database::models::ids::OrganizationId,
organization_item.id as database::models::ids::DBOrganizationId,
)
.execute(&mut *transaction)
.await?;
@ -1201,7 +1201,7 @@ pub async fn delete_organization_icon(
SET icon_url = NULL, raw_icon_url = NULL, color = NULL
WHERE (id = $1)
",
organization_item.id as database::models::ids::OrganizationId,
organization_item.id as database::models::ids::DBOrganizationId,
)
.execute(&mut *transaction)
.await?;

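Most of the churn in this file is the switch to `id as database::models::ids::DB...Id` inside sqlx::query! calls, which is sqlx's parameter type-override syntax and relies on the ID newtype being encodable. The sketch below shows one way such a newtype can be wired up, assuming sqlx's Type derive with the transparent attribute; labrinth's actual definitions (which may be macro-generated) can differ.

// Sketch only: a DB ID newtype that delegates its SQL type to the inner i64,
// assuming sqlx with a database feature (e.g. `postgres`) enabled.
#[derive(Copy, Clone, Debug, sqlx::Type)]
#[sqlx(transparent)]
pub struct DBOrganizationId(pub i64);

fn main() {
    let id = DBOrganizationId(42);
    // With an impl like this in place, handlers can pass
    // `id as DBOrganizationId` to the sqlx::query! macros, as the hunks
    // above do after the rename.
    println!("{id:?}");
}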

@ -161,7 +161,10 @@ pub async fn paypal_webhook(
transaction.commit().await?;
crate::database::models::user_item::User::clear_caches(
&[(crate::database::models::UserId(result.user_id), None)],
&[(
crate::database::models::DBUserId(result.user_id),
None,
)],
&redis,
)
.await?;
@ -268,7 +271,10 @@ pub async fn tremendous_webhook(
transaction.commit().await?;
crate::database::models::user_item::User::clear_caches(
&[(crate::database::models::UserId(result.user_id), None)],
&[(
crate::database::models::DBUserId(result.user_id),
None,
)],
&redis,
)
.await?;
@ -788,7 +794,7 @@ pub async fn get_balance(
}
async fn get_user_balance(
user_id: crate::database::models::ids::UserId,
user_id: crate::database::models::ids::DBUserId,
pool: &PgPool,
) -> Result<UserBalance, sqlx::Error> {
let payouts = sqlx::query!(


@ -8,16 +8,14 @@ use crate::database::models::{self, User, image_item};
use crate::database::redis::RedisPool;
use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError;
use crate::models::ids::{ImageId, OrganizationId};
use crate::models::ids::{ImageId, OrganizationId, ProjectId, VersionId};
use crate::models::images::{Image, ImageContext};
use crate::models::pats::Scopes;
use crate::models::projects::{
License, Link, MonetizationStatus, ProjectId, ProjectStatus, VersionId,
VersionStatus,
License, Link, MonetizationStatus, ProjectStatus, VersionStatus,
};
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
use crate::models::threads::ThreadType;
use crate::models::users::UserId;
use crate::queue::session::AuthQueue;
use crate::search::indexing::IndexingError;
use crate::util::img::upload_image_optimized;
@ -27,6 +25,7 @@ use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use ariadne::ids::UserId;
use ariadne::ids::base62_impl::to_base62;
use chrono::Utc;
use futures::stream::StreamExt;
@ -397,13 +396,13 @@ async fn project_create_inner(
serde_json::from_str(&format!("\"{}\"", create_data.slug)).ok();
if let Some(slug_project_id) = slug_project_id_option {
let slug_project_id: models::ids::ProjectId =
let slug_project_id: models::ids::DBProjectId =
slug_project_id.into();
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)
",
slug_project_id as models::ids::ProjectId
slug_project_id as models::ids::DBProjectId
)
.fetch_one(&mut **transaction)
.await
@ -817,7 +816,7 @@ async fn project_create_inner(
SET mod_id = $1
WHERE id = $2
",
id as models::ids::ProjectId,
id as models::ids::DBProjectId,
image_id.0 as i64
)
.execute(&mut **transaction)

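The slug check above guards against a requested slug that also decodes as a base62 project ID, which would collide with ID-based lookups. A rough sketch of that decision follows; parse_as_id and id_exists are hypothetical stand-ins for parse_base62 and the SELECT EXISTS query in the hunk.

// Rough sketch of the slug/ID collision guard. Both closures are stand-ins:
// `parse_as_id` for base62 parsing and `id_exists` for the
// `SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)` lookup.
fn slug_collides_with_existing_id(
    slug: &str,
    parse_as_id: impl Fn(&str) -> Option<u64>,
    id_exists: impl Fn(u64) -> bool,
) -> bool {
    parse_as_id(slug).is_some_and(id_exists)
}

fn main() {
    let parse = |s: &str| s.parse::<u64>().ok(); // toy parser for the demo
    let exists = |id: u64| id == 42;
    assert!(slug_collides_with_existing_id("42", parse, exists));
    assert!(!slug_collides_with_existing_id("my-project", parse, exists));
}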

@ -11,11 +11,12 @@ use crate::database::redis::RedisPool;
use crate::database::{self, models as db_models};
use crate::file_hosting::FileHost;
use crate::models;
use crate::models::ids::ProjectId;
use crate::models::images::ImageContext;
use crate::models::notifications::NotificationBody;
use crate::models::pats::Scopes;
use crate::models::projects::{
MonetizationStatus, Project, ProjectId, ProjectStatus, SearchRequest,
MonetizationStatus, Project, ProjectStatus, SearchRequest,
};
use crate::models::teams::ProjectPermissions;
use crate::models::threads::MessageBody;
@ -103,7 +104,7 @@ pub async fn random_projects_get(
.collect::<Vec<String>>(),
)
.fetch(&**pool)
.map_ok(|m| db_ids::ProjectId(m.id))
.map_ok(|m| db_ids::DBProjectId(m.id))
.try_collect::<Vec<_>>()
.await?;
@ -303,7 +304,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
name.trim(),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -324,7 +325,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
summary,
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -363,7 +364,7 @@ pub async fn project_edit(
SET moderation_message = NULL, moderation_message_body = NULL, queued = NOW()
WHERE (id = $1)
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -382,7 +383,7 @@ pub async fn project_edit(
SET approved = NOW()
WHERE id = $1 AND approved IS NULL
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -407,7 +408,7 @@ pub async fn project_edit(
SET webhook_sent = TRUE
WHERE id = $1
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -447,10 +448,10 @@ pub async fn project_edit(
FROM team_members tm
WHERE tm.team_id = $1 AND tm.accepted
",
project_item.inner.team_id as db_ids::TeamId
project_item.inner.team_id as db_ids::DBTeamId
)
.fetch(&mut *transaction)
.map_ok(|c| db_models::UserId(c.id))
.map_ok(|c| db_models::DBUserId(c.id))
.try_collect::<Vec<_>>()
.await?;
@ -484,7 +485,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
status.as_str(),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -528,7 +529,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
requested_status.map(|x| x.as_str()),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -541,7 +542,7 @@ pub async fn project_edit(
DELETE FROM mods_categories
WHERE joining_mod_id = $1 AND is_additional = FALSE
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -553,7 +554,7 @@ pub async fn project_edit(
DELETE FROM mods_categories
WHERE joining_mod_id = $1 AND is_additional = TRUE
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -564,7 +565,7 @@ pub async fn project_edit(
edit_project_categories(
categories,
&perms,
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
false,
&mut transaction,
)
@ -575,7 +576,7 @@ pub async fn project_edit(
edit_project_categories(
categories,
&perms,
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
true,
&mut transaction,
)
@ -597,7 +598,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
license_url.as_deref(),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -663,7 +664,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
Some(slug),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -696,7 +697,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
license,
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -720,7 +721,7 @@ pub async fn project_edit(
SELECT id FROM link_platforms WHERE name = ANY($2)
)
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
&ids_to_delete
)
.execute(&mut *transaction)
@ -747,7 +748,7 @@ pub async fn project_edit(
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
VALUES ($1, $2, $3)
",
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
platform_id as db_ids::LinkPlatformId,
url
)
@ -775,7 +776,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
moderation_message.as_deref(),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -801,7 +802,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
moderation_message_body.as_deref(),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -822,7 +823,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
description,
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -856,7 +857,7 @@ pub async fn project_edit(
WHERE (id = $2)
",
monetization_status.as_str(),
id as db_ids::ProjectId,
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -905,7 +906,7 @@ pub async fn project_edit(
pub async fn edit_project_categories(
categories: &Vec<String>,
perms: &ProjectPermissions,
project_id: db_ids::ProjectId,
project_id: db_ids::DBProjectId,
is_additional: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), ApiError> {
@ -1051,7 +1052,7 @@ pub async fn dependency_list(
.iter()
.filter_map(|x| x.0)
.unique()
.collect::<Vec<db_models::VersionId>>();
.collect::<Vec<db_models::DBVersionId>>();
let (projects_result, versions_result) = futures::future::try_join(
database::Project::get_many_ids(&project_ids, &**pool, &redis),
database::Version::get_many(&dep_version_ids, &**pool, &redis),
@ -1133,7 +1134,7 @@ pub async fn projects_edit(
ApiError::Validation(validation_errors_to_string(err, None))
})?;
let project_ids: Vec<db_ids::ProjectId> =
let project_ids: Vec<db_ids::DBProjectId> =
serde_json::from_str::<Vec<ProjectId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
@ -1155,7 +1156,7 @@ pub async fn projects_edit(
let team_ids = projects_data
.iter()
.map(|x| x.inner.team_id)
.collect::<Vec<db_models::TeamId>>();
.collect::<Vec<db_models::DBTeamId>>();
let team_members = db_models::TeamMember::get_from_team_full_many(
&team_ids, &**pool, &redis,
)
@ -1164,7 +1165,7 @@ pub async fn projects_edit(
let organization_ids = projects_data
.iter()
.filter_map(|x| x.inner.organization_id)
.collect::<Vec<db_models::OrganizationId>>();
.collect::<Vec<db_models::DBOrganizationId>>();
let organizations = db_models::Organization::get_many_ids(
&organization_ids,
&**pool,
@ -1175,7 +1176,7 @@ pub async fn projects_edit(
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<db_models::TeamId>>();
.collect::<Vec<db_models::DBTeamId>>();
let organization_team_members =
db_models::TeamMember::get_from_team_full_many(
&organization_team_ids,
@ -1243,7 +1244,7 @@ pub async fn projects_edit(
bulk_edit_project_categories(
&categories,
&project.categories,
project.inner.id as db_ids::ProjectId,
project.inner.id as db_ids::DBProjectId,
CategoryChanges {
categories: &bulk_edit_project.categories,
add_categories: &bulk_edit_project.add_categories,
@ -1258,7 +1259,7 @@ pub async fn projects_edit(
bulk_edit_project_categories(
&categories,
&project.additional_categories,
project.inner.id as db_ids::ProjectId,
project.inner.id as db_ids::DBProjectId,
CategoryChanges {
categories: &bulk_edit_project.additional_categories,
add_categories: &bulk_edit_project.add_additional_categories,
@ -1281,7 +1282,7 @@ pub async fn projects_edit(
SELECT id FROM link_platforms WHERE name = ANY($2)
)
",
project.inner.id as db_ids::ProjectId,
project.inner.id as db_ids::DBProjectId,
&ids_to_delete
)
.execute(&mut *transaction)
@ -1304,7 +1305,7 @@ pub async fn projects_edit(
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
VALUES ($1, $2, $3)
",
project.inner.id as db_ids::ProjectId,
project.inner.id as db_ids::DBProjectId,
platform_id as db_ids::LinkPlatformId,
url
)
@ -1331,7 +1332,7 @@ pub async fn projects_edit(
pub async fn bulk_edit_project_categories(
all_db_categories: &[db_models::categories::Category],
project_categories: &Vec<String>,
project_id: db_ids::ProjectId,
project_id: db_ids::DBProjectId,
bulk_changes: CategoryChanges<'_>,
max_num_categories: usize,
is_additional: bool,
@ -1369,7 +1370,7 @@ pub async fn bulk_edit_project_categories(
DELETE FROM mods_categories
WHERE joining_mod_id = $1 AND is_additional = $2
",
project_id as db_ids::ProjectId,
project_id as db_ids::DBProjectId,
is_additional
)
.execute(&mut **transaction)
@ -1501,7 +1502,7 @@ pub async fn project_icon_edit(
upload_result.url,
upload_result.raw_url,
upload_result.color.map(|x| x as i32),
project_item.inner.id as db_ids::ProjectId,
project_item.inner.id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -1590,7 +1591,7 @@ pub async fn delete_project_icon(
SET icon_url = NULL, raw_icon_url = NULL, color = NULL
WHERE (id = $1)
",
project_item.inner.id as db_ids::ProjectId,
project_item.inner.id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -1727,7 +1728,7 @@ pub async fn add_gallery_item(
SET featured = $2
WHERE mod_id = $1
",
project_item.inner.id as db_ids::ProjectId,
project_item.inner.id as db_ids::DBProjectId,
false,
)
.execute(&mut *transaction)
@ -1822,7 +1823,7 @@ pub async fn edit_gallery_item(
})?;
let project_item = db_models::Project::get_id(
database::models::ProjectId(result.mod_id),
database::models::DBProjectId(result.mod_id),
&**pool,
&redis,
)
@ -1873,7 +1874,7 @@ pub async fn edit_gallery_item(
SET featured = $2
WHERE mod_id = $1
",
project_item.inner.id as db_ids::ProjectId,
project_item.inner.id as db_ids::DBProjectId,
false,
)
.execute(&mut *transaction)
@ -1985,7 +1986,7 @@ pub async fn delete_gallery_item(
})?;
let project_item = db_models::Project::get_id(
database::models::ProjectId(item.mod_id),
database::models::DBProjectId(item.mod_id),
&**pool,
&redis,
)
@ -2131,7 +2132,7 @@ pub async fn project_delete(
DELETE FROM collections_mods
WHERE mod_id = $1
",
project.inner.id as db_ids::ProjectId,
project.inner.id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -2185,8 +2186,8 @@ pub async fn project_follow(
)
})?;
let user_id: db_ids::UserId = user.id.into();
let project_id: db_ids::ProjectId = result.inner.id;
let user_id: db_ids::DBUserId = user.id.into();
let project_id: db_ids::DBProjectId = result.inner.id;
if !is_visible_project(&result.inner, &Some(user), &pool, false).await? {
return Err(ApiError::NotFound);
@ -2196,8 +2197,8 @@ pub async fn project_follow(
"
SELECT EXISTS(SELECT 1 FROM mod_follows mf WHERE mf.follower_id = $1 AND mf.mod_id = $2)
",
user_id as db_ids::UserId,
project_id as db_ids::ProjectId
user_id as db_ids::DBUserId,
project_id as db_ids::DBProjectId
)
.fetch_one(&**pool)
.await?
@ -2213,7 +2214,7 @@ pub async fn project_follow(
SET follows = follows + 1
WHERE id = $1
",
project_id as db_ids::ProjectId,
project_id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -2223,8 +2224,8 @@ pub async fn project_follow(
INSERT INTO mod_follows (follower_id, mod_id)
VALUES ($1, $2)
",
user_id as db_ids::UserId,
project_id as db_ids::ProjectId
user_id as db_ids::DBUserId,
project_id as db_ids::DBProjectId
)
.execute(&mut *transaction)
.await?;
@ -2265,15 +2266,15 @@ pub async fn project_unfollow(
)
})?;
let user_id: db_ids::UserId = user.id.into();
let user_id: db_ids::DBUserId = user.id.into();
let project_id = result.inner.id;
let following = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mod_follows mf WHERE mf.follower_id = $1 AND mf.mod_id = $2)
",
user_id as db_ids::UserId,
project_id as db_ids::ProjectId
user_id as db_ids::DBUserId,
project_id as db_ids::DBProjectId
)
.fetch_one(&**pool)
.await?
@ -2289,7 +2290,7 @@ pub async fn project_unfollow(
SET follows = follows - 1
WHERE id = $1
",
project_id as db_ids::ProjectId,
project_id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -2299,8 +2300,8 @@ pub async fn project_unfollow(
DELETE FROM mod_follows
WHERE follower_id = $1 AND mod_id = $2
",
user_id as db_ids::UserId,
project_id as db_ids::ProjectId
user_id as db_ids::DBUserId,
project_id as db_ids::DBProjectId
)
.execute(&mut *transaction)
.await?;
@ -2384,7 +2385,7 @@ pub async fn project_get_organization(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| {
.map(|y: crate::database::models::DBUserId| {
y == x.user_id
})
.unwrap_or(false)

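The predicate `logged_in || x.accepted || user_id.map(|y| y == x.user_id).unwrap_or(false)` recurs throughout the handlers touched in this file: a pending member row is only surfaced on the privileged path or to that member themself. The sketch below restates it as a standalone function with hypothetical stand-in types; it is illustrative and not code from this commit.

// Standalone restatement of the recurring member-visibility predicate, with
// hypothetical minimal types in place of labrinth's team member rows.
struct MemberRow {
    user_id: i64,
    accepted: bool,
}

fn member_visible(
    logged_in: bool, // mirrors the `logged_in` flag in the handlers above
    current_user_id: Option<i64>,
    member: &MemberRow,
) -> bool {
    logged_in
        || member.accepted
        || current_user_id
            .map(|id| id == member.user_id)
            .unwrap_or(false)
}

fn main() {
    let pending = MemberRow { user_id: 7, accepted: false };
    assert!(member_visible(false, Some(7), &pending)); // the member themself
    assert!(!member_visible(false, Some(8), &pending)); // another user
    assert!(member_visible(true, None, &pending)); // privileged view
}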

@ -6,7 +6,7 @@ use crate::database::models::thread_item::{
};
use crate::database::redis::RedisPool;
use crate::models::ids::ImageId;
use crate::models::ids::{ProjectId, UserId, VersionId};
use crate::models::ids::{ProjectId, VersionId};
use crate::models::images::{Image, ImageContext};
use crate::models::pats::Scopes;
use crate::models::reports::{ItemType, Report};
@ -15,6 +15,7 @@ use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::img;
use actix_web::{HttpRequest, HttpResponse, web};
use ariadne::ids::UserId;
use ariadne::ids::base62_impl::parse_base62;
use chrono::Utc;
use futures::StreamExt;
@ -271,8 +272,8 @@ pub async fn reports(
count.count as i64
)
.fetch(&**pool)
.map_ok(|m| crate::database::models::ids::ReportId(m.id))
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
.map_ok(|m| crate::database::models::ids::DBReportId(m.id))
.try_collect::<Vec<crate::database::models::ids::DBReportId>>()
.await?
} else {
sqlx::query!(
@ -286,8 +287,8 @@ pub async fn reports(
count.count as i64
)
.fetch(&**pool)
.map_ok(|m| crate::database::models::ids::ReportId(m.id))
.try_collect::<Vec<crate::database::models::ids::ReportId>>()
.map_ok(|m| crate::database::models::ids::DBReportId(m.id))
.try_collect::<Vec<crate::database::models::ids::DBReportId>>()
.await?
};
@ -318,7 +319,7 @@ pub async fn reports_get(
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let report_ids: Vec<crate::database::models::ids::ReportId> =
let report_ids: Vec<crate::database::models::ids::DBReportId> =
serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
@ -353,7 +354,7 @@ pub async fn report_get(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
info: web::Path<(crate::models::ids::ReportId,)>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
@ -393,7 +394,7 @@ pub async fn report_edit(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
info: web::Path<(crate::models::ids::ReportId,)>,
session_queue: web::Data<AuthQueue>,
edit_report: web::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
@ -426,7 +427,7 @@ pub async fn report_edit(
WHERE (id = $2)
",
edit_body,
id as crate::database::models::ids::ReportId,
id as crate::database::models::ids::DBReportId,
)
.execute(&mut *transaction)
.await?;
@ -459,7 +460,7 @@ pub async fn report_edit(
WHERE (id = $2)
",
edit_closed,
id as crate::database::models::ids::ReportId,
id as crate::database::models::ids::DBReportId,
)
.execute(&mut *transaction)
.await?;
@ -492,7 +493,7 @@ pub async fn report_edit(
pub async fn report_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
info: web::Path<(crate::models::ids::ReportId,)>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {


@ -5,15 +5,14 @@ use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::team_item::TeamAssociationId;
use crate::database::models::{Organization, Team, TeamMember, User};
use crate::database::redis::RedisPool;
use crate::models::ids::TeamId;
use crate::models::notifications::NotificationBody;
use crate::models::pats::Scopes;
use crate::models::teams::{
OrganizationPermissions, ProjectPermissions, TeamId,
};
use crate::models::users::UserId;
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{HttpRequest, HttpResponse, web};
use ariadne::ids::UserId;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@ -102,7 +101,7 @@ pub async fn team_members_get_project(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| {
.map(|y: crate::database::models::DBUserId| {
y == x.user_id
})
.unwrap_or(false)
@ -177,7 +176,7 @@ pub async fn team_members_get_organization(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| {
.map(|y: crate::database::models::DBUserId| {
y == x.user_id
})
.unwrap_or(false)
@ -243,7 +242,7 @@ pub async fn team_members_get(
logged_in
|| x.accepted
|| user_id
.map(|y: crate::database::models::UserId| y == x.user_id)
.map(|y: crate::database::models::DBUserId| y == x.user_id)
.unwrap_or(false)
})
.flat_map(|data| {
@ -277,7 +276,7 @@ pub async fn teams_get(
let team_ids = serde_json::from_str::<Vec<TeamId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<crate::database::models::ids::TeamId>>();
.collect::<Vec<crate::database::models::ids::DBTeamId>>();
let teams_data =
TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;
@ -997,10 +996,11 @@ pub async fn transfer_ownership(
.fetch_all(&mut *transaction)
.await?;
let team_ids: Vec<crate::database::models::ids::TeamId> = team_ids
.into_iter()
.map(|x| TeamId(x.team_id as u64).into())
.collect();
let team_ids: Vec<crate::database::models::ids::DBTeamId> =
team_ids
.into_iter()
.map(|x| TeamId(x.team_id as u64).into())
.collect();
// If the owner of the organization is a member of the project, remove them
for team_id in team_ids.iter() {


@ -7,12 +7,12 @@ use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::thread_item::ThreadMessageBuilder;
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::ThreadMessageId;
use crate::models::ids::{ThreadId, ThreadMessageId};
use crate::models::images::{Image, ImageContext};
use crate::models::notifications::NotificationBody;
use crate::models::pats::Scopes;
use crate::models::projects::ProjectStatus;
use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType};
use crate::models::threads::{MessageBody, Thread, ThreadType};
use crate::models::users::User;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
@ -42,14 +42,14 @@ pub async fn is_authorized_thread(
return Ok(true);
}
let user_id: database::models::UserId = user.id.into();
let user_id: database::models::DBUserId = user.id.into();
Ok(match thread.type_ {
ThreadType::Report => {
if let Some(report_id) = thread.report_id {
let report_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1 AND reporter = $2)",
report_id as database::models::ids::ReportId,
user_id as database::models::ids::UserId,
report_id as database::models::ids::DBReportId,
user_id as database::models::ids::DBUserId,
)
.fetch_one(pool)
.await?
@ -64,8 +64,8 @@ pub async fn is_authorized_thread(
if let Some(project_id) = thread.project_id {
let project_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 WHERE m.id = $1)",
project_id as database::models::ids::ProjectId,
user_id as database::models::ids::UserId,
project_id as database::models::ids::DBProjectId,
user_id as database::models::ids::DBUserId,
)
.fetch_one(pool)
.await?
@ -74,8 +74,8 @@ pub async fn is_authorized_thread(
if !project_exists.unwrap_or(false) {
let org_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN organizations o ON m.organization_id = o.id INNER JOIN team_members tm ON tm.team_id = o.team_id AND tm.user_id = $2 WHERE m.id = $1)",
project_id as database::models::ids::ProjectId,
user_id as database::models::ids::UserId,
project_id as database::models::ids::DBProjectId,
user_id as database::models::ids::DBUserId,
)
.fetch_one(pool)
.await?
@ -99,7 +99,7 @@ pub async fn filter_authorized_threads(
pool: &web::Data<PgPool>,
redis: &RedisPool,
) -> Result<Vec<Thread>, ApiError> {
let user_id: database::models::UserId = user.id.into();
let user_id: database::models::DBUserId = user.id.into();
let mut return_threads = Vec::new();
let mut check_threads = Vec::new();
@ -130,7 +130,7 @@ pub async fn filter_authorized_threads(
WHERE m.id = ANY($1)
",
&*project_thread_ids,
user_id as database::models::ids::UserId,
user_id as database::models::ids::DBUserId,
)
.fetch(&***pool)
.map_ok(|row| {
@ -163,7 +163,7 @@ pub async fn filter_authorized_threads(
WHERE m.id = ANY($1)
",
&*project_thread_ids,
user_id as database::models::ids::UserId,
user_id as database::models::ids::DBUserId,
)
.fetch(&***pool)
.map_ok(|row| {
@ -194,7 +194,7 @@ pub async fn filter_authorized_threads(
WHERE id = ANY($1) AND reporter = $2
",
&*report_thread_ids,
user_id as database::models::ids::UserId,
user_id as database::models::ids::DBUserId,
)
.fetch(&***pool)
.map_ok(|row| {
@ -216,7 +216,7 @@ pub async fn filter_authorized_threads(
let mut user_ids = return_threads
.iter()
.flat_map(|x| x.members.clone())
.collect::<Vec<database::models::UserId>>();
.collect::<Vec<database::models::DBUserId>>();
user_ids.append(
&mut return_threads
.iter()
@ -226,7 +226,7 @@ pub async fn filter_authorized_threads(
.filter_map(|x| x.author_id)
.collect::<Vec<_>>()
})
.collect::<Vec<database::models::UserId>>(),
.collect::<Vec<database::models::DBUserId>>(),
);
let users: Vec<User> =
@ -345,7 +345,7 @@ pub async fn threads_get(
.await?
.1;
let thread_ids: Vec<database::models::ids::ThreadId> =
let thread_ids: Vec<database::models::ids::DBThreadId> =
serde_json::from_str::<Vec<ThreadId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
@ -383,7 +383,7 @@ pub async fn thread_send_message(
.await?
.1;
let string: database::models::ThreadId = info.into_inner().0.into();
let string: database::models::DBThreadId = info.into_inner().0.into();
if let MessageBody::Text {
body,


@ -1,10 +1,5 @@
use std::{collections::HashMap, sync::Arc};
use actix_web::{HttpRequest, HttpResponse, web};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;
use super::{ApiError, oauth_clients::get_user_clients};
use crate::util::img::delete_old_images;
use crate::{
@ -13,7 +8,6 @@ use crate::{
file_hosting::FileHost,
models::{
collections::{Collection, CollectionStatus},
ids::UserId,
notifications::Notification,
pats::Scopes,
projects::Project,
@ -22,6 +16,11 @@ use crate::{
queue::session::AuthQueue,
util::{routes::read_from_payload, validate::validation_errors_to_string},
};
use actix_web::{HttpRequest, HttpResponse, web};
use ariadne::ids::UserId;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.route("user", web::get().to(user_auth_get));
@ -89,9 +88,12 @@ pub async fn admin_user_email(
)
})?;
let user =
User::get_id(crate::database::models::UserId(user_id), &**pool, &redis)
.await?;
let user = User::get_id(
crate::database::models::DBUserId(user_id),
&**pool,
&redis,
)
.await?;
if let Some(user) = user {
Ok(HttpResponse::Ok().json(user))
@ -420,7 +422,7 @@ pub async fn user_edit(
WHERE (id = $2)
",
username,
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -439,7 +441,7 @@ pub async fn user_edit(
WHERE (id = $2)
",
bio.as_deref(),
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -462,7 +464,7 @@ pub async fn user_edit(
WHERE (id = $2)
",
role,
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -483,7 +485,7 @@ pub async fn user_edit(
WHERE (id = $2)
",
badges.bits() as i64,
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -504,7 +506,7 @@ pub async fn user_edit(
WHERE (id = $2)
",
venmo_handle,
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -518,7 +520,7 @@ pub async fn user_edit(
WHERE (id = $2)
",
allow_friend_requests,
id as crate::database::models::ids::UserId,
id as crate::database::models::ids::DBUserId,
)
.execute(&mut *transaction)
.await?;
@ -606,7 +608,7 @@ pub async fn user_icon_edit(
",
upload_result.url,
upload_result.raw_url,
actual_user.id as crate::database::models::ids::UserId,
actual_user.id as crate::database::models::ids::DBUserId,
)
.execute(&**pool)
.await?;
@ -658,7 +660,7 @@ pub async fn user_icon_delete(
SET avatar_url = NULL, raw_avatar_url = NULL
WHERE (id = $1)
",
actual_user.id as crate::database::models::ids::UserId,
actual_user.id as crate::database::models::ids::DBUserId,
)
.execute(&**pool)
.await?;


@ -10,13 +10,14 @@ use crate::database::models::version_item::{
use crate::database::models::{self, Organization, image_item};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::images::{Image, ImageContext, ImageId};
use crate::models::ids::{ImageId, ProjectId, VersionId};
use crate::models::images::{Image, ImageContext};
use crate::models::notifications::NotificationBody;
use crate::models::pack::PackFileHash;
use crate::models::pats::Scopes;
use crate::models::projects::{
Dependency, FileType, Loader, ProjectId, Version, VersionFile, VersionId,
VersionStatus, VersionType,
Dependency, FileType, Loader, Version, VersionFile, VersionStatus,
VersionType,
};
use crate::models::projects::{DependencyType, ProjectStatus, skip_nulls};
use crate::models::teams::ProjectPermissions;
@ -212,7 +213,7 @@ async fn version_create_inner(
));
}
let project_id: models::ProjectId = version_create_data.project_id.unwrap().into();
let project_id: models::DBProjectId = version_create_data.project_id.unwrap().into();
// Ensure that the project this version is being added to exists
if models::Project::get_id(project_id, &mut **transaction, redis)
@ -402,11 +403,11 @@ async fn version_create_inner(
SELECT follower_id FROM mod_follows
WHERE mod_id = $1
",
builder.project_id as crate::database::models::ids::ProjectId
builder.project_id as crate::database::models::ids::DBProjectId
)
.fetch(&mut **transaction)
.map_ok(|m| models::ids::UserId(m.follower_id))
.try_collect::<Vec<models::ids::UserId>>()
.map_ok(|m| models::ids::DBUserId(m.follower_id))
.try_collect::<Vec<models::ids::DBUserId>>()
.await?;
let project_id: ProjectId = builder.project_id.into();
@ -516,7 +517,7 @@ async fn version_create_inner(
let project_status = sqlx::query!(
"SELECT status FROM mods WHERE id = $1",
project_id as models::ProjectId,
project_id as models::DBProjectId,
)
.fetch_optional(pool)
.await?;
@ -542,7 +543,7 @@ pub async fn upload_file_to_version(
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
let version_id = models::VersionId::from(url_data.into_inner().0);
let version_id = models::DBVersionId::from(url_data.into_inner().0);
let result = upload_file_to_version_inner(
req,
@ -585,7 +586,7 @@ async fn upload_file_to_version_inner(
redis: Data<RedisPool>,
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
version_id: models::VersionId,
version_id: models::DBVersionId,
session_queue: &AuthQueue,
) -> Result<HttpResponse, CreateError> {
let cdn_url = dotenvy::var("CDN_URL")?;
@ -903,8 +904,8 @@ pub async fn upload_file(
.map(|x| x.as_bytes())
}) {
dependencies.push(DependencyBuilder {
project_id: Some(models::ProjectId(dep.project_id)),
version_id: Some(models::VersionId(dep.version_id)),
project_id: Some(models::DBProjectId(dep.project_id)),
version_id: Some(models::DBVersionId(dep.version_id)),
file_name: None,
dependency_type: DependencyType::Embedded.to_string(),
});


@ -370,10 +370,10 @@ pub async fn update_files(
&update_data.version_types.clone().unwrap_or_default().iter().map(|x| x.to_string()).collect::<Vec<_>>(),
)
.fetch(&**pool)
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<database::models::ids::VersionId>>, m| {
acc.entry(database::models::ProjectId(m.mod_id))
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<database::models::ids::DBVersionId>>, m| {
acc.entry(database::models::DBProjectId(m.mod_id))
.or_default()
.push(database::models::VersionId(m.version_id));
.push(database::models::DBVersionId(m.version_id));
async move { Ok(acc) }
})
.await?;

View File

@ -132,7 +132,7 @@ pub async fn versions_get(
serde_json::from_str::<Vec<models::ids::VersionId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<database::models::VersionId>>();
.collect::<Vec<database::models::DBVersionId>>();
let versions_data =
database::models::Version::get_many(&version_ids, &**pool, &redis)
.await?;
@ -345,7 +345,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
name.trim(),
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -359,7 +359,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
number,
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -373,7 +373,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
version_type.as_str(),
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -384,7 +384,7 @@ pub async fn version_edit_helper(
"
DELETE FROM dependencies WHERE dependent_id = $1
",
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -448,7 +448,7 @@ pub async fn version_edit_helper(
WHERE version_id = $1
AND field_id = ANY($2)
",
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
&loader_field_ids
)
.execute(&mut *transaction)
@ -493,7 +493,7 @@ pub async fn version_edit_helper(
"
DELETE FROM loaders_versions WHERE version_id = $1
",
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -538,7 +538,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
featured,
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -552,7 +552,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
body,
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -572,7 +572,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
*downloads as i32,
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -587,7 +587,7 @@ pub async fn version_edit_helper(
",
diff as i32,
version_item.inner.project_id
as database::models::ids::ProjectId,
as database::models::ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
@ -607,7 +607,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
status.as_str(),
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;
@ -655,7 +655,7 @@ pub async fn version_edit_helper(
WHERE (id = $2)
",
ordering.to_owned() as Option<i32>,
version_id as database::models::ids::VersionId,
version_id as database::models::ids::DBVersionId,
)
.execute(&mut *transaction)
.await?;


@ -11,8 +11,8 @@ use crate::database::models::loader_fields::{
VersionField,
};
use crate::database::models::{
LoaderFieldEnumId, LoaderFieldEnumValueId, LoaderFieldId, ProjectId,
VersionId,
DBProjectId, DBVersionId, LoaderFieldEnumId, LoaderFieldEnumValueId,
LoaderFieldId,
};
use crate::models::projects::from_duplicate_version_fields;
use crate::models::v2::projects::LegacyProject;
@ -27,7 +27,7 @@ pub async fn index_local(
// todo: loaders, project type, game versions
struct PartialProject {
id: ProjectId,
id: DBProjectId,
name: String,
summary: String,
downloads: i32,
@ -56,7 +56,7 @@ pub async fn index_local(
.fetch(pool)
.map_ok(|m| {
PartialProject {
id: ProjectId(m.id),
id: DBProjectId(m.id),
name: m.name,
summary: m.summary,
downloads: m.downloads,
@ -82,7 +82,7 @@ pub async fn index_local(
info!("Indexing local gallery!");
let mods_gallery: DashMap<ProjectId, Vec<PartialGallery>> = sqlx::query!(
let mods_gallery: DashMap<DBProjectId, Vec<PartialGallery>> = sqlx::query!(
"
SELECT mod_id, image_url, featured, ordering
FROM mods_gallery
@ -93,14 +93,14 @@ pub async fn index_local(
.fetch(pool)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<PartialGallery>>, m| {
acc.entry(ProjectId(m.mod_id))
.or_default()
.push(PartialGallery {
|acc: DashMap<DBProjectId, Vec<PartialGallery>>, m| {
acc.entry(DBProjectId(m.mod_id)).or_default().push(
PartialGallery {
url: m.image_url,
featured: m.featured.unwrap_or(false),
ordering: m.ordering,
});
},
);
async move { Ok(acc) }
},
)
@ -108,7 +108,7 @@ pub async fn index_local(
info!("Indexing local categories!");
let categories: DashMap<ProjectId, Vec<(String, bool)>> = sqlx::query!(
let categories: DashMap<DBProjectId, Vec<(String, bool)>> = sqlx::query!(
"
SELECT mc.joining_mod_id mod_id, c.category name, mc.is_additional is_additional
FROM mods_categories mc
@ -120,8 +120,8 @@ pub async fn index_local(
.fetch(pool)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<(String, bool)>>, m| {
acc.entry(ProjectId(m.mod_id))
|acc: DashMap<DBProjectId, Vec<(String, bool)>>, m| {
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push((m.name, m.is_additional));
async move { Ok(acc) }
@ -134,7 +134,7 @@ pub async fn index_local(
info!("Indexing local org owners!");
let mods_org_owners: DashMap<ProjectId, String> = sqlx::query!(
let mods_org_owners: DashMap<DBProjectId, String> = sqlx::query!(
"
SELECT m.id mod_id, u.username
FROM mods m
@ -146,15 +146,15 @@ pub async fn index_local(
&*project_ids,
)
.fetch(pool)
.try_fold(DashMap::new(), |acc: DashMap<ProjectId, String>, m| {
acc.insert(ProjectId(m.mod_id), m.username);
.try_fold(DashMap::new(), |acc: DashMap<DBProjectId, String>, m| {
acc.insert(DBProjectId(m.mod_id), m.username);
async move { Ok(acc) }
})
.await?;
info!("Indexing local team owners!");
let mods_team_owners: DashMap<ProjectId, String> = sqlx::query!(
let mods_team_owners: DashMap<DBProjectId, String> = sqlx::query!(
"
SELECT m.id mod_id, u.username
FROM mods m
@ -165,8 +165,8 @@ pub async fn index_local(
&project_ids,
)
.fetch(pool)
.try_fold(DashMap::new(), |acc: DashMap<ProjectId, String>, m| {
acc.insert(ProjectId(m.mod_id), m.username);
.try_fold(DashMap::new(), |acc: DashMap<DBProjectId, String>, m| {
acc.insert(DBProjectId(m.mod_id), m.username);
async move { Ok(acc) }
})
.await?;
@ -416,7 +416,7 @@ pub async fn index_local(
}
struct PartialVersion {
id: VersionId,
id: DBVersionId,
loaders: Vec<String>,
project_types: Vec<String>,
version_fields: Vec<QueryVersionField>,
@ -425,8 +425,8 @@ struct PartialVersion {
async fn index_versions(
pool: &PgPool,
project_ids: Vec<i64>,
) -> Result<HashMap<ProjectId, Vec<PartialVersion>>, IndexingError> {
let versions: HashMap<ProjectId, Vec<VersionId>> = sqlx::query!(
) -> Result<HashMap<DBProjectId, Vec<PartialVersion>>, IndexingError> {
let versions: HashMap<DBProjectId, Vec<DBVersionId>> = sqlx::query!(
"
SELECT v.id, v.mod_id
FROM versions v
@ -437,10 +437,10 @@ async fn index_versions(
.fetch(pool)
.try_fold(
HashMap::new(),
|mut acc: HashMap<ProjectId, Vec<VersionId>>, m| {
acc.entry(ProjectId(m.mod_id))
|mut acc: HashMap<DBProjectId, Vec<DBVersionId>>, m| {
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push(VersionId(m.id));
.push(DBVersionId(m.id));
async move { Ok(acc) }
},
)
@ -459,7 +459,7 @@ async fn index_versions(
.map(|x| x.0)
.collect::<Vec<i64>>();
let loaders_ptypes: DashMap<VersionId, VersionLoaderData> = sqlx::query!(
let loaders_ptypes: DashMap<DBVersionId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT version_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@ -476,7 +476,7 @@ async fn index_versions(
)
.fetch(pool)
.map_ok(|m| {
let version_id = VersionId(m.version_id);
let version_id = DBVersionId(m.version_id);
let version_loader_data = VersionLoaderData {
loaders: m.loaders.unwrap_or_default(),
@ -488,7 +488,7 @@ async fn index_versions(
.await?;
// Get version fields
let version_fields: DashMap<VersionId, Vec<QueryVersionField>> =
let version_fields: DashMap<DBVersionId, Vec<QueryVersionField>> =
sqlx::query!(
"
SELECT version_id, field_id, int_value, enum_value, string_value
@ -500,9 +500,9 @@ async fn index_versions(
.fetch(pool)
.try_fold(
DashMap::new(),
|acc: DashMap<VersionId, Vec<QueryVersionField>>, m| {
|acc: DashMap<DBVersionId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
version_id: DBVersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: if m.enum_value == -1 {
@ -513,14 +513,14 @@ async fn index_versions(
string_value: m.string_value,
};
acc.entry(VersionId(m.version_id)).or_default().push(qvf);
acc.entry(DBVersionId(m.version_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
.await?;
// Convert to partial versions
let mut res_versions: HashMap<ProjectId, Vec<PartialVersion>> =
let mut res_versions: HashMap<DBProjectId, Vec<PartialVersion>> =
HashMap::new();
for (project_id, version_ids) in versions.iter() {
for version_id in version_ids {


@ -73,10 +73,8 @@ pub fn validate_deps(
.duplicates_by(|x| {
format!(
"{}-{}-{}",
x.version_id
.unwrap_or(crate::models::projects::VersionId(0)),
x.project_id
.unwrap_or(crate::models::projects::ProjectId(0)),
x.version_id.unwrap_or(crate::models::ids::VersionId(0)),
x.project_id.unwrap_or(crate::models::ids::ProjectId(0)),
x.file_name.as_deref().unwrap_or_default()
)
})

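The duplicate check above builds a composite key from a dependency's version ID, project ID, and file name, falling back to zeroed IDs when a field is absent. A plain-std sketch of the same idea, with hypothetical minimal types in place of the real Dependency struct:

use std::collections::HashSet;

// Hypothetical minimal dependency record; the real struct carries typed
// VersionId / ProjectId wrappers rather than raw u64 values.
#[derive(Clone)]
struct Dep {
    version_id: Option<u64>,
    project_id: Option<u64>,
    file_name: Option<String>,
}

// True if two dependencies share the same (version, project, file) composite
// key, matching the duplicates_by key built in the hunk above.
fn has_duplicate_deps(deps: &[Dep]) -> bool {
    let mut seen = HashSet::new();
    for dep in deps {
        let key = format!(
            "{}-{}-{}",
            dep.version_id.unwrap_or(0),
            dep.project_id.unwrap_or(0),
            dep.file_name.as_deref().unwrap_or_default()
        );
        if !seen.insert(key) {
            return true;
        }
    }
    false
}

fn main() {
    let a = Dep { version_id: Some(1), project_id: None, file_name: None };
    assert!(has_duplicate_deps(&[a.clone(), a]));
}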

@ -1,6 +1,6 @@
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
use crate::database::redis::RedisPool;
use crate::models::projects::ProjectId;
use crate::models::ids::ProjectId;
use crate::routes::ApiError;
use ariadne::ids::base62_impl::to_base62;
use chrono::{DateTime, Utc};


@ -1,14 +1,14 @@
use std::collections::HashMap;
use crate::common::{api_v2::ApiV2, api_v3::ApiV3, dummy_data::TestFile};
use actix_web::dev::ServiceResponse;
use async_trait::async_trait;
use labrinth::models::ids::ProjectId;
use labrinth::models::{
projects::{ProjectId, VersionType},
projects::VersionType,
teams::{OrganizationPermissions, ProjectPermissions},
};
use crate::common::{api_v2::ApiV2, api_v3::ApiV3, dummy_data::TestFile};
use super::{
Api, ApiProject, ApiTags, ApiTeams, ApiUser, ApiVersion,
models::{CommonProject, CommonVersion},


@ -5,18 +5,18 @@ use self::models::{
CommonProject, CommonTeamMember, CommonVersion,
};
use self::request_data::{ImageData, ProjectCreationRequestData};
use super::dummy_data::TestFile;
use actix_web::dev::ServiceResponse;
use async_trait::async_trait;
use labrinth::models::ids::ProjectId;
use labrinth::{
LabrinthConfig,
models::{
projects::{ProjectId, VersionType},
projects::VersionType,
teams::{OrganizationPermissions, ProjectPermissions},
},
};
use super::dummy_data::TestFile;
pub mod generic;
pub mod models;
pub mod request_data;


@ -1,24 +1,23 @@
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use labrinth::models::ids::{
ImageId, NotificationId, OrganizationId, ProjectId, ReportId, TeamId,
ThreadId, ThreadMessageId, VersionId,
};
use labrinth::{
auth::AuthProvider,
models::{
images::ImageId,
notifications::NotificationId,
organizations::OrganizationId,
projects::{
Dependency, GalleryItem, License, ModeratorMessage,
MonetizationStatus, ProjectId, ProjectStatus, VersionFile,
VersionId, VersionStatus, VersionType,
MonetizationStatus, ProjectStatus, VersionFile, VersionStatus,
VersionType,
},
reports::ReportId,
teams::{ProjectPermissions, TeamId},
threads::{ThreadId, ThreadMessageId},
users::{Badges, Role, User, UserId, UserPayoutData},
teams::ProjectPermissions,
users::{Badges, Role, User, UserPayoutData},
},
};
use rust_decimal::Decimal;
use serde::Deserialize;
// Fields shared by every version of the API.
// No struct in here should have ANY field that
// is not present in *every* version of the API.


@ -7,10 +7,8 @@ use crate::common::{
},
dummy_data::TestFile,
};
use labrinth::{
models::projects::ProjectId,
util::actix::{MultipartSegment, MultipartSegmentData},
};
use labrinth::models::ids::ProjectId;
use labrinth::util::actix::{MultipartSegment, MultipartSegmentData};
pub fn get_public_project_creation_data(
slug: &str,


@ -19,11 +19,9 @@ use actix_web::{
test::{self, TestRequest},
};
use async_trait::async_trait;
use labrinth::models::ids::ProjectId;
use labrinth::{
models::{
projects::{ProjectId, VersionType},
v2::projects::LegacyVersion,
},
models::{projects::VersionType, v2::projects::LegacyVersion},
routes::v2::version_file::FileUpdateData,
util::actix::AppendsMultipart,
};


@ -1,19 +1,17 @@
use crate::{
assert_status,
common::api_common::{Api, AppendsOptionalPat, request_data::ImageData},
};
use actix_http::StatusCode;
use actix_web::{
dev::ServiceResponse,
test::{self, TestRequest},
};
use ariadne::ids::UserId;
use bytes::Bytes;
use labrinth::models::{
organizations::Organization, users::UserId, v3::projects::Project,
};
use labrinth::models::{organizations::Organization, v3::projects::Project};
use serde_json::json;
use crate::{
assert_status,
common::api_common::{Api, AppendsOptionalPat, request_data::ImageData},
};
use super::ApiV3;
impl ApiV3 {


@ -7,10 +7,8 @@ use crate::common::{
},
dummy_data::TestFile,
};
use labrinth::{
models::projects::ProjectId,
util::actix::{MultipartSegment, MultipartSegmentData},
};
use labrinth::models::ids::ProjectId;
use labrinth::util::actix::{MultipartSegment, MultipartSegmentData};
pub fn get_public_project_creation_data(
slug: &str,


@ -19,11 +19,9 @@ use actix_web::{
test::{self, TestRequest},
};
use async_trait::async_trait;
use labrinth::models::ids::ProjectId;
use labrinth::{
models::{
projects::{ProjectId, VersionType},
v3::projects::Version,
},
models::{projects::VersionType, v3::projects::Version},
routes::v3::version_file::FileUpdateData,
util::actix::AppendsMultipart,
};


@ -1,22 +1,22 @@
#![allow(dead_code)]
use std::io::{Cursor, Write};
use actix_http::StatusCode;
use actix_web::test::{self, TestRequest};
use labrinth::models::{
oauth_clients::OAuthClient,
organizations::Organization,
pats::Scopes,
projects::{Project, ProjectId, Version},
};
use serde_json::json;
use sqlx::Executor;
use zip::{CompressionMethod, ZipWriter, write::FileOptions};
use crate::{
assert_status,
common::{api_common::Api, api_v3, database::USER_USER_PAT},
};
use actix_http::StatusCode;
use actix_web::test::{self, TestRequest};
use labrinth::models::ids::ProjectId;
use labrinth::models::{
oauth_clients::OAuthClient,
organizations::Organization,
pats::Scopes,
projects::{Project, Version},
};
use serde_json::json;
use sqlx::Executor;
use zip::{CompressionMethod, ZipWriter, write::FileOptions};
use super::{
api_common::{ApiProject, AppendsOptionalPat, request_data::ImageData},


@ -23,7 +23,7 @@ pub async fn create_test_pat(
name: format!("test_pat_{}", scopes.bits()),
access_token: format!("mrp_{}", id.0),
scopes,
user_id: database::models::ids::UserId(user_id),
user_id: database::models::ids::DBUserId(user_id),
created: Utc::now(),
expires: Utc::now() + chrono::Duration::days(1),
last_used: None,

Some files were not shown because too many files have changed in this diff.