Merge super-mario-maker into v0
Some checks failed
Build and Test / super-mario-maker (push) Failing after 16s
Build and Test / splatoon (push) Has been cancelled
Build and Test / friends (push) Has been cancelled

This commit is contained in:
red binder 2026-04-26 16:12:31 +02:00
commit e129f9c3b3
30 changed files with 2953 additions and 133 deletions

View file

@ -70,3 +70,33 @@ jobs:
CI_REGISTRY_IMAGE: git.spbr.net/spacebar/rust-nex CI_REGISTRY_IMAGE: git.spbr.net/spacebar/rust-nex
CI_COMMIT_SHORT_SHA: ${{ env.SHORT_SHA }} CI_COMMIT_SHORT_SHA: ${{ env.SHORT_SHA }}
run: ./.ci-scripts/make-edition.sh friends run: ./.ci-scripts/make-edition.sh friends
super-mario-maker:
runs-on: debian-trixie
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
submodules: recursive
- name: Cache container storage
uses: actions/cache@v4
with:
path: |
/var/lib/containers/storage
/run/containers/storage
~/.local/share/containers/storage
key: image-cache
- name: Set short SHA
run: echo "SHORT_SHA=${GITHUB_SHA::6}" >> $GITHUB_ENV
- name: Login to registry
run: podman login -u ${{ secrets.PACKAGE_USER }} -p ${{ secrets.PACKAGE_PWD }} git.spbr.net
- name: Build Super Mario Maker edition
env:
CI_REGISTRY_IMAGE: git.spbr.net/spacebar/rust-nex
CI_COMMIT_SHORT_SHA: ${{ env.SHORT_SHA }}
run: ./.ci-scripts/make-edition.sh super-mario-maker

View file

@ -24,3 +24,7 @@ splatoon:
friends: friends:
stage: build_and_test stage: build_and_test
script: ./.ci-scripts/make-edition.sh friends script: ./.ci-scripts/make-edition.sh friends
super-mario-maker:
stage: build_and_test
script: ./.ci-scripts/make-edition.sh super-mario-maker

1208
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,13 @@
wiiu-chat:
features:
- prudpv1
- v3-8-15
settings:
AUTH_REPORT_VERSION: "branch:origin/project/wup-agmj build:3_8_15_2004_0"
RNEX_VIRTUAL_PORT_INSECURE: "1:10"
RNEX_VIRTUAL_PORT_SECURE: "1:10"
RNEX_DEFAULT_PORT: 10000
RNEX_ACCESS_KEY: "e7a47214"
splatoon: splatoon:
features: features:
- prudpv1 - prudpv1
@ -17,3 +27,14 @@ friends:
RNEX_VIRTUAL_PORT_SECURE: "1:10" RNEX_VIRTUAL_PORT_SECURE: "1:10"
RNEX_DEFAULT_PORT: 6000 RNEX_DEFAULT_PORT: 6000
RNEX_ACCESS_KEY: "ridfebb9" RNEX_ACCESS_KEY: "ridfebb9"
super-mario-maker:
features:
- prudpv1
- v3-8-15
- datastore
settings:
AUTH_REPORT_VERSION: "branch:origin/project/wup-ama build:3_8_29_3022_0"
RNEX_VIRTUAL_PORT_INSECURE: "1:10"
RNEX_VIRTUAL_PORT_SECURE: "1:10"
RNEX_DEFAULT_PORT: 6000
RNEX_ACCESS_KEY: "9f2b4678"

View file

@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT owner, under_review FROM datastore.objects WHERE data_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "owner",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "under_review",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
true,
false
]
},
"hash": "164708b549c483a041d2e54065ed3ffbd9f8d5304f6aa6d785dbddbb1626c0e9"
}

View file

@ -0,0 +1,112 @@
{
"db_name": "PostgreSQL",
"query": "SELECT data_id, owner, size, name, data_type, meta_binary,\n permission, permission_recipients, delete_permission, delete_permission_recipients,\n period, refer_data_id, flag, tags, creation_date, update_date\n FROM datastore.objects WHERE data_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "data_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "owner",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "size",
"type_info": "Int4"
},
{
"ordinal": 3,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "data_type",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "meta_binary",
"type_info": "Bytea"
},
{
"ordinal": 6,
"name": "permission",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "permission_recipients",
"type_info": "Int4Array"
},
{
"ordinal": 8,
"name": "delete_permission",
"type_info": "Int4"
},
{
"ordinal": 9,
"name": "delete_permission_recipients",
"type_info": "Int4Array"
},
{
"ordinal": 10,
"name": "period",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "refer_data_id",
"type_info": "Int8"
},
{
"ordinal": 12,
"name": "flag",
"type_info": "Int4"
},
{
"ordinal": 13,
"name": "tags",
"type_info": "TextArray"
},
{
"ordinal": 14,
"name": "creation_date",
"type_info": "Timestamp"
},
{
"ordinal": 15,
"name": "update_date",
"type_info": "Timestamp"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true
]
},
"hash": "1c2be699b4bfc7e5e6d3a74d7badf67d1812b99e1ec952a044fc03e1a5c63703"
}

View file

@ -0,0 +1,38 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO datastore.objects (\n owner, size, name, data_type, meta_binary,\n permission, permission_recipients,\n delete_permission, delete_permission_recipients,\n flag, period, refer_data_id, tags,\n persistence_slot_id, extra_data, creation_date, update_date\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17\n ) RETURNING data_id\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "data_id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4",
"Int4",
"Text",
"Int4",
"Bytea",
"Int4",
"Int4Array",
"Int4",
"Int4Array",
"Int4",
"Int4",
"Int8",
"TextArray",
"Int4",
"TextArray",
"Timestamp",
"Timestamp"
]
},
"nullable": [
false
]
},
"hash": "219fec3fc852f36de99e5f00ca7a1675439bb44c91158f8b8a696e326c45447c"
}

View file

@ -0,0 +1,16 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO datastore.object_custom_rankings (data_id, application_id, value)\n VALUES ($1, $2, $3)\n ON CONFLICT (data_id, application_id)\n DO UPDATE SET value = datastore.object_custom_rankings.value + EXCLUDED.value\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Int8"
]
},
"nullable": []
},
"hash": "29d4f5c07b36c3d3b6b54a86a1757f27247530878b7f82feeb65802d995a38c4"
}

View file

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS(SELECT 1 FROM datastore.objects WHERE data_id = $1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
},
"hash": "2ff34379bbc32276c3b78ef1283b8158ea907d36588e1e59f6cbe752d89361bb"
}

View file

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT data_id\n FROM datastore.objects\n WHERE owner = $1 AND data_type > 2 AND data_type < 50\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "data_id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false
]
},
"hash": "37d449b81e2aa3abdbdaf38587ae1a6a6c5c38acb06d91c5b0924c3f0a5d2e92"
}

View file

@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT buffer\n FROM datastore.buffer_queues\n WHERE data_id = $1 AND slot = $2\n ORDER BY creation_date ASC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "buffer",
"type_info": "Bytea"
}
],
"parameters": {
"Left": [
"Int8",
"Int4"
]
},
"nullable": [
false
]
},
"hash": "3d06238fddc72d1ba452602e1a8002e9186ce1dfc6c68b52d9d2a8a38f5c3a1f"
}

View file

@ -0,0 +1,29 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n rankings.data_id,\n rankings.value\n FROM datastore.object_custom_rankings rankings\n JOIN UNNEST($1::bigint[]) WITH ORDINALITY AS rows(data_id, ord)\n ON rankings.data_id = rows.data_id\n AND rankings.application_id = $2\n ORDER BY rows.ord\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "data_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "value",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8Array",
"Int8"
]
},
"nullable": [
false,
true
]
},
"hash": "8605011b998a4608c739bf5ab388a7a9bf551126712c1d1089a4263453090e79"
}

View file

@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT slot, total_value, count, initial_value\n FROM datastore.object_ratings\n WHERE data_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "slot",
"type_info": "Int2"
},
{
"ordinal": 1,
"name": "total_value",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "count",
"type_info": "Int4"
},
{
"ordinal": 3,
"name": "initial_value",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
true,
false,
true
]
},
"hash": "8706ac06d78ffaa2a45418be7ae71340561031d8e5c91f46c041f83e54c31a7d"
}

View file

@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT under_review, access_password\n FROM datastore.objects\n WHERE data_id = $1 AND upload_completed = TRUE AND deleted = FALSE\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "under_review",
"type_info": "Bool"
},
{
"ordinal": 1,
"name": "access_password",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false
]
},
"hash": "93be6b6b0ac5d85881e6e223a7d48f5eb4a3761dd71129ba6939cdd0d62569fb"
}

View file

@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE datastore.objects SET upload_completed = true WHERE data_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": []
},
"hash": "e28d8776cc49b55fe76cf33ac12fe18e500d243f1b55fd18e7d96d281605bcf9"
}

View file

@ -0,0 +1,125 @@
{
"db_name": "PostgreSQL",
"query": "SELECT data_id, owner, size, name, data_type, meta_binary,\n permission, permission_recipients, delete_permission, delete_permission_recipients,\n period, refer_data_id, flag, tags, creation_date, update_date,\n access_password, under_review\n FROM datastore.objects\n WHERE owner = $1 AND persistence_slot_id = $2\n AND upload_completed = TRUE AND deleted = FALSE",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "data_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "owner",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "size",
"type_info": "Int4"
},
{
"ordinal": 3,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "data_type",
"type_info": "Int4"
},
{
"ordinal": 5,
"name": "meta_binary",
"type_info": "Bytea"
},
{
"ordinal": 6,
"name": "permission",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "permission_recipients",
"type_info": "Int4Array"
},
{
"ordinal": 8,
"name": "delete_permission",
"type_info": "Int4"
},
{
"ordinal": 9,
"name": "delete_permission_recipients",
"type_info": "Int4Array"
},
{
"ordinal": 10,
"name": "period",
"type_info": "Int4"
},
{
"ordinal": 11,
"name": "refer_data_id",
"type_info": "Int8"
},
{
"ordinal": 12,
"name": "flag",
"type_info": "Int4"
},
{
"ordinal": 13,
"name": "tags",
"type_info": "TextArray"
},
{
"ordinal": 14,
"name": "creation_date",
"type_info": "Timestamp"
},
{
"ordinal": 15,
"name": "update_date",
"type_info": "Timestamp"
},
{
"ordinal": 16,
"name": "access_password",
"type_info": "Int8"
},
{
"ordinal": 17,
"name": "under_review",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4",
"Int4"
]
},
"nullable": [
false,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
true,
false,
false
]
},
"hash": "efe4bf3602782a0d521274956e0fcecccf8f0f8dd20d890a76acf85265b2192c"
}

View file

@ -28,6 +28,12 @@ anyhow = "1.0.100"
ureq = { version = "3.1.4", features = [ "json" ] } ureq = { version = "3.1.4", features = [ "json" ] }
serde = { version = "1.0.228", features = [ "derive" ] } serde = { version = "1.0.228", features = [ "derive" ] }
serde_json = "1.0.149" serde_json = "1.0.149"
sqlx = { version = "0.8.6", optional = true, features = ["postgres", "runtime-tokio", "chrono", "time"] }
aws-sdk-s3 = { version = "1.129.0", optional = true }
aws-config = { version = "1.8.15", optional = true }
base64 = "0.22.1"
sha2 = "0.10.9"
urlencoding = "2.1.3"
[dependencies.sqlx] [dependencies.sqlx]
version = "0.8.6" version = "0.8.6"
@ -44,7 +50,7 @@ big_pid = []
v3-8-15 = ["rmc_struct_header"] v3-8-15 = ["rmc_struct_header"]
v4-3-11 = ["v3-8-15"] v4-3-11 = ["v3-8-15"]
nx = ["big_pid"] nx = ["big_pid"]
datastore = ["dep:sqlx", "v3-8-15", "dep:aws-sdk-s3", "dep:aws-config"]
[[bench]] [[bench]]
name = "rmc_serialization" name = "rmc_serialization"

View file

@ -9,6 +9,19 @@ async fn main() {
if #[cfg(feature = "friends")]{ if #[cfg(feature = "friends")]{
use rnex_core::executables::friends_backend::start_friends_backend; use rnex_core::executables::friends_backend::start_friends_backend;
start_friends_backend().await; start_friends_backend().await;
} else if #[cfg(feature = "datastore")] {
use rnex_core::executables::common::DB_POOL;
use sqlx::PgPool;
let database_url = std::env::var("RNEX_DATASTORE_DATABASE_URL")
.expect("RNEX_DATASTORE_DATABASE_URL must be set");
let pool = PgPool::connect(&database_url)
.await
.expect("Failed to create pool");
DB_POOL.set(pool).expect("failed to set global DB_POOL");
use rnex_core::executables::regular_backend;
regular_backend::start_regular_backend().await
} else { } else {
use rnex_core::executables::regular_backend; use rnex_core::executables::regular_backend;
regular_backend::start_regular_backend().await regular_backend::start_regular_backend().await

View file

@ -8,14 +8,44 @@ use std::io::Cursor;
use std::net::{Ipv4Addr, SocketAddrV4}; use std::net::{Ipv4Addr, SocketAddrV4};
use std::sync::Arc; use std::sync::Arc;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use std::sync::LazyLock;
use std::sync::OnceLock;
cfg_if! {
if #[cfg(feature = "datastore")] {
use sqlx::postgres::PgPool;
}
}
use log::error; use log::error;
use std::error::Error; use std::error::Error;
use std::string::ToString;
use cfg_if::cfg_if;
use crate::reggie::UnitPacketRead; use crate::reggie::UnitPacketRead;
const IP_REQ_SERVICE_URL: &str = "https://ipinfo.io/ip"; const IP_REQ_SERVICE_URL: &str = "https://ipinfo.io/ip";
cfg_if! {
if #[cfg(feature = "datastore")] {
pub static RNEX_DATASTORE_DATABASE_URL: LazyLock<String> = LazyLock::new(|| {
std::env::var("RNEX_DATASTORE_DATABASE_URL")
.expect("RNEX_DATASTORE_DATABASE_URL must be set")
});
pub static DB_POOL: OnceLock<PgPool> = OnceLock::new();
pub fn get_db() -> &'static PgPool {
DB_POOL.get().expect("db_pool not initialized")
}
pub static RNEX_DATASTORE_S3_ENDPOINT: LazyLock<String> = LazyLock::new(|| {
std::env::var("RNEX_DATASTORE_S3_ENDPOINT")
.expect("RNEX_DATASTORE_S3_ENDPOINT must be set")
});
pub static RNEX_DATASTORE_S3_BUCKET: LazyLock<String> = LazyLock::new(|| {
std::env::var("RNEX_DATASTORE_S3_BUCKET")
.expect("RNEX_DATASTORE_S3_BUCKET must be set")
});
}
}
pub fn try_get_ip() -> Result<Ipv4Addr, Box<dyn Error>> { pub fn try_get_ip() -> Result<Ipv4Addr, Box<dyn Error>> {
let mut req = ureq::get(IP_REQ_SERVICE_URL).call()?; let mut req = ureq::get(IP_REQ_SERVICE_URL).call()?;

View file

@ -51,6 +51,23 @@ pub fn derive_key(pid: PID, password: &[u8]) -> [u8; 16] {
pub struct KerberosDateTime(pub u64); pub struct KerberosDateTime(pub u64);
impl KerberosDateTime { impl KerberosDateTime {
/// Wrap an already-packed Kerberos timestamp value without reinterpreting it.
pub fn from_u64(val: u64) -> Self {
Self(val)
}
/// Build a packed Kerberos timestamp from a chrono `NaiveDateTime`.
///
/// Packing is delegated to `new`, which lays the fields out as
/// `second | minute<<6 | hour<<12 | day<<17 | month<<22 | year<<26`.
/// NOTE(review): `dt.year()` is an `i32`; casting a negative (BCE) year to
/// `u64` would wrap. Assumes all dates handled here are CE — TODO confirm.
pub fn from_naive(dt: chrono::NaiveDateTime) -> Self {
use chrono::Datelike;
use chrono::Timelike;
Self::new(
dt.second() as u64,
dt.minute() as u64,
dt.hour() as u64,
dt.day() as u64,
dt.month() as u64,
dt.year() as u64,
)
}
pub fn new(second: u64, minute: u64, hour: u64, day: u64, month: u64, year: u64) -> Self { pub fn new(second: u64, minute: u64, hour: u64, day: u64, month: u64, year: u64) -> Self {
Self(second | (minute << 6) | (hour << 12) | (day << 17) | (month << 22) | (year << 26)) Self(second | (minute << 6) | (hour << 12) | (day << 17) | (month << 22) | (year << 26))
} }

View file

@ -0,0 +1,785 @@
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(feature = "datastore")] {
use crate::define_rmc_proto;
use macros::rmc_struct;
use rnex_core::prudp::socket_addr::PRUDPSockAddr;
use std::sync::{Weak};
use chrono::Utc;
use sqlx::types::time;
use sqlx::types::time::PrimitiveDateTime;
use rnex_core::PID;
use rnex_core::nex::remote_console::RemoteConsole;
use rnex_core::nex::s3presigner::S3Presigner;
use rnex_core::rmc::response::ErrorCode;
use rnex_core::rmc::protocols::secure::{Secure, RawSecure, RawSecureInfo, RemoteSecure};
use rnex_core::rmc::protocols::datastore::{BufferQueueParam, CompletePostParam, DataStoreCustomRankingResult, DataStoreGetCustomRankingByDataIDParam, DataStorePrepareGetParam, DataStoreReqGetInfo, DataStoreSearchParam, GetMetaInfo, GetMetaParam, KeyValue, Permission, PersistenceTarget, RateCustomRankingParam, RatingInfo, RatingInfoWithSlot};
use rnex_core::rmc::protocols::datastore::{DataStore, RawDataStore, RawDataStoreInfo, RemoteDataStore, PreparePostParam, ReqPostInfo};
use crate::nex::user::User;
use rnex_core::executables::common::{RNEX_DATASTORE_S3_BUCKET, RNEX_DATASTORE_S3_ENDPOINT, get_db};
use rnex_core::rmc::structures::qbuffer::QBuffer;
use sqlx::types::chrono::DateTime;
use rnex_core::kerberos::KerberosDateTime;
use rnex_core::rmc::structures::qresult::QResult;
/// Convert one `datastore.objects` row (plus its pre-fetched ratings) into
/// the wire-format `GetMetaInfo` structure, widening/narrowing the DB's
/// signed column types to the protocol's unsigned field types.
fn map_row_to_meta_info(
row_data_id: i64,
row_owner: i32,
row_size: i32,
row_name: String,
row_data_type: i16,
row_meta_binary: Vec<u8>,
row_permission: i16,
row_permission_recipients: Vec<i64>,
row_delete_permission: i16,
row_delete_permission_recipients: Vec<i64>,
row_period: i16,
row_refer_data_id: i64,
row_flag: i32,
row_tags: Vec<String>,
row_creation_date: chrono::NaiveDateTime,
row_update_date: chrono::NaiveDateTime,
ratings: Vec<RatingInfoWithSlot>
) -> GetMetaInfo {
GetMetaInfo {
dataid: row_data_id as u64,
owner: row_owner as u32,
size: row_size as u32,
name: row_name,
data_type: row_data_type as u16,
meta_binary: QBuffer(row_meta_binary),
permission: Permission {
permission: row_permission as u8,
recipient_ids: row_permission_recipients.into_iter().map(|id| id as u32).collect(),
},
del_permission: Permission {
permission: row_delete_permission as u8,
recipient_ids: row_delete_permission_recipients.into_iter().map(|id| id as u32).collect(),
},
period: row_period as u16,
// status/referred_count are not tracked in the DB; always reported as 0.
status: 0,
referred_count: 0,
// NOTE(review): i64 -> u32 cast truncates refer ids above u32::MAX;
// dataid above is widened to u64 — confirm the field really is 32-bit.
refer_dat_id: row_refer_data_id as u32,
flag: row_flag as u32,
tags: row_tags,
// Fixed "never expires" timestamp; under the packing in
// KerberosDateTime::new this decodes to year 9999.
expire_time: KerberosDateTime::from_u64(0x9C3F3E0000),
created_time: KerberosDateTime::from_naive(row_creation_date),
updated_time: KerberosDateTime::from_naive(row_update_date),
// No dedicated referred_time column; creation date is reused.
referred_time: KerberosDateTime::from_naive(row_creation_date),
ratings,
}
}
/// Gate-keeping check run before serving an object: the row must exist with
/// a completed upload and not be deleted (enforced by the WHERE clause),
/// the supplied access password must match (a stored password of 0 means
/// "no password"), and the object must not be under moderation review.
async fn check_object_availability(data_id: u64, password: u64) -> Result<(), ErrorCode> {
let row = sqlx::query!(
r#"
SELECT under_review, access_password
FROM datastore.objects
WHERE data_id = $1 AND upload_completed = TRUE AND deleted = FALSE
"#,
data_id as i64
)
.fetch_optional(get_db())
.await
.map_err(|e| {
eprintln!("Availability check DB error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?
// No matching row (missing, incomplete upload, or deleted) -> NotFound.
.ok_or(ErrorCode::DataStore_NotFound)?;
let access_password = row.access_password as u64;
if access_password != 0 && access_password != password {
return Err(ErrorCode::DataStore_InvalidPassword);
}
if row.under_review {
return Err(ErrorCode::DataStore_UnderReviewing);
}
Ok(())
}
/// Fetch all rating slots for an object, after verifying it is available
/// to the caller (exists, uploaded, not deleted/under review, password ok).
/// Returns an empty Vec when the object simply has no ratings.
async fn get_object_ratings(data_id: u64, password: u64) -> Result<Vec<RatingInfoWithSlot>, ErrorCode> {
check_object_availability(data_id, password).await?;
let rows = sqlx::query!(
r#"
SELECT slot, total_value, count, initial_value
FROM datastore.object_ratings
WHERE data_id = $1
"#,
data_id as i64
)
.fetch_all(get_db())
.await
.map_err(|e| {
eprintln!("Ratings fetch error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?;
let ratings = rows.into_iter().map(|row| {
RatingInfoWithSlot {
// NOTE(review): slot column is Int2 (i16) but the protocol field is
// i8 — values outside -128..=127 would truncate. Confirm slot range.
slot: row.slot as i8,
rating: RatingInfo {
// total_value/initial_value are nullable in the schema; treat NULL as 0.
total_value: row.total_value.unwrap_or(0),
count: row.count as u32,
initial_value: row.initial_value.unwrap_or(0),
},
}
}).collect();
Ok(ratings)
}
/// Load the full metadata of an object addressed by its data_id, including
/// its ratings. Availability (existence, upload state, deletion, password,
/// review status) is checked up front.
/// NOTE(review): get_object_ratings() repeats check_object_availability(),
/// so the availability query runs twice per call — harmless but redundant.
async fn get_object_info_by_data_id(data_id: u64, password: u64) -> Result<GetMetaInfo, ErrorCode> {
check_object_availability(data_id, password).await?;
let row = sqlx::query!(
r#"SELECT data_id, owner, size, name, data_type, meta_binary,
permission, permission_recipients, delete_permission, delete_permission_recipients,
period, refer_data_id, flag, tags, creation_date, update_date
FROM datastore.objects WHERE data_id = $1"#,
data_id as i64
)
.fetch_optional(get_db())
.await
.map_err(|_| ErrorCode::DataStore_SystemFileError)?
.ok_or(ErrorCode::DataStore_NotFound)?;
let ratings = get_object_ratings(data_id, password).await?;
// Most columns are nullable in the schema; NULL collapses to 0 / empty.
Ok(map_row_to_meta_info(
row.data_id,
row.owner.unwrap_or(0),
row.size.unwrap_or(0),
row.name.unwrap_or_default(),
row.data_type.unwrap_or(0) as i16,
row.meta_binary.unwrap_or_default(),
row.permission.unwrap_or(0) as i16,
row.permission_recipients.unwrap_or_default().into_iter().map(|id| id as i64).collect(),
row.delete_permission.unwrap_or(0) as i16,
row.delete_permission_recipients.unwrap_or_default().into_iter().map(|id| id as i64).collect(),
row.period.unwrap_or(0) as i16,
row.refer_data_id.unwrap_or(0),
row.flag.unwrap_or(0),
row.tags.unwrap_or_default(),
// Rebuild a chrono::NaiveDateTime from the sqlx `time` timestamp
// field-by-field; sub-second precision is intentionally dropped.
row.creation_date.map(|dt| chrono::NaiveDateTime::new(
chrono::NaiveDate::from_ymd_opt(dt.year(), dt.month() as u32, dt.day() as u32).unwrap(),
chrono::NaiveTime::from_hms_opt(dt.hour() as u32, dt.minute() as u32, dt.second() as u32).unwrap()
)).unwrap_or_default(),
row.update_date.map(|dt| chrono::NaiveDateTime::new(
chrono::NaiveDate::from_ymd_opt(dt.year(), dt.month() as u32, dt.day() as u32).unwrap(),
chrono::NaiveTime::from_hms_opt(dt.hour() as u32, dt.minute() as u32, dt.second() as u32).unwrap()
)).unwrap_or_default(),
ratings
))
}
/// Load the full metadata of an object addressed by (owner PID,
/// persistence slot) instead of data_id. Password and review-status checks
/// are done inline here (the row carries access_password/under_review);
/// the subsequent get_object_ratings() call re-validates availability.
async fn get_object_info_by_persistence_target(target: PersistenceTarget, password: u64) -> Result<GetMetaInfo, ErrorCode> {
let row = sqlx::query!(
r#"SELECT data_id, owner, size, name, data_type, meta_binary,
permission, permission_recipients, delete_permission, delete_permission_recipients,
period, refer_data_id, flag, tags, creation_date, update_date,
access_password, under_review
FROM datastore.objects
WHERE owner = $1 AND persistence_slot_id = $2
AND upload_completed = TRUE AND deleted = FALSE"#,
target.owner as i32,
target.persistence_slot_id as i16
)
.fetch_optional(get_db())
.await
.map_err(|_| ErrorCode::DataStore_SystemFileError)?
.ok_or(ErrorCode::DataStore_NotFound)?;
// A stored password of 0 means "no password required".
let db_password = row.access_password as u64;
if db_password != 0 && db_password != password {
return Err(ErrorCode::DataStore_InvalidPassword);
}
if row.under_review {
return Err(ErrorCode::DataStore_UnderReviewing);
}
let ratings = get_object_ratings(row.data_id as u64, password).await?;
// NULL columns collapse to 0 / empty defaults, same as the by-data_id path.
Ok(map_row_to_meta_info(
row.data_id,
row.owner.unwrap_or(0),
row.size.unwrap_or(0),
row.name.unwrap_or_default(),
row.data_type.unwrap_or(0) as i16,
row.meta_binary.unwrap_or_default(),
row.permission.unwrap_or(0) as i16,
row.permission_recipients.unwrap_or_default().into_iter().map(|id| id as i64).collect(),
row.delete_permission.unwrap_or(0) as i16,
row.delete_permission_recipients.unwrap_or_default().into_iter().map(|id| id as i64).collect(),
row.period.unwrap_or(0) as i16,
row.refer_data_id.unwrap_or(0),
row.flag.unwrap_or(0),
row.tags.unwrap_or_default(),
// Convert sqlx `time` timestamps to chrono, dropping sub-second parts.
row.creation_date.map(|dt| chrono::NaiveDateTime::new(
chrono::NaiveDate::from_ymd_opt(dt.year(), dt.month() as u32, dt.day() as u32).unwrap(),
chrono::NaiveTime::from_hms_opt(dt.hour() as u32, dt.minute() as u32, dt.second() as u32).unwrap()
)).unwrap_or_default(),
row.update_date.map(|dt| chrono::NaiveDateTime::new(
chrono::NaiveDate::from_ymd_opt(dt.year(), dt.month() as u32, dt.day() as u32).unwrap(),
chrono::NaiveTime::from_hms_opt(dt.hour() as u32, dt.minute() as u32, dt.second() as u32).unwrap()
)).unwrap_or_default(),
ratings
))
}
/// Fetch all queued buffers for one (object, slot) pair, oldest first.
/// NOTE(review): availability is checked with password 0, so objects that
/// carry a non-zero access password will always fail here with
/// InvalidPassword (see check_object_availability) — confirm intended.
async fn get_buffer_queues_by_data_id_and_slot(
data_id: u64,
slot: u32
) -> Result<Vec<QBuffer>, ErrorCode> {
check_object_availability(data_id, 0).await?;
let rows = sqlx::query!(
r#"
SELECT buffer
FROM datastore.buffer_queues
WHERE data_id = $1 AND slot = $2
ORDER BY creation_date ASC
"#,
data_id as i64,
slot as i32
)
.fetch_all(get_db())
.await
.map_err(|e| {
log::error!("Buffer queue fetch error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?;
// Wrap each raw byte column in the protocol's QBuffer newtype.
let buffer_queues = rows
.into_iter()
.map(|row| QBuffer(row.buffer))
.collect();
Ok(buffer_queues)
}
/// Decide whether `viewer_id` may read an object owned by `owner_id`,
/// according to the object's access `permission` mode:
/// 0 = public, 1 = friends only (not implemented, always denied),
/// 2 = explicit recipient list, 3 = owner only; anything else is rejected
/// as a malformed (possibly hostile) request.
fn verify_object_permission(
    owner_id: u32,
    viewer_id: u32,
    permission: &Permission,
) -> Result<(), ErrorCode> {
    // The owner always passes, regardless of the permission mode.
    if viewer_id == owner_id {
        return Ok(());
    }
    let allowed = match permission.permission {
        // Mode 0: everyone may read.
        0 => true,
        // Mode 1: friends only — friend lookups are not implemented here.
        1 => false,
        // Mode 2: only explicitly listed recipients may read.
        2 => permission.recipient_ids.contains(&viewer_id),
        // Mode 3: owner only (the owner was already handled above).
        3 => false,
        // Unknown mode: treat as invalid input rather than a denial.
        _ => return Err(ErrorCode::DataStore_InvalidArgument),
    };
    if allowed {
        Ok(())
    } else {
        Err(ErrorCode::DataStore_PermissionDenied)
    }
}
/// Strip optional properties from `meta_info` that the caller did not
/// request via the `result_option` bit mask: bit 0x01 keeps the meta
/// binary, bit 0x04 keeps the ratings. Other bits are ignored.
fn filter_properties_by_result_option(
    meta_info: &mut GetMetaInfo,
    result_option: u8,
) {
    let wants_meta_binary = result_option & 0x01 != 0;
    let wants_ratings = result_option & 0x04 != 0;
    if !wants_meta_binary {
        meta_info.meta_binary = QBuffer(Vec::new());
    }
    if !wants_ratings {
        meta_info.ratings = Vec::new();
    }
    // Remaining bits have no known effect here and are deliberately ignored.
}
/// Fetch custom-ranking scores for the given objects (preserving input
/// order via UNNEST ... WITH ORDINALITY) and join each with its full
/// metadata. Objects whose metadata cannot be loaded (deleted, under
/// review, etc.) are skipped with a warning; a DB failure yields an empty
/// result list rather than an error.
async fn get_custom_rankings_by_data_ids(
application_id: u32,
data_ids: Vec<u64>
) -> Vec<DataStoreCustomRankingResult> {
let mut results = Vec::with_capacity(data_ids.len());
let rows = sqlx::query!(
r#"
SELECT
rankings.data_id,
rankings.value
FROM datastore.object_custom_rankings rankings
JOIN UNNEST($1::bigint[]) WITH ORDINALITY AS rows(data_id, ord)
ON rankings.data_id = rows.data_id
AND rankings.application_id = $2
ORDER BY rows.ord
"#,
&data_ids.iter().map(|&id| id as i64).collect::<Vec<i64>>(),
// Fix: $2 is an Int8 parameter (see the prepared-query cache for this
// statement), so bind an i64 — the previous `as i32` cast mismatched.
application_id as i64
)
.fetch_all(get_db())
.await;
let rows = match rows {
Ok(r) => r,
Err(e) => {
log::error!("Custom ranking query error: {:?}", e);
// Best-effort: return what we have (nothing) instead of failing the call.
return results;
}
};
for row in rows {
let data_id = row.data_id as u64;
let score = row.value.unwrap_or(0) as u32;
// Password 0: only unprotected objects can be surfaced in rankings.
if let Ok(meta) = get_object_info_by_data_id(data_id, 0).await {
results.push(DataStoreCustomRankingResult {
order: 0,
score,
meta_info: meta,
});
} else {
log::warn!("Could not find metadata for ranked object {}", data_id);
}
}
results
}
/// List the data_ids of a user's course-type objects (data_type in 3..=49)
/// that are currently available (uploaded, not deleted, not under review,
/// not password protected — availability is checked with password 0).
async fn get_user_course_object_ids(owner_pid: u32) -> Result<Vec<u64>, ErrorCode> {
let rows = sqlx::query!(
r#"
SELECT data_id
FROM datastore.objects
WHERE owner = $1 AND data_type > 2 AND data_type < 50
"#,
// Fix: the `owner` column is Int4 (every sibling query binds it as i32
// and the prepared-query cache lists an Int4 parameter) — the previous
// `as i64` cast mismatched the statement's parameter type.
owner_pid as i32
)
.fetch_all(get_db())
.await
.map_err(|e| {
log::error!("error fetching course IDs for PID {}: {:?}", owner_pid, e);
ErrorCode::DataStore_SystemFileError
})?;
let mut valid_ids = Vec::new();
for row in rows {
let data_id = row.data_id as u64;
// always check avail
if check_object_availability(data_id, 0).await.is_ok() {
valid_ids.push(data_id);
}
}
Ok(valid_ids)
}
/// First tier of banned substrings for user-generated text (Japanese and
/// English profanity, "please star/like" begging, disaster references).
/// NOTE(review): the list contains an empty string "" — if matching is done
/// via `contains`, an empty entry matches every input; confirm this entry
/// is intentional (it may be a character lost in transit/encoding).
/// NOTE(review): "BANされ" and "ウンコ" each appear twice; these may be
/// full-width/half-width variants that render identically — verify against
/// the original bytes before deduplicating.
fn get_blacklist_1() -> Vec<String> {
vec![
"けされ", "消され", "削除され", "リセットされ", "BANされ", "BANされ",
"キミのコース", "君のコース", "きみのコース", "い い ね", "遊びます", "地震",
"震災", "被災", "津波", "バンされ", "い~ね", "震度", "じしん", "banされ",
"くわしくは", "詳しくは", "ちんちん", "ち0こ", "bicth", "い.い.ね",
"ナイ~ス", "い&い", "い-いね", "いぃね", "nigger", "ngger", "star if u",
"Star if u", "Star if you", "star if you", "PENlS", "マンコ", "butthole",
"LILI", "vagina", "vagyna", "うんち", "うんこ", "ウンコ", "",
"EENE", "まんこ", "ウンチ", "niglet", "nigglet", "please like", "きんたま",
"Butthole", "llね", "iいね", "give a star", "ちんぽ", "亀頭", "penis",
"ウンコ", "plz more stars", "star plz", "い()ね", "PLEASE star", "Bitte Sterne",
].into_iter().map(String::from).collect()
}
/// Second tier of banned substrings (stricter/ambiguous phrases kept
/// separate from the first tier).
fn get_blacklist_2() -> Vec<String> {
    let entries = [
        "ゼロから", "0から", "0から", "い  い  ね", "いい", "東日本", "大震",
    ];
    entries.iter().map(|s| s.to_string()).collect()
}
/// Third tier of banned substrings (star-begging phrases, profanity, and
/// violence-related terms).
/// NOTE(review): this list also contains an empty string "" — with
/// `contains`-style matching an empty entry matches everything; confirm
/// it is intentional and not a character lost to encoding.
fn get_blacklist_3() -> Vec<String> {
vec![
"いいね", "下さい", "ください", "押して", "おして", "返す", "かえす",
"", "してくれ", "するよ", "☆くれたら", "☆あげます", "★くれたら",
"★あげます", "しね", "ころす", "ころされた", "アナル", "ファック",
"キンタマ", "○ね", "キチガイ", "うんこ", "KITIGAI", "金玉", "おっぱい",
"☆おす", "☆押す", "★おす", "★押す", "いいする", "いいよ", "イイネ",
"ケツ", "うんち", "かくせいざい", "覚せい剤", "シャブ", "きんたま",
"ちんちん", "おしっこ", "ちんぽこ", "ころして", "グッド", "グット",
"レ●プ", "バーカ", "きちがい", "ちんげ", "マンコ", "まんこ", "チンポ",
"クズ", "ウンコ", "ナイスおねがいします", "penis", "イイね", "☆よろ",
"ナイス!して", "ま/んこ", "まん/こ",
].into_iter().map(String::from).collect()
}
impl DataStore for User {
/// DataStore::GetMeta — look up an object's metadata either by data_id or,
/// when dataid == 0, by persistence target (owner PID + slot), enforce the
/// object's read permission against the caller, then strip properties the
/// caller did not request.
/// (Fix: `metaparam` was declared `mut` but never mutated — unused_mut.)
async fn get_meta(&self, metaparam: GetMetaParam) -> Result<GetMetaInfo, ErrorCode> {
// dataid 0 is the sentinel for "address by persistence target".
let mut meta_info = if metaparam.dataid != 0 {
get_object_info_by_data_id(metaparam.dataid, metaparam.access_password).await?
} else {
get_object_info_by_persistence_target(metaparam.persistence_target, metaparam.access_password).await?
};
// Enforce the object's read permission against the requesting user.
let current_pid = self.pid;
verify_object_permission(meta_info.owner, current_pid, &meta_info.permission)?;
// Drop optional properties (meta binary / ratings) not asked for.
filter_properties_by_result_option(&mut meta_info, metaparam.result_option);
Ok(meta_info)
}
/// DataStore::PreparePostObject — create the object's DB row (upload not
/// yet marked complete; see complete_post_object) and hand the client a
/// presigned S3 POST so it can upload the payload directly to storage.
async fn prepare_post_object(&self, postparam: PreparePostParam) -> Result<ReqPostInfo, ErrorCode> {
// Protocol ids are u32; the DB stores recipients as Int4 arrays.
let recipient_ids: Vec<i32> = postparam.permission.recipient_ids.iter().map(|&id| id as i32).collect();
let del_recipient_ids: Vec<i32> = postparam.del_permission.recipient_ids.iter().map(|&id| id as i32).collect();
let now = time::OffsetDateTime::now_utc();
let row = sqlx::query!(
r#"
INSERT INTO datastore.objects (
owner, size, name, data_type, meta_binary,
permission, permission_recipients,
delete_permission, delete_permission_recipients,
flag, period, refer_data_id, tags,
persistence_slot_id, extra_data, creation_date, update_date
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17
) RETURNING data_id
"#,
self.pid as i32,
postparam.size as i32,
postparam.name,
postparam.data_type as i32,
&postparam.meta_binary.0,
postparam.permission.permission as i32,
&recipient_ids,
postparam.del_permission.permission as i32,
&del_recipient_ids,
postparam.flag as i32,
postparam.period as i32,
postparam.refer_data_id as i64,
&postparam.tags,
postparam.persistence_init_param.persistence_slot_id as i32,
&postparam.extra_data,
// creation and update date start out identical.
time::PrimitiveDateTime::new(now.date(), now.time()),
time::PrimitiveDateTime::new(now.date(), now.time())
)
.fetch_one(get_db())
.await
.map_err(|e| {
log::error!("DB Error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?;
let data_id = row.data_id as u64;
// A fresh presigner per request; endpoint/bucket come from env-backed
// statics (see executables::common).
let presigner = S3Presigner::new(
&format!("https://{}", *RNEX_DATASTORE_S3_ENDPOINT),
format!("{}", *RNEX_DATASTORE_S3_BUCKET)
).await;
// Object payloads live under data/<data_id>.bin in the bucket.
let key = format!("data/{}.bin", data_id);
let (upload_url, fields) = presigner.generate_presigned_post(&key).await;
// The presigned POST form fields are forwarded to the client as-is.
let form_fields = fields.into_iter().map(|(k, v)| {
KeyValue { key: k, value: v }
}).collect();
Ok(ReqPostInfo {
dataid: data_id,
url: upload_url,
request_headers: vec![],
form_fields,
root_ca_cert: vec![],
})
}
/// DataStore::CompletePostObject — after the client's direct-to-S3 upload,
/// mark the object's row as upload_completed (only the owner may do this,
/// and only when the client reports success).
/// NOTE(review): uses eprintln! where sibling methods use log::error! —
/// consider unifying the logging style.
async fn complete_post_object(&self, completeparam: CompletePostParam) -> Result<(), ErrorCode> {
log::info!("Data ID: {:?}", completeparam.dataid);
log::info!("Success: {:?}", completeparam.success);
let record = sqlx::query!(
r#"SELECT owner, under_review FROM datastore.objects WHERE data_id = $1"#,
completeparam.dataid as i64
)
.fetch_optional(get_db())
.await
.map_err(|e| {
eprintln!("select error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?;
let record = record.ok_or(ErrorCode::DataStore_NotFound)?;
if record.under_review {
return Err(ErrorCode::DataStore_UnderReviewing);
}
// Only the row's owner may finalize the upload (owner column is nullable;
// NULL is treated as PID 0).
if record.owner.unwrap_or(0) as u32 != self.pid {
return Err(ErrorCode::DataStore_PermissionDenied);
}
if completeparam.success {
sqlx::query!(
r#"UPDATE datastore.objects SET upload_completed = true WHERE data_id = $1"#,
completeparam.dataid as i64
)
.execute(get_db())
.await
.map_err(|e| {
eprintln!("update error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?;
} else {
// Client reported a failed upload; the row is left incomplete
// (never served — availability requires upload_completed = TRUE).
return Err(ErrorCode::Transport_TemporaryServerError);
}
Ok(())
}
/// DataStore::RateCustomRanking — for each param, verify the target object
/// exists, then upsert its per-application ranking row, accumulating the
/// submitted score onto any existing value.
/// (Fix: the prepared statement declares all three parameters as Int8 —
/// see its .sqlx cache entry — so appid and score must be bound as i64,
/// not i32.)
async fn rate_custom_ranking(&self, rankingparam: Vec<RateCustomRankingParam>) -> Result<(), ErrorCode> {
for abcparam in rankingparam {
// Fail the whole batch on the first missing object.
let exists = sqlx::query_scalar!(
r#"SELECT EXISTS(SELECT 1 FROM datastore.objects WHERE data_id = $1)"#,
abcparam.dataid as i64
)
.fetch_one(get_db())
.await
.map_err(|_| ErrorCode::DataStore_SystemFileError)?;
if !exists.unwrap_or(false) {
return Err(ErrorCode::DataStore_NotFound);
}
sqlx::query!(
r#"
INSERT INTO datastore.object_custom_rankings (data_id, application_id, value)
VALUES ($1, $2, $3)
ON CONFLICT (data_id, application_id)
DO UPDATE SET value = datastore.object_custom_rankings.value + EXCLUDED.value
"#,
abcparam.dataid as i64,
abcparam.appid as i64,
abcparam.score as i64
)
.execute(get_db())
.await
.map_err(|e| {
log::error!("update/insert error: {:?}", e);
ErrorCode::DataStore_SystemFileError
})?;
}
Ok(())
}
/// DataStore::GetApplicationConfig — return the hard-coded SMM config
/// table for the requested application id. Unknown ids are logged and
/// rejected.
async fn get_application_config(&self, appid: u32) -> Result<Vec<i32>, ErrorCode> {
    // Upper bound on course uploads per user (slot 10 of config 0).
    const MAX_COURSE_UPLOADS: i32 = 100;
    match appid {
        // General gameplay/limit table.
        0 => Ok(vec![
            0x00000001, 0x00000032, 0x00000096, 0x0000012c, 0x000001f4,
            0x00000320, 0x00000514, 0x000007d0, 0x00000bb8, 0x00001388,
            MAX_COURSE_UPLOADS, 0x00000014, 0x0000001e, 0x00000028, 0x00000032,
            0x0000003c, 0x00000046, 0x00000050, 0x0000005a, 0x00000064,
            0x00000023, 0x0000004b, 0x00000023, 0x0000004b, 0x00000032,
            0x00000000, 0x00000003, 0x00000003, 0x00000064, 0x00000006,
            0x00000001, 0x00000060, 0x00000005, 0x00000060, 0x00000000,
            0x000007e4, 0x00000001, 0x00000001, 0x0000000c, 0x00000000,
        ]),
        1 => Ok(vec![
            2,
            1770179696,
            1770179664,
            1770179640,
            1770180827,
            1770180777,
            1770180745,
            1770177625,
            1770177590,
        ]),
        2 => Ok(vec![0x000007df, 0x0000000c, 0x00000016, 0x00000005, 0x00000000]),
        10 => Ok(vec![35, 75, 96, 40, 5, 6]),
        _ => {
            log::error!("unknown SMM app id: {}", appid);
            Err(ErrorCode::DataStore_Unknown)
        }
    }
}
async fn get_custom_ranking_by_data_id(
&self,
custom_ranking_param: DataStoreGetCustomRankingByDataIDParam
) -> Result<(Vec<DataStoreCustomRankingResult>, Vec<QResult>), ErrorCode> {
println!("appid: {:?}", custom_ranking_param.application_id);
println!("dataid list: {:?}", custom_ranking_param.data_id_list);
println!("result option: {:?}", custom_ranking_param.result_option);
let mut ranking_results = get_custom_rankings_by_data_ids(custom_ranking_param.application_id, custom_ranking_param.data_id_list).await;
let mut q_results = Vec::with_capacity(ranking_results.len());
for result in &mut ranking_results {
if (custom_ranking_param.result_option & 0x01) == 0 {
result.meta_info.tags = Vec::new();
}
if (custom_ranking_param.result_option & 0x02) == 0 {
result.meta_info.ratings = Vec::new();
}
if (custom_ranking_param.result_option & 0x04) == 0 {
result.meta_info.meta_binary = QBuffer(Vec::new());
}
if (custom_ranking_param.result_option & 0x20) == 0 {
result.score = 0;
}
q_results.push(QResult::success(ErrorCode::Core_Unknown));
}
Ok((ranking_results, q_results))
}
/// DataStore method GetBufferQueue (54): return every buffer queued for the
/// given object/slot pair. Thin delegation to the storage helper.
async fn get_buffer_queue(&self, bufferparam: BufferQueueParam) -> Result<Vec<QBuffer>, ErrorCode> {
    Ok(get_buffer_queues_by_data_id_and_slot(bufferparam.dataid, bufferparam.slot).await?)
}
/// DataStore method PrepareGetObject (25): resolve the object's metadata,
/// verify the caller may read it, and return a presigned S3 GET URL the
/// client downloads from directly.
async fn prepare_get_object(&self, prepare_get_param: DataStorePrepareGetParam) -> Result<DataStoreReqGetInfo, ErrorCode> {
    // A data id of 0 means "look up via the persistence target" instead.
    let meta_info = if prepare_get_param.dataid != 0 {
        get_object_info_by_data_id(prepare_get_param.dataid, prepare_get_param.access_password).await?
    } else {
        get_object_info_by_persistence_target(prepare_get_param.persistence_target, prepare_get_param.access_password).await?
    };
    verify_object_permission(meta_info.owner, self.pid, &meta_info.permission)?;
    let presigner = S3Presigner::new(
        &format!("https://{}", *RNEX_DATASTORE_S3_ENDPOINT),
        // Plain to_string() instead of the redundant format!("{}", ...)
        // (clippy: useless_format).
        RNEX_DATASTORE_S3_BUCKET.to_string()
    ).await;
    // Object payloads live under data/<data_id>.bin in the bucket.
    let key = format!("data/{}.bin", meta_info.dataid);
    let download_url = presigner.generate_presigned_get(&key);
    Ok(DataStoreReqGetInfo {
        url: download_url,
        request_headers: vec![],
        size: meta_info.size,
        root_ca_cert: vec![],
        dataid: meta_info.dataid,
    })
}
/// DataStore method FollowingsLatestCourseSearchObject (65): gather course
/// ranking entries for every followed owner in the request, trimming fields
/// the client did not ask for.
///
/// Result-option bit flags:
///   0x01 tags, 0x02 ratings, 0x04 meta binary, 0x20 score.
async fn followings_latest_course_search_object(
    &self,
    course_search_param: DataStoreSearchParam,
    _extra_data: Vec<String>
) -> Result<Vec<DataStoreCustomRankingResult>, ErrorCode> {
    let opts = course_search_param.result_option;
    let mut combined = Vec::new();
    for &followed_pid in &course_search_param.owner_ids {
        let course_ids = get_user_course_object_ids(followed_pid).await?;
        if course_ids.is_empty() {
            continue;
        }
        let mut batch = get_custom_rankings_by_data_ids(0, course_ids).await;
        for entry in &mut batch {
            if opts & 0x1 == 0 {
                entry.meta_info.tags = Vec::new();
            }
            if opts & 0x2 == 0 {
                entry.meta_info.ratings = Vec::new();
            }
            if opts & 0x4 == 0 {
                entry.meta_info.meta_binary = rnex_core::rmc::structures::qbuffer::QBuffer(Vec::new());
            }
            if opts & 0x20 == 0 {
                entry.score = 0;
            }
        }
        combined.extend(batch);
    }
    // Ordering is intentionally left to the client.
    Ok(combined)
}
/// DataStore method GetApplicationConfigString (74): string-valued config
/// tables. Ids 128-130 map to the three word-blacklist chunks; any other id
/// yields an empty list (logged as a warning, not an error).
async fn get_application_config_string(&self, application_id: u32) -> Result<Vec<String>, ErrorCode> {
    match application_id {
        128 => Ok(get_blacklist_1()),
        129 => Ok(get_blacklist_2()),
        130 => Ok(get_blacklist_3()),
        other => {
            log::warn!("unsupported application_id in GetApplicationConfigString: {}", other);
            Ok(Vec::new())
        }
    }
}
/// DataStore method GetMetasMultipleParam (36): batch metadata lookup.
/// Each input param yields one (meta, result) pair at the same index; a
/// failed lookup or permission check contributes a default meta plus the
/// error wrapped in a QResult, never an early return.
async fn get_metas_multiple_param(
    &self,
    params: Vec<GetMetaParam>
) -> Result<(Vec<GetMetaInfo>, Vec<QResult>), ErrorCode> {
    let mut metas = Vec::with_capacity(params.len());
    let mut results = Vec::with_capacity(params.len());
    for param in params {
        // Data id 0 means "resolve via the persistence target" instead.
        let lookup = if param.dataid != 0 {
            get_object_info_by_data_id(param.dataid, param.access_password).await
        } else {
            get_object_info_by_persistence_target(param.persistence_target, param.access_password).await
        };
        let mut meta = match lookup {
            Ok(meta) => meta,
            Err(e) => {
                metas.push(GetMetaInfo::default());
                results.push(QResult::error(e));
                continue;
            }
        };
        if let Err(e) = verify_object_permission(meta.owner, self.pid, &meta.permission) {
            metas.push(GetMetaInfo::default());
            results.push(QResult::error(e));
            continue;
        }
        // Strip fields the client did not request (0x1 tags, 0x2 ratings,
        // 0x4 meta binary).
        if param.result_option & 0x1 == 0 {
            meta.tags = Vec::new();
        }
        if param.result_option & 0x2 == 0 {
            meta.ratings = Vec::new();
        }
        if param.result_option & 0x4 == 0 {
            meta.meta_binary = rnex_core::rmc::structures::qbuffer::QBuffer(Vec::new());
        }
        metas.push(meta);
        results.push(QResult::success(ErrorCode::Core_Unknown));
    }
    Ok((metas, results))
}
}
}
}

View file

@ -13,3 +13,10 @@ cfg_if! {
pub mod user; pub mod user;
} }
} }
cfg_if! {
if #[cfg(feature = "datastore")] {
pub mod s3presigner;
pub mod datastore;
}
}

View file

@ -0,0 +1,117 @@
use aws_sdk_s3::presigning::PresigningConfig;
use base64::{engine::general_purpose, Engine as _};
use hmac::{Hmac, Mac};
use sha2::{Sha256, Digest};
use chrono::{Utc, Duration};
use serde_json::json;
use rnex_core::executables::common::RNEX_DATASTORE_S3_ENDPOINT;
/// Minimal hand-rolled AWS Signature V4 presigner (POST policy + presigned
/// GET URL) for S3-compatible object stores.
pub struct S3Presigner {
    // Base endpoint URL, scheme included, trailing slash stripped.
    // NOTE(review): this field is never read by the impl — the URL builders
    // use the RNEX_DATASTORE_S3_ENDPOINT global directly. Confirm whether it
    // can be dropped or should replace the global.
    endpoint: String,
    // Target bucket name, appended to the endpoint path.
    bucket: String,
}
impl S3Presigner {
    /// Creates a presigner for the given endpoint (scheme included) and bucket.
    /// The trailing slash is stripped so path joins below stay single-slashed.
    pub async fn new(endpoint: &str, bucket: String) -> Self {
        Self {
            endpoint: endpoint.trim_end_matches('/').to_string(),
            bucket,
        }
    }

    /// Builds a SigV4 browser-POST policy for uploading `key`.
    ///
    /// Returns the upload URL plus the form fields the client must include in
    /// its multipart POST. Credentials come from the standard AWS env vars and
    /// the policy is valid for 15 minutes.
    pub async fn generate_presigned_post(&self, key: &str) -> (String, Vec<(String, String)>) {
        let access_key = std::env::var("AWS_ACCESS_KEY_ID").expect("Missing Access Key");
        let secret_key = std::env::var("AWS_SECRET_ACCESS_KEY").expect("Missing Secret Key");
        let region = "us-east-1"; // hardcoded because it's the default region for most s3 clones
        // Capture a single timestamp so the date, scope, and expiration can
        // never disagree (three separate Utc::now() calls could straddle a
        // day boundary and yield an invalid signature).
        let now = Utc::now();
        let date_short = now.format("%Y%m%d").to_string();
        let date_full = now.format("%Y%m%dT%H%M%SZ").to_string();
        let expiration = (now + Duration::minutes(15)).format("%Y-%m-%dT%H:%M:%SZ").to_string();
        let credential = format!("{}/{}/{}/s3/aws4_request", access_key, date_short, region);
        let policy_json = json!({
            "expiration": expiration,
            "conditions": [
                {"bucket": self.bucket},
                ["starts-with", "$key", key],
                {"x-amz-credential": credential},
                {"x-amz-algorithm": "AWS4-HMAC-SHA256"},
                {"x-amz-date": date_full}
            ]
        });
        let policy_base64 = general_purpose::STANDARD.encode(policy_json.to_string());
        // The POST signature is computed over the base64 policy document.
        let signature = self.calculate_signature(&secret_key, &date_short, region, &policy_base64);
        // (`mut` removed: the field list is built once and never modified.)
        let fields = vec![
            ("key".to_string(), key.to_string()),
            ("X-Amz-Algorithm".to_string(), "AWS4-HMAC-SHA256".to_string()),
            ("X-Amz-Credential".to_string(), credential),
            ("X-Amz-Date".to_string(), date_full),
            ("Policy".to_string(), policy_base64),
            ("X-Amz-Signature".to_string(), signature),
        ];
        let url = format!("https://{}/{}", *RNEX_DATASTORE_S3_ENDPOINT, self.bucket);
        (url, fields)
    }

    /// Builds a presigned GET URL for `key`, valid for 900 seconds.
    ///
    /// Implements SigV4 query-string signing by hand: canonical request →
    /// string-to-sign → derived signing key → hex signature appended as
    /// X-Amz-Signature. Path-style addressing (/bucket/key) is assumed.
    pub fn generate_presigned_get(&self, key: &str) -> String {
        let access_key = std::env::var("AWS_ACCESS_KEY_ID").expect("Missing Access Key");
        let secret_key = std::env::var("AWS_SECRET_ACCESS_KEY").expect("Missing Secret Key");
        let region = "us-east-1";
        // Single timestamp so date_short and date_full stay consistent.
        let now = Utc::now();
        let date_short = now.format("%Y%m%d").to_string();
        let date_full = now.format("%Y%m%dT%H%M%SZ").to_string();
        let credential_scope = format!("{}/{}/s3/aws4_request", date_short, region);
        // Query parameters must appear in canonical (sorted) order here,
        // since this exact string is reused inside the canonical request.
        let query_string = format!(
            "X-Amz-Algorithm=AWS4-HMAC-SHA256&\
            X-Amz-Credential={}%2F{}&\
            X-Amz-Date={}&\
            X-Amz-Expires=900&\
            X-Amz-SignedHeaders=host",
            access_key,
            urlencoding::encode(&credential_scope),
            date_full
        );
        let canonical_request = format!(
            "GET\n/{}/{}\n{}\nhost:{}\n\nhost\nUNSIGNED-PAYLOAD",
            self.bucket, key, query_string, *RNEX_DATASTORE_S3_ENDPOINT
        );
        let hashed_request = hex::encode(Sha256::digest(canonical_request.as_bytes()));
        let string_to_sign = format!(
            "AWS4-HMAC-SHA256\n{}\n{}\n{}",
            date_full, credential_scope, hashed_request
        );
        // Standard SigV4 key derivation chain: date → region → service → scope.
        let k_date = self.hmac_sha256(format!("AWS4{}", secret_key).as_bytes(), &date_short);
        let k_region = self.hmac_sha256(&k_date, region);
        let k_service = self.hmac_sha256(&k_region, "s3");
        let k_signing = self.hmac_sha256(&k_service, "aws4_request");
        let signature = hex::encode(self.hmac_sha256(&k_signing, &string_to_sign));
        format!(
            "https://{}/{}/{}?{}&X-Amz-Signature={}",
            *RNEX_DATASTORE_S3_ENDPOINT, self.bucket, key, query_string, signature
        )
    }

    /// SigV4 signature over a base64 POST policy document, using the same key
    /// derivation chain as the GET path above.
    fn calculate_signature(&self, secret: &str, date: &str, region: &str, policy: &str) -> String {
        let k_date = self.hmac_sha256(format!("AWS4{}", secret).as_bytes(), date);
        let k_region = self.hmac_sha256(&k_date, region);
        let k_service = self.hmac_sha256(&k_region, "s3");
        let k_signing = self.hmac_sha256(&k_service, "aws4_request");
        hex::encode(self.hmac_sha256(&k_signing, policy))
    }

    /// One round of HMAC-SHA256 over `data`, returning the raw MAC bytes.
    fn hmac_sha256(&self, key: &[u8], data: &str) -> Vec<u8> {
        let mut mac = Hmac::<Sha256>::new_from_slice(key).expect("HMAC can take key of any size");
        mac.update(data.as_bytes());
        mac.finalize().into_bytes().to_vec()
    }
}

View file

@ -23,6 +23,7 @@ use rnex_core::rmc::protocols::matchmake_extension::{
}; };
use rnex_core::rmc::protocols::ranking::{Ranking, RawRanking, RawRankingInfo, RemoteRanking}; use rnex_core::rmc::protocols::ranking::{Ranking, RawRanking, RawRankingInfo, RemoteRanking};
use rnex_core::rmc::protocols::secure::{RawSecure, RawSecureInfo, RemoteSecure, Secure}; use rnex_core::rmc::protocols::secure::{RawSecure, RawSecureInfo, RemoteSecure, Secure};
use rnex_core::rmc::protocols::datastore::{DataStore, RawDataStore, RawDataStoreInfo, RemoteDataStore};
use rnex_core::rmc::response::ErrorCode; use rnex_core::rmc::response::ErrorCode;
use rnex_core::rmc::structures::any::Any; use rnex_core::rmc::structures::any::Any;
use rnex_core::rmc::structures::matchmake::{ use rnex_core::rmc::structures::matchmake::{
@ -44,18 +45,37 @@ use rnex_core::rmc::structures::qbuffer::QBuffer;
use rnex_core::rmc::structures::qresult::QResult; use rnex_core::rmc::structures::qresult::QResult;
use rnex_core::rmc::structures::ranking::UploadCompetitionData; use rnex_core::rmc::structures::ranking::UploadCompetitionData;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use cfg_if::cfg_if;
use rnex_core::rmc::protocols::ranking::{CompetitionRankingScoreData, CompetitionRankingGetParam, CompetitionRankingScoreInfo};
use rnex_core::rmc::structures::ranking::{UploadCompetitionData};
use tokio::sync::{Mutex, RwLock}; use tokio::sync::{Mutex, RwLock};
define_rmc_proto!( cfg_if! {
proto UserProtocol{ if #[cfg(feature = "datastore")] {
Secure, define_rmc_proto!(
MatchmakeExtension, proto UserProtocol{
MatchmakeExt, Secure,
Matchmake, MatchmakeExtension,
NatTraversal, MatchmakeExt,
Ranking Matchmake,
NatTraversal,
Ranking,
DataStore
}
);
} else {
define_rmc_proto!(
proto UserProtocol{
Secure,
MatchmakeExtension,
MatchmakeExt,
Matchmake,
NatTraversal,
Ranking
}
);
} }
); }
#[rmc_struct(UserProtocol)] #[rmc_struct(UserProtocol)]
pub struct User { pub struct User {

View file

@ -0,0 +1,240 @@
use macros::{method_id, rmc_proto, RmcSerialize, rmc_struct};
use rnex_core::rmc::structures::qbuffer::QBuffer;
use rnex_core::rmc::response::ErrorCode;
use rnex_core::rmc::structures::qresult::QResult;
use rnex_core::kerberos::KerberosDateTime;
use rnex_core::PID;
use rnex_core::rmc::structures::resultsrange::ResultsRange;
// Field order in these structs defines the RMC wire format via RmcSerialize —
// do not reorder fields.

/// Identifies an object by owner + persistence slot instead of a raw data id.
#[derive(RmcSerialize, Clone, Debug, Default)]
#[rmc_struct(0)]
pub struct PersistenceTarget {
    pub owner: PID,
    pub persistence_slot_id: u16,
}

/// Access or delete permission attached to an object.
#[derive(RmcSerialize, Clone, Debug, Default)]
#[rmc_struct(0)]
pub struct Permission {
    // Permission level; exact level values are protocol-defined — TODO confirm.
    pub permission: u8,
    // PIDs granted access when the level restricts to specific recipients.
    pub recipient_ids: Vec<PID>,
}

/// A rating value paired with the rating slot it lives in.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct RatingInfoWithSlot {
    pub slot: i8,
    pub rating: RatingInfo,
}

/// Aggregate rating state for one slot of an object.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct RatingInfo {
    pub total_value: i64,
    pub count: u32,
    pub initial_value: i64,
}

/// Request parameters for GetMeta / GetMetasMultipleParam.
#[derive(RmcSerialize, Clone, Default)]
#[rmc_struct(0)]
pub struct GetMetaParam {
    // Object id; 0 means "look up via persistence_target instead".
    pub dataid: u64,
    pub persistence_target: PersistenceTarget,
    // Bit flags selecting returned fields: 0x1 tags, 0x2 ratings,
    // 0x4 meta binary (as interpreted by the server impl).
    pub result_option: u8,
    pub access_password: u64,
}
/// Object metadata as returned by GetMeta-family methods.
/// Field order defines the RMC wire format — do not reorder.
#[derive(RmcSerialize, Clone, Default)]
#[rmc_struct(0)]
pub struct GetMetaInfo {
    pub dataid: u64,
    pub owner: PID,
    // Payload size in bytes.
    pub size: u32,
    pub name: String,
    pub data_type: u16,
    pub meta_binary: QBuffer,
    // Read permission.
    pub permission: Permission,
    // Delete permission.
    pub del_permission: Permission,
    pub created_time: KerberosDateTime,
    pub updated_time: KerberosDateTime,
    pub period: u16,
    pub status: u8,
    pub referred_count: u32,
    // NOTE(review): likely a typo for refer_data_id (cf. PreparePostParam);
    // renaming would break callers, so left as-is — confirm before fixing.
    pub refer_dat_id: u32,
    pub flag: u32,
    pub referred_time: KerberosDateTime,
    pub expire_time: KerberosDateTime,
    pub tags: Vec<String>,
    pub ratings: Vec<RatingInfoWithSlot>,
}
/// Initial configuration for one rating slot of a newly posted object.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct RatingInitParam {
    pub flag: u8,
    pub internal_flag: u8,
    pub lock_type: u8,
    // NOTE(review): field name has typos — presumably "initial_value"
    // (cf. RatingInfo::initial_value). Renaming would change the public
    // field name; confirm no other call sites before fixing.
    pub intial_valie: i64,
    pub range_min: i32,
    pub range_max: i32,
    pub period_hour: i8,
    pub period_duration: i16
}

/// RatingInitParam paired with the slot index it configures.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct RatingInitParamWithSlot {
    pub slot: i8,
    pub param: RatingInitParam,
}

/// Persistence-slot setup for a newly posted object.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct PersistenceInitParam {
    pub persistence_slot_id: u16,
    // When true, the object previously occupying the slot is deleted.
    pub delete_last_object: bool,
}

/// Generic string key/value pair (HTTP headers, form fields).
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct KeyValue {
    pub key: String,
    pub value: String,
}
/// Client request to begin an object upload (PreparePostObject, method 24).
/// Field order defines the RMC wire format — do not reorder.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct PreparePostParam {
    // Payload size in bytes.
    pub size: u32,
    pub name: String,
    pub data_type: u16,
    pub meta_binary: QBuffer,
    pub permission: Permission,
    pub del_permission: Permission,
    pub flag: u32,
    pub period: u16,
    pub refer_data_id: u32,
    pub tags: Vec<String>,
    pub rating_init_params: Vec<RatingInitParamWithSlot>,
    pub persistence_init_param: PersistenceInitParam,
    pub extra_data: Vec<String>,
}

/// Server response to PreparePostObject: where and how to upload.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct ReqPostInfo {
    pub dataid: u64,
    // Upload target URL (presigned S3 POST).
    pub url: String,
    pub request_headers: Vec<KeyValue>,
    // Multipart form fields the client must send with the upload.
    pub form_fields: Vec<KeyValue>,
    pub root_ca_cert: Vec<u8>,
}

/// Client notification that an upload finished (CompletePostObject, method 26).
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct CompletePostParam {
    pub dataid: u64,
    // Whether the HTTP upload itself succeeded on the client side.
    pub success: bool,
}

/// One rating contribution for RateCustomRanking (method 48).
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct RateCustomRankingParam {
    pub dataid: u64,
    pub appid: u32,
    pub score: u32,
    pub period: u16,
}

/// Request parameters for GetBufferQueue (method 54).
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct BufferQueueParam {
    pub dataid: u64,
    pub slot: u32,
}
// NOTE: the structs below carry the conventional "DataStore" prefix; the ones
// above omit it. Kept as-is for source compatibility with existing callers.

/// Request parameters for GetCustomRankingByDataID (method 50).
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct DataStoreGetCustomRankingByDataIDParam {
    pub application_id: u32,
    pub data_id_list: Vec<u64>,
    // Bit flags selecting returned fields: 0x1 tags, 0x2 ratings,
    // 0x4 meta binary, 0x20 score (as interpreted by the server impl).
    pub result_option: u8,
}

/// One entry of a custom-ranking response.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct DataStoreCustomRankingResult {
    pub order: u32,
    pub score: u32,
    pub meta_info: GetMetaInfo,
}

/// Request parameters for PrepareGetObject (method 25).
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct DataStorePrepareGetParam {
    // Object id; 0 means "look up via persistence_target instead".
    pub dataid: u64,
    pub lockid: u32,
    pub persistence_target: PersistenceTarget,
    pub access_password: u64,
    pub extra_data: Vec<String>,
}

/// Server response to PrepareGetObject: presigned download location.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(0)]
pub struct DataStoreReqGetInfo {
    pub url: String,
    pub request_headers: Vec<KeyValue>,
    pub size: u32,
    pub root_ca_cert: Vec<u8>,
    pub dataid: u64,
}

/// Search filter for the course-search methods.
/// Field order defines the RMC wire format — do not reorder.
#[derive(RmcSerialize, Clone)]
#[rmc_struct(1)]
pub struct DataStoreSearchParam {
    pub search_target: u8,
    pub owner_ids: Vec<PID>,
    pub owner_type: u8,
    pub destination_ids: Vec<u64>,
    pub data_type: u16,
    pub created_after: KerberosDateTime,
    pub created_before: KerberosDateTime,
    pub updated_after: KerberosDateTime,
    pub updated_before: KerberosDateTime,
    // NOTE(review): likely a typo for refer_data_id — see GetMetaInfo.
    pub refer_dat_id: u32,
    pub tags: Vec<String>,
    pub result_order_column: u8,
    pub result_order: u8,
    pub result_range: ResultsRange,
    // Bit flags selecting returned fields: 0x1 tags, 0x2 ratings,
    // 0x4 meta binary, 0x20 score (as interpreted by the server impl).
    pub result_option: u8,
    pub minimal_rating_frequency: u32,
    pub use_cache: bool,
}
/// NEX DataStore protocol (protocol id 115). Method ids follow the official
/// protocol numbering; only the subset needed by this server is declared.
#[rmc_proto(115)]
pub trait DataStore{
    /// Fetch metadata for a single object.
    #[method_id(8)]
    async fn get_meta(&self, metaparam: GetMetaParam) -> Result<GetMetaInfo, ErrorCode>;
    /// Batch variant of get_meta; one (meta, result) pair per input param.
    #[method_id(36)]
    async fn get_metas_multiple_param(&self, params: Vec<GetMetaParam>) -> Result<(Vec<GetMetaInfo>, Vec<QResult>), ErrorCode>;
    /// Begin an upload; returns the presigned POST target.
    #[method_id(24)]
    async fn prepare_post_object(&self, postparam: PreparePostParam) -> Result<ReqPostInfo, ErrorCode>;
    /// Mark a previously prepared upload as finished (or failed).
    #[method_id(26)]
    async fn complete_post_object(&self, completeparam: CompletePostParam) -> Result<(), ErrorCode>;
    /// Accumulate custom-ranking score contributions.
    #[method_id(48)]
    async fn rate_custom_ranking(&self, rankingparam: Vec<RateCustomRankingParam>) -> Result<(), ErrorCode>;
    /// Integer config table lookup by config id.
    #[method_id(61)]
    async fn get_application_config(&self, appid: u32) -> Result<Vec<i32>, ErrorCode>;
    /// Fetch custom-ranking entries for explicit data ids.
    #[method_id(50)]
    async fn get_custom_ranking_by_data_id(&self, custom_ranking_param: DataStoreGetCustomRankingByDataIDParam) -> Result<(Vec<DataStoreCustomRankingResult>, Vec<QResult>), ErrorCode>;
    /// Fetch queued buffers for an object/slot pair.
    #[method_id(54)]
    async fn get_buffer_queue(&self, bufferparam: BufferQueueParam) -> Result<Vec<QBuffer>, ErrorCode>;
    /// Resolve an object and return a presigned download URL.
    #[method_id(25)]
    async fn prepare_get_object(&self, prepare_get_param: DataStorePrepareGetParam) -> Result<DataStoreReqGetInfo, ErrorCode>;
    /// Latest courses from followed players.
    #[method_id(65)]
    async fn followings_latest_course_search_object(&self, course_search_param: DataStoreSearchParam, extra_data: Vec<String>) -> Result<Vec<DataStoreCustomRankingResult>, ErrorCode>;
    /// String config table lookup (word blacklists) by config id.
    #[method_id(74)]
    async fn get_application_config_string(&self, application_id: u32) -> Result<Vec<String>, ErrorCode>;
}

View file

@ -11,6 +11,7 @@ pub mod nintendo_notification;
pub mod notifications; pub mod notifications;
pub mod ranking; pub mod ranking;
pub mod secure; pub mod secure;
pub mod datastore;
use crate::result::ResultExtension; use crate::result::ResultExtension;
use crate::rmc::message::RMCMessage; use crate::rmc::message::RMCMessage;

View file

@ -2,17 +2,10 @@ use macros::{RmcSerialize, method_id, rmc_proto};
use rnex_core::kerberos::KerberosDateTime; use rnex_core::kerberos::KerberosDateTime;
use rnex_core::rmc::structures::qbuffer::QBuffer; use rnex_core::rmc::structures::qbuffer::QBuffer;
use rnex_core::rmc::structures::resultsrange::ResultsRange;
use rnex_core::rmc::response::ErrorCode; use rnex_core::rmc::response::ErrorCode;
use rnex_core::rmc::structures::ranking::UploadCompetitionData; use rnex_core::rmc::structures::ranking::UploadCompetitionData;
#[derive(RmcSerialize, Debug, Default, Clone)]
#[rmc_struct(0)]
pub struct ResultsRange {
pub offset: u32,
pub size: u32,
}
#[derive(RmcSerialize, Debug, Default, Clone)] #[derive(RmcSerialize, Debug, Default, Clone)]
#[rmc_struct(1)] #[rmc_struct(1)]
pub struct CompetitionRankingGetParam { pub struct CompetitionRankingGetParam {

View file

@ -38,6 +38,7 @@ pub mod ranking;
pub mod rmc_struct; pub mod rmc_struct;
pub mod string; pub mod string;
pub mod variant; pub mod variant;
pub mod resultsrange;
pub trait RmcSerialize { pub trait RmcSerialize {
fn serialize(&self, writer: &mut impl Write) -> Result<()>; fn serialize(&self, writer: &mut impl Write) -> Result<()>;

View file

@ -3,7 +3,7 @@ use bytemuck::bytes_of;
use v_byte_helpers::{IS_BIG_ENDIAN, ReadExtensions}; use v_byte_helpers::{IS_BIG_ENDIAN, ReadExtensions};
use crate::rmc::structures::{Result, RmcSerialize}; use crate::rmc::structures::{Result, RmcSerialize};
#[derive(Clone, Debug)] #[derive(Clone, Debug, Default)]
pub struct QBuffer(pub Vec<u8>); pub struct QBuffer(pub Vec<u8>);
impl RmcSerialize for QBuffer{ impl RmcSerialize for QBuffer{

View file

@ -0,0 +1,8 @@
use macros::RmcSerialize;
/// Pagination window (offset + page size) shared by ranking and datastore
/// queries. Field order defines the RMC wire format — do not reorder.
#[derive(RmcSerialize, Debug, Default, Clone)]
#[rmc_struct(0)]
pub struct ResultsRange{
    pub offset: u32,
    pub size: u32
}