Skip to content

Commit

Permalink
fix: experimental patch that enforces IPv4 (#16)
Browse files Browse the repository at this point in the history
* Experimental build that removes IPv6 support.

This moves the core HTTP client into an external crate and implements a
custom resolver that attempts to limit connections to IPv4 only.

The reasoning for the external crate: it simplifies debugging this and
enables swapping out the underlying HTTP client adapter for any further
debugging. I wouldn't necessarily consider this design complete and/or
necessary, but this bug is tricky to reproduce and I didn't want to jump
across a bunch of files hunting down imports.

The reasoning for the bug: GCP appears to have some oddities with
regard to IPv6 support, and if a device is configured to be
IPv6-capable and a user does not have a DNS cache entry for the API,
they may run into a randomly broken connection. I believe we never saw
this with the curl connections that were in use on the C++ side, as curl
implements a Happy Eyeballs flow that attempts to find the "best" route
and follow it accordingly. This patch simply attempts to restrict things
to IPv4, which we know works.

This patch is subject to further iteration, as it may require vending a
few debug builds to users to see whether it works out.
  • Loading branch information
ryanmcgrath authored Feb 7, 2024
1 parent 6d8737c commit 1e9fbf1
Show file tree
Hide file tree
Showing 14 changed files with 187 additions and 68 deletions.
16 changes: 12 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ members = [
"ffi",
"game-reporter",
"jukebox",
"slippi-gg-api",
"user",
]

Expand All @@ -32,7 +33,7 @@ serde_repr = { version = "0.1" }
# in extra dependencies.
tracing = { version = "0.1", default-features = false, features = ["std"] }

ureq = { version = "2.7.1", features = ["json"] }
ureq = { version = "2.9.1", features = ["json"] }

[patch.crates-io]
# We need to patch this dependency to fix a bug in Windows where a crash can occur
Expand Down
1 change: 1 addition & 0 deletions dolphin/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ publish = false
default = []
ishiiruka = []
mainline = []
playback = []

[dependencies]
time = { workspace = true }
Expand Down
13 changes: 10 additions & 3 deletions exi/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,20 @@ publish = false

[features]
default = []
ishiiruka = []
mainline = []
ishiiruka = [
"slippi-gg-api/ishiiruka"
]
mainline = [
"slippi-gg-api/mainline"
]
playback = [
"slippi-gg-api/playback"
]

[dependencies]
dolphin-integrations = { path = "../dolphin" }
slippi-game-reporter = { path = "../game-reporter" }
slippi-gg-api = { path = "../slippi-gg-api" }
slippi-jukebox = { path = "../jukebox" }
slippi-user = { path = "../user" }
tracing = { workspace = true }
ureq = { workspace = true }
18 changes: 4 additions & 14 deletions exi/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,9 @@
//! `SlippiEXIDevice` and forwards calls over the C FFI. This has a fairly clean mapping to "when
//! Slippi stuff is happening" and enables us to let the Rust side live in its own world.
use std::time::Duration;

use ureq::AgentBuilder;

use dolphin_integrations::Log;
use slippi_game_reporter::GameReporter;
use slippi_gg_api::APIClient;
use slippi_jukebox::Jukebox;
use slippi_user::UserManager;

Expand Down Expand Up @@ -41,22 +38,15 @@ impl SlippiEXIDevice {
pub fn new(config: Config) -> Self {
tracing::info!(target: Log::SlippiOnline, "Starting SlippiEXIDevice");

// We set `max_idle_connections` to `5` to mimic how CURL was configured in
// the old C++ logic. This gets cloned and passed down into modules so that
// the underlying connection pool is shared.
let http_client = AgentBuilder::new()
.max_idle_connections(5)
.timeout(Duration::from_millis(5000))
.user_agent(&format!("SlippiDolphin/{} (Rust)", config.scm.slippi_semver))
.build();
let api_client = APIClient::new(&config.scm.slippi_semver);

let user_manager = UserManager::new(
http_client.clone(),
api_client.clone(),
config.paths.user_json.clone().into(),
config.scm.slippi_semver.clone(),
);

let game_reporter = GameReporter::new(http_client.clone(), user_manager.clone(), config.paths.iso.clone());
let game_reporter = GameReporter::new(api_client.clone(), user_manager.clone(), config.paths.iso.clone());

// Playback has no need to deal with this.
// (We could maybe silo more?)
Expand Down
14 changes: 12 additions & 2 deletions ffi/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,18 @@ ishiiruka = [
"slippi-exi-device/ishiiruka",
"slippi-user/ishiiruka"
]
mainline = []
playback = []
mainline = [
"dolphin-integrations/mainline",
"slippi-game-reporter/mainline",
"slippi-exi-device/mainline",
"slippi-user/mainline"
]
playback = [
"dolphin-integrations/playback",
"slippi-game-reporter/playback",
"slippi-exi-device/playback",
"slippi-user/playback"
]

[dependencies]
dolphin-integrations = { path = "../dolphin" }
Expand Down
3 changes: 2 additions & 1 deletion game-reporter/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ publish = false
default = []
ishiiruka = []
mainline = []
playback = []

[dependencies]
chksum = { version = "0.2.2", default-features = false, features = ["md5"] }
Expand All @@ -18,6 +19,6 @@ flate2 = "1.0"
serde = { workspace = true }
serde_json = { workspace = true }
serde_repr = { workspace = true }
slippi-gg-api = { path = "../slippi-gg-api" }
slippi-user = { path = "../user" }
tracing = { workspace = true }
ureq = { workspace = true }
9 changes: 4 additions & 5 deletions game-reporter/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,8 @@ use std::sync::Arc;
use std::sync::Mutex;
use std::thread;

use ureq::Agent;

use dolphin_integrations::Log;
use slippi_gg_api::APIClient;
use slippi_user::UserManager;

mod iso_md5_hasher;
Expand Down Expand Up @@ -68,8 +67,8 @@ impl GameReporter {
///
/// Currently, failure to spawn any thread should result in a crash - i.e, if we can't
/// spawn an OS thread, then there are probably far bigger issues at work here.
pub fn new(http_client: Agent, user_manager: UserManager, iso_path: String) -> Self {
let queue = GameReporterQueue::new(http_client.clone());
pub fn new(api_client: APIClient, user_manager: UserManager, iso_path: String) -> Self {
let queue = GameReporterQueue::new(api_client.clone());

// This is a thread-safe "one time" setter that the MD5 hasher thread
// will set when it's done computing.
Expand Down Expand Up @@ -97,7 +96,7 @@ impl GameReporter {
let completion_thread = thread::Builder::new()
.name("GameReporterCompletionProcessingThread".into())
.spawn(move || {
queue::run_completion(http_client, completion_receiver);
queue::run_completion(api_client, completion_receiver);
})
.expect("Failed to spawn GameReporterCompletionProcessingThread.");

Expand Down
43 changes: 21 additions & 22 deletions game-reporter/src/queue.rs
Original file line number Diff line number Diff line change
@@ -1,23 +1,22 @@
//! This module implements the background queue for the Game Reporter.
use std::collections::VecDeque;
use std::io::Write;
use std::sync::mpsc::Receiver;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;

use flate2::write::GzEncoder;
use flate2::Compression;
use serde_json::{json, Value};
use ureq::Agent;

use dolphin_integrations::{Color, Dolphin, Duration as OSDDuration, Log};
use slippi_gg_api::APIClient;

use crate::types::{GameReport, GameReportRequestPayload, OnlinePlayMode};
use crate::{CompletionEvent, ProcessingEvent};

use flate2::write::GzEncoder;
use flate2::Compression;
use std::io::Write;

const GRAPHQL_URL: &str = "https://gql-gateway-dot-slippi.uc.r.appspot.com/graphql";

/// How many times a report should attempt to send.
Expand All @@ -40,16 +39,16 @@ struct ReportResponse {
/// of data around various threads.
#[derive(Clone, Debug)]
pub struct GameReporterQueue {
pub http_client: ureq::Agent,
pub api_client: APIClient,
pub iso_hash: Arc<Mutex<String>>,
inner: Arc<Mutex<VecDeque<GameReport>>>,
}

impl GameReporterQueue {
/// Initializes and returns a new game reporter.
pub(crate) fn new(http_client: Agent) -> Self {
pub(crate) fn new(api_client: APIClient) -> Self {
Self {
http_client,
api_client,
iso_hash: Arc::new(Mutex::new(String::new())),
inner: Arc::new(Mutex::new(VecDeque::new())),
}
Expand Down Expand Up @@ -90,7 +89,7 @@ impl GameReporterQueue {
}
}));

let res = execute_graphql_query(&self.http_client, mutation, variables, Some("abandonOnlineGame"));
let res = execute_graphql_query(&self.api_client, mutation, variables, Some("abandonOnlineGame"));

match res {
Ok(value) if value == "true" => {
Expand All @@ -102,7 +101,7 @@ impl GameReporterQueue {
}
}

pub(crate) fn run_completion(http_client: ureq::Agent, receiver: Receiver<CompletionEvent>) {
pub(crate) fn run_completion(api_client: APIClient, receiver: Receiver<CompletionEvent>) {
loop {
// Watch for notification to do work
match receiver.recv() {
Expand All @@ -112,7 +111,7 @@ pub(crate) fn run_completion(http_client: ureq::Agent, receiver: Receiver<Comple
match_id,
end_mode,
}) => {
report_completion(&http_client, uid, match_id, play_key, end_mode);
report_completion(&api_client, uid, match_id, play_key, end_mode);
},

Ok(CompletionEvent::Shutdown) => {
Expand Down Expand Up @@ -140,7 +139,7 @@ pub(crate) fn run_completion(http_client: ureq::Agent, receiver: Receiver<Comple
///
/// This doesn't necessarily need to be here, but it's easier to grok the codebase
/// if we keep all reporting network calls in one module.
pub fn report_completion(http_client: &ureq::Agent, uid: String, match_id: String, play_key: String, end_mode: u8) {
pub fn report_completion(api_client: &APIClient, uid: String, match_id: String, play_key: String, end_mode: u8) {
let mutation = r#"
mutation ($report: OnlineGameCompleteInput!) {
completeOnlineGame (report: $report)
Expand All @@ -156,7 +155,7 @@ pub fn report_completion(http_client: &ureq::Agent, uid: String, match_id: Strin
}
}));

let res = execute_graphql_query(http_client, mutation, variables, Some("completeOnlineGame"));
let res = execute_graphql_query(api_client, mutation, variables, Some("completeOnlineGame"));

match res {
Ok(value) if value == "true" => {
Expand Down Expand Up @@ -218,7 +217,7 @@ fn process_reports(queue: &GameReporterQueue, event: ProcessingEvent) {
// (e.g, max attempts). We pass the locked queue over to work with the borrow checker
// here, since otherwise we can't pop without some ugly block work to coerce letting
// a mutable borrow drop.
match try_send_next_report(&mut *report_queue, event, &queue.http_client, &iso_hash) {
match try_send_next_report(&mut *report_queue, event, &queue.api_client, &iso_hash) {
Ok(upload_url) => {
// Pop the front of the queue. If we have a URL, chuck it all over
// to the replay uploader.
Expand All @@ -227,7 +226,7 @@ fn process_reports(queue: &GameReporterQueue, event: ProcessingEvent) {
tracing::info!(target: Log::SlippiOnline, "Successfully sent report, popping from queue");

if let (Some(report), Some(upload_url)) = (report, upload_url) {
try_upload_replay_data(report.replay_data, upload_url, &queue.http_client);
try_upload_replay_data(report.replay_data, upload_url, &queue.api_client);
}

thread::sleep(Duration::ZERO)
Expand Down Expand Up @@ -266,7 +265,7 @@ fn process_reports(queue: &GameReporterQueue, event: ProcessingEvent) {
/// The true inner error, minus any metadata.
#[derive(Debug)]
enum ReportSendErrorKind {
Net(ureq::Error),
Net(slippi_gg_api::Error),
JSON(serde_json::Error),
GraphQL(String),
NotSuccessful(String),
Expand All @@ -287,7 +286,7 @@ struct ReportSendError {
fn try_send_next_report(
queue: &mut VecDeque<GameReport>,
event: ProcessingEvent,
http_client: &ureq::Agent,
api_client: &APIClient,
iso_hash: &str,
) -> Result<Option<String>, ReportSendError> {
let report = (*queue).front_mut().expect("Reporter queue is empty yet it shouldn't be");
Expand Down Expand Up @@ -324,7 +323,7 @@ fn try_send_next_report(

// Call execute_graphql_query and get the response body as a String.
let response_body =
execute_graphql_query(http_client, mutation, variables, Some("reportOnlineGame")).map_err(|e| ReportSendError {
execute_graphql_query(api_client, mutation, variables, Some("reportOnlineGame")).map_err(|e| ReportSendError {
is_last_attempt,
sleep_ms: error_sleep_ms,
kind: e,
Expand All @@ -350,7 +349,7 @@ fn try_send_next_report(

/// Prepares and executes a GraphQL query.
fn execute_graphql_query(
http_client: &ureq::Agent,
api_client: &APIClient,
query: &str,
variables: Option<Value>,
field: Option<&str>,
Expand All @@ -367,7 +366,7 @@ fn execute_graphql_query(
};

// Make the GraphQL request
let response = http_client
let response = api_client
.post(GRAPHQL_URL)
.send_json(&request_body)
.map_err(ReportSendErrorKind::Net)?;
Expand Down Expand Up @@ -427,7 +426,7 @@ fn add_slp_header_and_footer(data: Arc<Mutex<Vec<u8>>>) -> Vec<u8> {
}

/// Attempts to compress and upload replay data to the url at `upload_url`.
fn try_upload_replay_data(data: Arc<Mutex<Vec<u8>>>, upload_url: String, http_client: &ureq::Agent) {
fn try_upload_replay_data(data: Arc<Mutex<Vec<u8>>>, upload_url: String, api_client: &APIClient) {
let contents = add_slp_header_and_footer(data);

let mut gzipped_data = vec![0u8; contents.len()]; // Resize to some initial size
Expand All @@ -443,7 +442,7 @@ fn try_upload_replay_data(data: Arc<Mutex<Vec<u8>>>, upload_url: String, http_cl

gzipped_data.resize(res_size, 0);

let response = http_client
let response = api_client
.put(upload_url.as_str())
.set("Content-Type", "application/octet-stream")
.set("Content-Encoding", "gzip")
Expand Down
Loading

0 comments on commit 1e9fbf1

Please sign in to comment.