Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add crate file size upload limits #794

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions src/bin/cratesfyi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,7 @@ use std::path::PathBuf;

use cratesfyi::db::{self, add_path_into_database, connect_db};
use cratesfyi::utils::{add_crate_to_queue, remove_crate_priority, set_crate_priority};
use cratesfyi::Server;
use cratesfyi::{DocBuilder, DocBuilderOptions, RustwideBuilder};
use cratesfyi::{DocBuilder, DocBuilderOptions, Limits, RustwideBuilder, Server};
use structopt::StructOpt;

pub fn main() {
Expand Down Expand Up @@ -412,7 +411,7 @@ impl DatabaseSubcommand {

Self::AddDirectory { directory, prefix } => {
let conn = db::connect_db().expect("failed to connect to the database");
add_path_into_database(&conn, &prefix, directory)
add_path_into_database(&conn, &prefix, directory, &Limits::default())
.expect("Failed to add directory into database");
}

Expand Down
7 changes: 4 additions & 3 deletions src/db/file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@
//! They are using so many inodes and it is better to store them in database instead of
//! filesystem. This module is adding files into database and retrieving them.

use crate::error::Result;
use crate::storage::Storage;
use crate::{docbuilder::Limits, error::Result, storage::Storage};
use postgres::Connection;

use serde_json::Value;
Expand All @@ -30,9 +29,11 @@ pub fn add_path_into_database<P: AsRef<Path>>(
conn: &Connection,
prefix: &str,
path: P,
limits: &Limits,
) -> Result<Value> {
let mut backend = Storage::new(conn);
let file_list = backend.store_all(conn, prefix, path.as_ref())?;
let file_list = backend.store_all(conn, prefix, path.as_ref(), limits)?;

file_list_to_json(file_list.into_iter().collect())
}

Expand Down
11 changes: 11 additions & 0 deletions src/db/migrate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -325,6 +325,17 @@ pub fn migrate(version: Option<Version>, conn: &Connection) -> CratesfyiResult<(
ALTER TABLE releases ALTER COLUMN doc_targets DROP NOT NULL;
"
),
migration!(
context,
// version
13,
// description
"Allow max file upload size to be overridden",
// upgrade query
"ALTER TABLE sandbox_overrides ADD COLUMN upload_size BIGINT;",
// downgrade query
"ALTER TABLE sandbox_overrides DROP COLUMN upload_size;"
),
];

for migration in migrations {
Expand Down
96 changes: 75 additions & 21 deletions src/docbuilder/limits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,23 +3,32 @@ use postgres::Connection;
use std::collections::BTreeMap;
use std::time::Duration;

/// The limits imposed on a crate for building its documentation
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Limits {
pub struct Limits {
/// The maximum memory usage allowed for a crate
memory: usize,
/// The maximum number of targets
targets: usize,
/// The build timeout
timeout: Duration,
/// Whether networking is enabled
networking: bool,
/// The maximum log size kept
max_log_size: usize,
/// The maximum allowed size of a file upload
upload_size: usize,
}

impl Default for Limits {
fn default() -> Self {
Self {
memory: 3 * 1024 * 1024 * 1024, // 3 GB
timeout: Duration::from_secs(15 * 60), // 15 minutes
targets: 10,
networking: false,
max_log_size: 100 * 1024, // 100 KB
targets: 10, // 10 documentation targets
networking: false, // Networking disabled
max_log_size: 100 * 1024, // 100 KB
upload_size: 500 * 1024 * 1024, // 500 MB
}
}
}
Expand All @@ -32,17 +41,25 @@ impl Limits {
"SELECT * FROM sandbox_overrides WHERE crate_name = $1;",
&[&name],
)?;

if !res.is_empty() {
let row = res.get(0);

if let Some(memory) = row.get::<_, Option<i64>>("max_memory_bytes") {
limits.memory = memory as usize;
}

if let Some(timeout) = row.get::<_, Option<i32>>("timeout_seconds") {
limits.timeout = Duration::from_secs(timeout as u64);
}

if let Some(targets) = row.get::<_, Option<i32>>("max_targets") {
limits.targets = targets as usize;
}

if let Some(upload_size) = row.get::<_, Option<i64>>("upload_size") {
limits.upload_size = upload_size as usize;
}
}

Ok(limits)
Expand All @@ -68,6 +85,15 @@ impl Limits {
self.targets
}

/// Returns the maximum allowed size, in bytes, of a single file upload
/// for this crate (defaults to 500 MB unless overridden in
/// the `sandbox_overrides` table — see `Limits::for_crate`).
pub(crate) fn upload_size(&self) -> usize {
self.upload_size
}

/// Test-only setter: overrides the upload size limit (bytes) so tests
/// can exercise the limit without touching the database overrides table.
#[cfg(test)]
pub(crate) fn set_upload_size(&mut self, size: usize) {
self.upload_size = size;
}

pub(crate) fn for_website(&self) -> BTreeMap<String, String> {
let mut res = BTreeMap::new();
res.insert("Available RAM".into(), SIZE_SCALE(self.memory));
Expand All @@ -88,6 +114,10 @@ impl Limits {
"Maximum number of build targets".into(),
self.targets.to_string(),
);
res.insert(
"Maximum uploaded file size".into(),
SIZE_SCALE(self.upload_size),
);
res
}
}
Expand Down Expand Up @@ -119,22 +149,23 @@ fn scale(value: usize, interval: usize, labels: &[&str]) -> String {
mod test {
use super::*;
use crate::test::*;

#[test]
fn retrieve_limits() {
wrapper(|env| {
let db = env.db();

let krate = "hexponent";
// limits work if no crate has limits set
let hexponent = Limits::for_crate(&db.conn(), krate)?;
let hexponent = Limits::for_crate(&db.conn(), "hexponent")?;
assert_eq!(hexponent, Limits::default());

db.conn().query(
"INSERT INTO sandbox_overrides (crate_name, max_targets) VALUES ($1, 15)",
&[&krate],
&[&"hexponent"],
)?;

// limits work if crate has limits set
let hexponent = Limits::for_crate(&db.conn(), krate)?;
let hexponent = Limits::for_crate(&db.conn(), "hexponent")?;
assert_eq!(
hexponent,
Limits {
Expand All @@ -149,42 +180,65 @@ mod test {
memory: 100_000,
timeout: Duration::from_secs(300),
targets: 1,
upload_size: 32,
..Limits::default()
};
db.conn().query("INSERT INTO sandbox_overrides (crate_name, max_memory_bytes, timeout_seconds, max_targets)
VALUES ($1, $2, $3, $4)",
&[&krate, &(limits.memory as i64), &(limits.timeout.as_secs() as i32), &(limits.targets as i32)])?;

db.conn().query(
"INSERT INTO sandbox_overrides (
crate_name,
max_memory_bytes,
timeout_seconds,
max_targets,
upload_size
)
VALUES ($1, $2, $3, $4, $5)",
&[
&krate,
&(limits.memory as i64),
&(limits.timeout.as_secs() as i32),
&(limits.targets as i32),
&(limits.upload_size as i64),
],
)?;

assert_eq!(limits, Limits::for_crate(&db.conn(), krate)?);

Ok(())
});
}

#[test]
fn display_limits() {
let limits = Limits {
memory: 102400,
timeout: Duration::from_secs(300),
targets: 1,
upload_size: 32,
..Limits::default()
};
let display = limits.for_website();

assert_eq!(display.get("Network access"), Some(&"blocked".to_string()));
assert_eq!(
display.get("Network access".into()),
Some(&"blocked".into())
);
assert_eq!(
display.get("Maximum size of a build log".into()),
Some(&"100 KB".into())
display.get("Maximum size of a build log"),
Some(&"100 KB".to_string())
);
assert_eq!(
display.get("Maximum number of build targets".into()),
display.get("Maximum number of build targets"),
Some(&limits.targets.to_string())
);
assert_eq!(
display.get("Maximum rustdoc execution time".into()),
Some(&"5 minutes".into())
display.get("Maximum rustdoc execution time"),
Some(&"5 minutes".to_string())
);
assert_eq!(display.get("Available RAM".into()), Some(&"100 KB".into()));
assert_eq!(display.get("Available RAM"), Some(&"100 KB".to_string()));
assert_eq!(
display.get("Maximum uploaded file size"),
Some(&"32 bytes".to_string())
)
}

#[test]
fn scale_limits() {
// time
Expand Down
2 changes: 1 addition & 1 deletion src/docbuilder/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ pub(crate) mod options;
mod queue;
mod rustwide_builder;

pub(crate) use self::limits::Limits;
pub use self::limits::Limits;
pub(self) use self::metadata::Metadata;
pub(crate) use self::rustwide_builder::BuildResult;
pub use self::rustwide_builder::RustwideBuilder;
Expand Down
8 changes: 6 additions & 2 deletions src/docbuilder/rustwide_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ impl RustwideBuilder {
})?;
}

add_path_into_database(&conn, "", &dest)?;
add_path_into_database(&conn, "", &dest, &limits)?;
conn.query(
"INSERT INTO config (name, value) VALUES ('rustc_version', $1) \
ON CONFLICT (name) DO UPDATE SET value = $1;",
Expand Down Expand Up @@ -349,6 +349,7 @@ impl RustwideBuilder {
&conn,
&prefix,
build.host_source_dir(),
&limits,
)?);

if let Some(name) = res.cargo_metadata.root().library_name() {
Expand Down Expand Up @@ -376,7 +377,7 @@ impl RustwideBuilder {
&metadata,
)?;
}
self.upload_docs(&conn, name, version, local_storage.path())?;
self.upload_docs(&conn, name, version, local_storage.path(), &limits)?;
}

let has_examples = build.host_source_dir().join("examples").is_dir();
Expand Down Expand Up @@ -572,13 +573,16 @@ impl RustwideBuilder {
name: &str,
version: &str,
local_storage: &Path,
limits: &Limits,
) -> Result<()> {
debug!("Adding documentation into database");
add_path_into_database(
conn,
&format!("rustdoc/{}/{}", name, version),
local_storage,
limits,
)?;

Ok(())
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

pub use self::docbuilder::options::DocBuilderOptions;
pub use self::docbuilder::DocBuilder;
pub use self::docbuilder::RustwideBuilder;
pub use self::docbuilder::{Limits, RustwideBuilder};
pub use self::web::Server;

pub mod db;
Expand Down
Loading