Compare commits
30 Commits
| SHA1 |
| --- |
| f1e9d1806f |
| 8c62d01f3c |
| 26a5025a32 |
| 6f931ce46f |
| 2d46592c4a |
| c2d496f2b4 |
| 19d37806f2 |
| b4bd9b8830 |
| 3c8227d0e9 |
| d6409c21ec |
| 990b5acbda |
| f9e14e2ffd |
| a73afe6851 |
| 20544b27d9 |
| 288573b1a4 |
| a12755d7be |
| a248531ce2 |
| de7919a031 |
| 3913039010 |
| 078d3cdcf9 |
| c552a84870 |
| 7c1a14b6ad |
| c69b8207b8 |
| a319f400e9 |
| ddcb041f3f |
| 2a73370c9f |
| 689a2109fa |
| e82146c912 |
| 568398f1c6 |
| 1cc6363cba |
Cargo.lock: 717 changed lines (generated)

File diff suppressed because it is too large.
Cargo.toml: 29 changed lines

```diff
@@ -1,27 +1,34 @@
 [package]
 name = "hoc"
-version = "0.14.1"
+version = "0.14.4"
 authors = ["Valentin Brandl <vbrandl@riseup.net>"]
 edition = "2018"
 build = "build.rs"
 
 [dependencies]
-actix-web = "3.0.0"
+actix-rt = "1.1.1"
+actix-slog = "0.2.1"
+actix-web = "3.1.0"
 badge = "0.3.0"
-bytes = "0.5.6"
-futures = "0.3.5"
-git2 = "0.13.11"
+bytes = "0.6.0"
+futures = "0.3.7"
+git2 = "0.13.12"
 lazy_static = "1.4.0"
-log = "0.4.11"
-log4rs = "0.13.0"
 number_prefix = "0.4.0"
 openssl-probe = "0.1.2"
 reqwest = "0.10.8"
-serde = "1.0.115"
+serde = "1.0.117"
 serde_derive = "1.0.103"
-serde_json = "1.0.57"
-structopt = "0.3.17"
-actix-rt = "1.1.1"
+serde_json = "1.0.59"
+slog = "2.5.2"
+slog-async = "2.5.0"
+slog-atomic = "3.0.0"
+slog-term = "2.6.0"
+structopt = "0.3.20"
+tracing = "0.1.21"
+tracing-subscriber = "0.2.14"
+tracing-actix-web = "0.2.1"
+tracing-futures = "0.2.4"
 
 [build-dependencies]
 ructe = "0.12.0"
```
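Besides routine version bumps, the manifest change drops `log` and `log4rs` and pulls in the `tracing` stack (`tracing`, `tracing-subscriber`, `tracing-actix-web`, `tracing-futures`) alongside the slog crates. A minimal sketch of what that swap means at a call site; this is illustrative code, not taken from this repository, and the span and field names are made up:

```rust
// Hypothetical call site: with `log` the message is one formatted string,
// with `tracing` the same call can carry structured fields on a span.
use tracing::{info, info_span};

fn demo(repo: &str, count: u64) {
    // log-style: everything is interpolated into the message text.
    // log::info!("Using cache for {}", repo);

    // tracing-style: `repo` and `count` stay structured fields.
    let span = info_span!("cache_lookup", repo);
    let _guard = span.enter();
    info!(count, "Using cache");
}
```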
src/cache.rs: 19 changed lines

```diff
@@ -8,6 +8,7 @@ use std::{
 };
 
 /// Enum to indicate the state of the cache
+#[derive(Debug)]
 pub(crate) enum CacheState<'a> {
     /// Current head and cached head are the same
     Current {
@@ -26,11 +27,13 @@ pub(crate) enum CacheState<'a> {
 }
 
 impl<'a> CacheState<'a> {
+    #[instrument]
     pub(crate) fn read_from_file(
-        path: impl AsRef<Path>,
+        path: impl AsRef<Path> + std::fmt::Debug,
         branch: &str,
         head: &str,
     ) -> Result<CacheState<'a>> {
+        trace!("Reading cache");
         if path.as_ref().exists() {
             let cache: Cache = serde_json::from_reader(BufReader::new(File::open(path)?))?;
             Ok(cache
@@ -38,6 +41,7 @@ impl<'a> CacheState<'a> {
                 .get(branch)
                 .map(|c| {
                     if c.head == head {
+                        trace!("Cache is up to date");
                         CacheState::Current {
                             count: c.count,
                             commits: c.commits,
@@ -45,6 +49,7 @@ impl<'a> CacheState<'a> {
                             cache: cache.clone(),
                         }
                     } else {
+                        trace!("Cache is out of date");
                         CacheState::Old {
                             head: c.head.to_string(),
                             // TODO: get rid of clone
@@ -59,6 +64,7 @@ impl<'a> CacheState<'a> {
         }
     }
 
+    #[instrument]
     pub(crate) fn calculate_new_cache(
         self,
         count: u64,
@@ -66,6 +72,7 @@ impl<'a> CacheState<'a> {
         head: Cow<'a, str>,
         branch: &'a str,
     ) -> Cache<'a> {
+        trace!("Calculating new cache");
         match self {
             CacheState::Old { mut cache, .. } => {
                 if let Some(mut cache) = cache.entries.get_mut(branch) {
@@ -77,6 +84,7 @@ impl<'a> CacheState<'a> {
             }
             CacheState::Current { cache, .. } => cache,
             CacheState::NoneForBranch(mut cache) => {
+                trace!("Creating new cache for branch");
                 cache.entries.insert(
                     branch.into(),
                     CacheEntry {
@@ -88,6 +96,7 @@ impl<'a> CacheState<'a> {
                 cache
             }
             CacheState::No => {
+                trace!("Creating new cache file");
                 let mut entries = HashMap::with_capacity(1);
                 entries.insert(
                     branch.into(),
@@ -103,12 +112,12 @@ impl<'a> CacheState<'a> {
     }
 }
 
-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Debug)]
 pub(crate) struct Cache<'a> {
     pub entries: HashMap<Cow<'a, str>, CacheEntry<'a>>,
 }
 
-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Debug)]
 pub(crate) struct CacheEntry<'a> {
     /// HEAD commit ref
     pub head: Cow<'a, str>,
@@ -119,7 +128,9 @@ pub(crate) struct CacheEntry<'a> {
 }
 
 impl<'a> Cache<'a> {
-    pub(crate) fn write_to_file(&self, path: impl AsRef<Path>) -> Result<()> {
+    #[instrument]
+    pub(crate) fn write_to_file(&self, path: impl AsRef<Path> + std::fmt::Debug) -> Result<()> {
+        trace!("Persisting cache to disk");
         create_dir_all(path.as_ref().parent().ok_or(Error::Internal)?)?;
         serde_json::to_writer(
             OpenOptions::new()
```
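The cache functions gain `#[instrument]` attributes and `trace!` events. `#[instrument]` opens a span named after the function and records the arguments via their `Debug` implementations, which is why the generic `path` parameters pick up the extra `+ std::fmt::Debug` bound. A stripped-down sketch of the same pattern (illustrative, not the hoc code):

```rust
use std::path::Path;
use tracing::instrument;

// `#[instrument]` records every argument with its Debug impl, so a generic
// argument such as `path` must also implement Debug.
#[instrument]
fn read_from_file(path: impl AsRef<Path> + std::fmt::Debug, branch: &str) {
    tracing::trace!("Reading cache");
}
```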
src/config.rs

```diff
@@ -1,10 +1,3 @@
-use crate::{error::Result, statics::OPT};
-use log::LevelFilter;
-use log4rs::{
-    append::{console::ConsoleAppender, file::FileAppender},
-    config::{Appender, Config, Root},
-    encode::pattern::PatternEncoder,
-};
 use std::path::PathBuf;
 use structopt::StructOpt;
 
@@ -38,33 +31,11 @@ pub(crate) struct Opt {
     #[structopt(short = "w", long = "workers", default_value = "4")]
     /// Number of worker threads
     pub(crate) workers: usize,
-    #[structopt(
-        short = "l",
-        long = "logfile",
-        parse(from_os_str),
-        default_value = "./hoc.log"
-    )]
-    /// The logfile
-    pub(crate) logfile: PathBuf,
 }
 
-pub(crate) async fn init() -> Result<()> {
+pub(crate) fn init() {
     std::env::set_var("RUST_LOG", "actix_web=info,hoc=info");
     openssl_probe::init_ssl_cert_env_vars();
-    let stdout = ConsoleAppender::builder().build();
-    let file = FileAppender::builder()
-        .encoder(Box::new(PatternEncoder::new("{d} - {m}{n}")))
-        .build(&OPT.logfile)
-        .unwrap();
-    let config = Config::builder()
-        .appender(Appender::builder().build("stdout", Box::new(stdout)))
-        .appender(Appender::builder().build("file", Box::new(file)))
-        .build(
-            Root::builder()
-                .appender("stdout")
-                .appender("file")
-                .build(LevelFilter::Info),
-        )?;
-    log4rs::init_config(config)?;
-    Ok(())
+
+    tracing_subscriber::fmt().init();
 }
```
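`init` drops the log4rs console and file appenders, the logfile option, and the `Result` return type, and now only installs the default `tracing-subscriber` formatter. A hedged sketch of roughly equivalent setup: the diff itself calls nothing more than `tracing_subscriber::fmt().init()`, and the `EnvFilter` line is an optional addition shown only to illustrate how the `RUST_LOG` variable set in `init()` can drive verbosity (assumes tracing-subscriber 0.2 with its `env-filter` feature enabled):

```rust
// Sketch only: a fmt subscriber like the one init() installs, plus an optional
// EnvFilter so RUST_LOG controls the log level. Not part of the diff.
use tracing_subscriber::EnvFilter;

fn init_tracing() {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .init();
}
```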
src/count.rs

```diff
@@ -1,10 +1,12 @@
 use crate::error::Result;
 use std::{fs::read_dir, path::Path, result::Result as StdResult};
 
+#[instrument]
 pub(crate) fn count_repositories<P>(repo_path: P) -> Result<usize>
 where
-    P: AsRef<Path>,
+    P: AsRef<Path> + std::fmt::Debug,
 {
+    trace!("Counting repositories");
     std::fs::create_dir_all(&repo_path)?;
     Ok(read_dir(repo_path)?
         .filter_map(StdResult::ok)
```
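The same `#[instrument]` plus `Debug`-bound pattern as in cache.rs. As an aside, an alternative the diff does not take is `#[instrument(skip(...))]`, which leaves an argument out of the span and therefore avoids the extra bound; a small illustrative sketch:

```rust
use std::path::Path;
use tracing::instrument;

// Skipping the argument means the span never tries to Debug-format it,
// so no `Debug` bound is needed on `P`. Illustrative only.
#[instrument(skip(repo_path))]
fn count_repositories<P: AsRef<Path>>(repo_path: P) -> std::io::Result<usize> {
    Ok(std::fs::read_dir(repo_path)?.count())
}
```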
src/error.rs: 16 changed lines

```diff
@@ -14,8 +14,6 @@ pub(crate) enum Error {
     Git(git2::Error),
     Internal,
     Io(std::io::Error),
-    Log(log::SetLoggerError),
-    LogBuilder(log4rs::config::Errors),
     Parse(std::num::ParseIntError),
     Serial(serde_json::Error),
     BranchNotFound,
@@ -29,8 +27,6 @@ impl fmt::Display for Error {
             Error::Git(e) => write!(fmt, "Git({})", e),
             Error::Internal => write!(fmt, "Internal Error"),
             Error::Io(e) => write!(fmt, "Io({})", e),
-            Error::Log(e) => write!(fmt, "Log({})", e),
-            Error::LogBuilder(e) => write!(fmt, "LogBuilder({})", e),
             Error::Parse(e) => write!(fmt, "Parse({})", e),
             Error::Serial(e) => write!(fmt, "Serial({})", e),
             Error::BranchNotFound => write!(fmt, "Repo doesn't have master branch"),
@@ -76,12 +72,6 @@ impl From<git2::Error> for Error {
     }
 }
 
-impl From<log::SetLoggerError> for Error {
-    fn from(err: log::SetLoggerError) -> Self {
-        Error::Log(err)
-    }
-}
-
 impl From<std::io::Error> for Error {
     fn from(err: std::io::Error) -> Self {
         Error::Io(err)
@@ -100,12 +90,6 @@ impl From<reqwest::Error> for Error {
     }
 }
 
-impl From<log4rs::config::Errors> for Error {
-    fn from(err: log4rs::config::Errors) -> Self {
-        Error::LogBuilder(err)
-    }
-}
-
 impl From<std::num::ParseIntError> for Error {
     fn from(err: std::num::ParseIntError) -> Self {
         Error::Parse(err)
```
src/main.rs: 170 changed lines

```diff
@@ -5,9 +5,9 @@ extern crate actix_web;
 #[macro_use]
 extern crate lazy_static;
 #[macro_use]
-extern crate log;
-#[macro_use]
 extern crate serde_derive;
+#[macro_use]
+extern crate tracing;
 
 mod cache;
 mod config;
@@ -29,7 +29,8 @@ use crate::{
 };
 use actix_web::{
     http::header::{CacheControl, CacheDirective, Expires, LOCATION},
-    middleware, web, App, HttpResponse, HttpServer, Responder,
+    middleware::{self, normalize::TrailingSlash},
+    web, App, HttpResponse, HttpServer, Responder,
 };
 use badge::{Badge, BadgeOptions};
 use git2::{BranchType, Repository};
@@ -44,6 +45,7 @@ use std::{
     sync::Arc,
     time::{Duration, SystemTime},
 };
+use tracing::Instrument;
 
 include!(concat!(env!("OUT_DIR"), "/templates.rs"));
 
@@ -107,16 +109,16 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str, branch: &str) -> Result<(u64
     let cache = CacheState::read_from_file(&cache_dir, branch, &head)?;
     match &cache {
         CacheState::Current { count, commits, .. } => {
-            info!("Using cache for {}", repo_dir);
+            info!("Using cache");
             return Ok((*count, head, *commits));
         }
         CacheState::Old { head, .. } => {
-            info!("Updating cache for {}", repo_dir);
+            info!("Updating cache");
             arg.push(format!("{}..{}", head, branch));
             arg_commit_count.push(format!("{}..{}", head, branch));
         }
         CacheState::No | CacheState::NoneForBranch(..) => {
-            info!("Creating cache for {}", repo_dir);
+            info!("Creating cache");
             arg.push(branch.to_string());
             arg_commit_count.push(branch.to_string());
         }
@@ -124,7 +126,7 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str, branch: &str) -> Result<(u64
     arg.push("--".to_string());
     arg.push(".".to_string());
     let output = Command::new("git")
-        .args(&dbg!(arg))
+        .args(&arg)
         .current_dir(&repo_dir)
         .output()?
         .stdout;
```
```diff
@@ -179,36 +181,45 @@ where
     T: Service,
 {
     let data = data.into_inner();
-    let repo = format!(
-        "{}/{}/{}",
-        T::domain(),
-        data.0.to_lowercase(),
-        data.1.to_lowercase()
+    let span = info_span!(
+        "deleting repository and cache",
+        service = T::domain(),
+        user = data.0.as_str(),
+        repo = data.1.as_str()
     );
-    info!("Deleting cache and repository for {}", repo);
-    let cache_dir = dbg!(format!("{}/{}.json", &state.cache, repo));
-    let repo_dir = dbg!(format!("{}/{}", &state.repos, repo));
-    std::fs::remove_file(&cache_dir).or_else(|e| {
-        if e.kind() == io::ErrorKind::NotFound {
-            Ok(())
-        } else {
-            Err(e)
-        }
-    })?;
-    std::fs::remove_dir_all(&repo_dir).or_else(|e| {
-        if e.kind() == io::ErrorKind::NotFound {
-            Ok(())
-        } else {
-            Err(e)
-        }
-    })?;
-    REPO_COUNT.fetch_sub(1, Ordering::Relaxed);
-    Ok(HttpResponse::TemporaryRedirect()
-        .header(
-            LOCATION,
-            format!("/view/{}/{}/{}", T::url_path(), data.0, data.1),
-        )
-        .finish())
+    let future = async {
+        let repo = format!(
+            "{}/{}/{}",
+            T::domain(),
+            data.0.to_lowercase(),
+            data.1.to_lowercase()
+        );
+        info!("Deleting cache and repository");
+        let cache_dir = format!("{}/{}.json", &state.cache, repo);
+        let repo_dir = format!("{}/{}", &state.repos, repo);
+        std::fs::remove_file(&cache_dir).or_else(|e| {
+            if e.kind() == io::ErrorKind::NotFound {
+                Ok(())
+            } else {
+                Err(e)
+            }
+        })?;
+        std::fs::remove_dir_all(&repo_dir).or_else(|e| {
+            if e.kind() == io::ErrorKind::NotFound {
+                Ok(())
+            } else {
+                Err(e)
+            }
+        })?;
+        REPO_COUNT.fetch_sub(1, Ordering::Relaxed);
+        Ok(HttpResponse::TemporaryRedirect()
+            .header(
+                LOCATION,
+                format!("/view/{}/{}/{}", T::url_path(), data.0, data.1),
+            )
+            .finish())
+    };
+    future.instrument(span).await
 }
 
 async fn handle_hoc_request<T, F>(
```
```diff
@@ -222,41 +233,51 @@ where
     F: Fn(HocResult) -> Result<HttpResponse>,
 {
     let data = data.into_inner();
-    let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
-    let service_path = format!("{}/{}", T::url_path(), repo);
-    let service_url = format!("{}/{}", T::domain(), repo);
-    let path = format!("{}/{}", state.repos, service_url);
-    let url = format!("https://{}", service_url);
-    let remote_exists = remote_exists(&url).await?;
-    let file = Path::new(&path);
-    if !file.exists() {
-        if !remote_exists {
-            warn!("Repository does not exist: {}", url);
-            return mapper(HocResult::NotFound);
-        }
-        info!("Cloning {} for the first time", url);
-        create_dir_all(file)?;
-        let repo = Repository::init_bare(file)?;
-        repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
-        repo.remote_set_url("origin", &url)?;
-        REPO_COUNT.fetch_add(1, Ordering::Relaxed);
-    }
-    pull(&path)?;
-    let (hoc, head, commits) = hoc(&service_url, &state.repos, &state.cache, branch)?;
-    let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
-        NumberPrefix::Standalone(hoc) => hoc.to_string(),
-        NumberPrefix::Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
-    };
-    let res = HocResult::Hoc {
-        hoc,
-        commits,
-        hoc_pretty,
-        head,
-        url,
-        repo,
-        service_path,
-    };
-    mapper(res)
+    let span = info_span!(
+        "handling hoc calculation",
+        service = T::domain(),
+        user = data.0.as_str(),
+        repo = data.1.as_str(),
+        branch
+    );
+    let future = async {
+        let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
+        let service_path = format!("{}/{}", T::url_path(), repo);
+        let service_url = format!("{}/{}", T::domain(), repo);
+        let path = format!("{}/{}", state.repos, service_url);
+        let url = format!("https://{}", service_url);
+        let remote_exists = remote_exists(&url).await?;
+        let file = Path::new(&path);
+        if !file.exists() {
+            if !remote_exists {
+                warn!("Repository does not exist");
+                return mapper(HocResult::NotFound);
+            }
+            info!("Cloning for the first time");
+            create_dir_all(file)?;
+            let repo = Repository::init_bare(file)?;
+            repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
+            repo.remote_set_url("origin", &url)?;
+            REPO_COUNT.fetch_add(1, Ordering::Relaxed);
+        }
+        pull(&path)?;
+        let (hoc, head, commits) = hoc(&service_url, &state.repos, &state.cache, branch)?;
+        let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
+            NumberPrefix::Standalone(hoc) => hoc.to_string(),
+            NumberPrefix::Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
+        };
+        let res = HocResult::Hoc {
+            hoc,
+            commits,
+            hoc_pretty,
+            head,
+            url,
+            repo,
+            service_path,
+        };
+        mapper(res)
+    };
+    future.instrument(span).await
 }
 
 pub(crate) async fn json_hoc<T: Service>(
```
```diff
@@ -411,11 +432,10 @@ async fn start_server() -> std::io::Result<()> {
     HttpServer::new(move || {
         App::new()
             .data(state.clone())
-            .wrap(middleware::Logger::default())
-            // .wrap(middleware::NormalizePath::default())
+            .wrap(tracing_actix_web::TracingLogger)
+            .wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
             .service(index)
             .service(web::resource("/tacit-css.min.css").route(web::get().to(css)))
-            // TODO
             .service(web::resource("/favicon.ico").route(web::get().to(favicon32)))
             .service(generate)
             .service(web::resource("/github/{user}/{repo}").to(calculate_hoc::<GitHub>))
```
```diff
@@ -449,6 +469,8 @@ async fn start_server() -> std::io::Result<()> {
 
 #[actix_rt::main]
 async fn main() -> std::io::Result<()> {
-    config::init().await.unwrap();
-    start_server().await
+    config::init();
+    let span = info_span!("hoc", version = env!("CARGO_PKG_VERSION"));
+    let _ = span.enter();
+    start_server().instrument(span).await
 }
```
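In main.rs the handlers now build an `info_span!` carrying service, user, repo (and branch) fields and attach it to an async block with `.instrument(span)` from `tracing::Instrument`, while `main()` wraps `start_server()` in a span recording the crate version; request logging moves from actix-web's `Logger` middleware to `tracing_actix_web::TracingLogger`, and paths are normalized with `NormalizePath::new(TrailingSlash::Trim)`. A minimal sketch of the span-per-operation pattern; the function name and field values here are illustrative, not hoc code:

```rust
// Minimal sketch of the pattern used above.
use tracing::{info, info_span, Instrument};

async fn delete_entry(user: &str, repo: &str) {
    let span = info_span!("deleting repository and cache", user, repo);
    let future = async {
        // Events emitted here inherit the span's fields, so the message itself
        // can stay short ("Deleting cache and repository").
        info!("Deleting cache and repository");
    };
    // Holding a span guard across .await points is error-prone; instrumenting
    // the future keeps the span entered exactly while the future is polled.
    future.instrument(span).await;
}
```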