#![type_length_limit = "2257138"]

#[macro_use]
extern crate actix_web;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;

mod cache;
mod config;
mod count;
mod error;
mod service;
mod statics;

use crate::{
    cache::CacheState,
    config::Migration,
    error::{Error, Result},
    service::{Bitbucket, FormService, GitHub, Gitlab, Service},
    statics::{CLIENT, CSS, FAVICON, OPT, REPO_COUNT, VERSION_INFO},
};
use actix_web::{
    error::ErrorBadRequest,
    http::header::{CacheControl, CacheDirective, Expires},
    middleware, web, App, HttpResponse, HttpServer,
};
use badge::{Badge, BadgeOptions};
use bytes::Bytes;
use futures::{unsync::mpsc, Future, Stream};
use git2::Repository;
use number_prefix::{NumberPrefix, Prefixed, Standalone};
use std::{
    borrow::Cow,
    fs::{create_dir_all, read_dir, rename},
    path::Path,
    process::Command,
    sync::atomic::Ordering,
    sync::Arc,
    time::{Duration, SystemTime},
};

include!(concat!(env!("OUT_DIR"), "/templates.rs"));
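/// Form data submitted to the `/generate` endpoint.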
#[derive(Deserialize, Serialize)]
struct GeneratorForm<'a> {
    service: FormService,
    user: Cow<'a, str>,
    repo: Cow<'a, str>,
}
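/// Shared application state: directories holding the bare repositories and the cache files.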
struct State {
    repos: String,
    cache: String,
}
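/// Response body for the `/json` endpoints.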
#[derive(Serialize)]
struct JsonResponse<'a> {
    head: &'a str,
    count: u64,
    commits: u64,
}
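/// Fetches all branches from `origin` into the already-cloned bare repository.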
fn pull(path: impl AsRef<Path>) -> Result<()> {
    let repo = Repository::open_bare(path)?;
    let mut origin = repo.find_remote("origin")?;
    origin.fetch(&["refs/heads/*:refs/heads/*"], None, None)?;
    Ok(())
}
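/// Computes the hits-of-code for an already-cloned repository.
///
/// Shells out to `git log --numstat` (summing additions and deletions per line) and to
/// `git rev-list --count` for the commit count, reusing or incrementally updating the
/// on-disk cache when its recorded HEAD still matches. Returns `(hoc, head, commits)`.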
fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String, u64)> {
    let repo_dir = format!("{}/{}", repo_dir, repo);
    let cache_dir = format!("{}/{}.json", cache_dir, repo);
    let cache_dir = Path::new(&cache_dir);
    let repo = Repository::open_bare(&repo_dir)?;
    let head = format!("{}", repo.head()?.target().ok_or(Error::Internal)?);
    let mut arg_commit_count = vec!["rev-list".to_string(), "--count".to_string()];
    let mut arg = vec![
        "log".to_string(),
        "--pretty=tformat:".to_string(),
        "--numstat".to_string(),
        "--ignore-space-change".to_string(),
        "--ignore-all-space".to_string(),
        "--ignore-submodules".to_string(),
        "--no-color".to_string(),
        "--find-copies-harder".to_string(),
        "-M".to_string(),
        "--diff-filter=ACDM".to_string(),
    ];
    let cache = CacheState::read_from_file(&cache_dir, &head)?;
    match &cache {
        CacheState::Current { count, commits } => {
            info!("Using cache for {}", repo_dir);
            return Ok((*count, head, *commits));
        }
        CacheState::Old(cache) => {
            info!("Updating cache for {}", repo_dir);
            arg.push(format!("{}..HEAD", cache.head));
            arg_commit_count.push(format!("{}..HEAD", cache.head));
        }
        CacheState::No => {
            info!("Creating cache for {}", repo_dir);
            arg_commit_count.push("HEAD".to_string());
        }
    };
    arg.push("--".to_string());
    arg.push(".".to_string());
    let output = Command::new("git")
        .args(&arg)
        .current_dir(&repo_dir)
        .output()?
        .stdout;
    let output = String::from_utf8_lossy(&output);
    let output_commits = Command::new("git")
        .args(&arg_commit_count)
        .current_dir(&repo_dir)
        .output()?
        .stdout;
    let output_commits = String::from_utf8_lossy(&output_commits);
    let commits: u64 = output_commits.trim().parse()?;
    let count: u64 = output
        .lines()
        .map(|s| {
            s.split_whitespace()
                .take(2)
                .map(str::parse::<u64>)
                .filter_map(std::result::Result::ok)
                .sum::<u64>()
        })
        .sum();

    let cache = cache.calculate_new_cache(count, commits, (&head).into());
    cache.write_to_file(cache_dir)?;

    Ok((cache.count, head, commits))
}
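/// Checks via an HTTP `HEAD` request whether the remote repository exists.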
fn remote_exists(url: &str) -> Result<bool> {
    Ok(CLIENT.head(url).send()?.status() == reqwest::StatusCode::OK)
}
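/// Result of a hits-of-code lookup, handed to the endpoint-specific `mapper` closures.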
enum HocResult {
    Hoc {
        hoc: u64,
        commits: u64,
        hoc_pretty: String,
        head: String,
        url: String,
        repo: String,
        service_path: String,
    },
    NotFound,
}
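/// Shared request pipeline for the badge, JSON and overview endpoints: ensures the bare
/// repository exists locally (cloning it on first access), pulls, recomputes the count via
/// `hoc`, and passes the result to the endpoint-specific `mapper`.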
fn handle_hoc_request<T, F>(
    state: web::Data<Arc<State>>,
    data: web::Path<(String, String)>,
    mapper: F,
) -> impl Future<Item = HttpResponse, Error = Error>
where
    T: Service,
    F: Fn(HocResult) -> Result<HttpResponse>,
{
    futures::future::result(Ok(()))
        .and_then(move |_| {
            let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
            let service_path = format!("{}/{}", T::domain(), repo);
            let path = format!("{}/{}", state.repos, service_path);
            let file = Path::new(&path);
            let url = format!("https://{}", service_path);
            if !file.exists() {
                if !remote_exists(&url)? {
                    warn!("Repository does not exist: {}", url);
                    return Ok(HocResult::NotFound);
                }
                info!("Cloning {} for the first time", url);
                create_dir_all(file)?;
                let repo = Repository::init_bare(file)?;
                repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
                repo.remote_set_url("origin", &url)?;
                REPO_COUNT.fetch_add(1, Ordering::Relaxed);
            }
            pull(&path)?;
            let (hoc, head, commits) = hoc(&service_path, &state.repos, &state.cache)?;
            let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
                Standalone(hoc) => hoc.to_string(),
                Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
            };
            Ok(HocResult::Hoc {
                hoc,
                commits,
                hoc_pretty,
                head: head.to_string(),
                url,
                repo,
                service_path,
            })
        })
        .and_then(mapper)
}
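/// JSON endpoint for the `/{service}/{user}/{repo}/json` routes. A successful lookup
/// serializes a `JsonResponse`, e.g. (illustrative values):
///
/// ```text
/// { "head": "<head commit sha>", "count": 12345, "commits": 678 }
/// ```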
fn json_hoc<T: Service>(
    state: web::Data<Arc<State>>,
    data: web::Path<(String, String)>,
) -> impl Future<Item = HttpResponse, Error = Error> {
    let mapper = |r| match r {
        HocResult::NotFound => p404(),
        HocResult::Hoc {
            hoc, head, commits, ..
        } => Ok(HttpResponse::Ok().json(JsonResponse {
            head: &head,
            count: hoc,
            commits,
        })),
    };
    handle_hoc_request::<T, _>(state, data, mapper)
}
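/// Badge endpoint: renders the current hits-of-code count as an SVG badge, served with
/// `Expires`/`Cache-Control` headers that force clients to revalidate on every request.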
fn calculate_hoc<T: Service>(
    state: web::Data<Arc<State>>,
    data: web::Path<(String, String)>,
) -> impl Future<Item = HttpResponse, Error = Error> {
    let mapper = |r| match r {
        HocResult::NotFound => p404(),
        HocResult::Hoc { hoc_pretty, .. } => {
            let badge_opt = BadgeOptions {
                subject: "Hits-of-Code".to_string(),
                color: "#007ec6".to_string(),
                status: hoc_pretty,
            };
            let badge = Badge::new(badge_opt)?;

            let (tx, rx_body) = mpsc::unbounded();
            let _ = tx.unbounded_send(Bytes::from(badge.to_svg().as_bytes()));

            let expiration = SystemTime::now() + Duration::from_secs(30);
            Ok(HttpResponse::Ok()
                .content_type("image/svg+xml")
                .set(Expires(expiration.into()))
                .set(CacheControl(vec![
                    CacheDirective::MaxAge(0u32),
                    CacheDirective::MustRevalidate,
                    CacheDirective::NoCache,
                    CacheDirective::NoStore,
                ]))
                .streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
        }
    };
    handle_hoc_request::<T, _>(state, data, mapper)
}
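/// HTML overview page for a single repository, rendered from `templates::overview`.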
fn overview<T: Service>(
    state: web::Data<Arc<State>>,
    data: web::Path<(String, String)>,
) -> impl Future<Item = HttpResponse, Error = Error> {
    let mapper = |r| match r {
        HocResult::NotFound => p404(),
        HocResult::Hoc {
            hoc,
            commits,
            hoc_pretty,
            url,
            head,
            repo,
            service_path,
        } => {
            let mut buf = Vec::new();
            templates::overview(
                &mut buf,
                VERSION_INFO,
                REPO_COUNT.load(Ordering::Relaxed),
                &OPT.domain,
                &service_path,
                &url,
                hoc,
                &hoc_pretty,
                &head,
                &T::commit_url(&repo, &head),
                commits,
            )?;

            let (tx, rx_body) = mpsc::unbounded();
            let _ = tx.unbounded_send(Bytes::from(buf));

            Ok(HttpResponse::Ok()
                .content_type("text/html")
                .streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
        }
    };
    handle_hoc_request::<T, _>(state, data, mapper)
}
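/// Landing page, rendered from `templates::index`.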
#[get("/")]
fn index() -> Result<HttpResponse> {
    let mut buf = Vec::new();
    templates::index(
        &mut buf,
        VERSION_INFO,
        REPO_COUNT.load(Ordering::Relaxed),
        &OPT.domain,
    )?;
    Ok(HttpResponse::Ok().content_type("text/html").body(buf))
}
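/// Handles the generator form (`POST /generate`) and renders a page for the requested repository.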
#[post("/generate")]
fn generate(params: web::Form<GeneratorForm>) -> Result<HttpResponse> {
    let repo = format!("{}/{}", params.user, params.repo);
    let mut buf = Vec::new();
    templates::generate(
        &mut buf,
        VERSION_INFO,
        REPO_COUNT.load(Ordering::Relaxed),
        &OPT.domain,
        params.service.url(),
        params.service.service(),
        &repo,
    )?;
    let (tx, rx_body) = mpsc::unbounded();
    let _ = tx.unbounded_send(Bytes::from(buf));

    Ok(HttpResponse::Ok()
        .content_type("text/html")
        .streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
}
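/// Renders the 404 page; also used as the default service for unmatched routes.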
fn p404() -> Result<HttpResponse> {
    let mut buf = Vec::new();
    templates::p404(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed))?;
    Ok(HttpResponse::NotFound().content_type("text/html").body(buf))
}
#[get("/tacit-css.min.css")]
fn css() -> HttpResponse {
    HttpResponse::Ok().content_type("text/css").body(CSS)
}

#[get("/favicon.ico")]
fn favicon32() -> HttpResponse {
    HttpResponse::Ok().content_type("image/png").body(FAVICON)
}
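/// Configures and runs the actix-web server: badges at `/{service}/{user}/{repo}`, JSON at
/// `/{service}/{user}/{repo}/json`, HTML overviews at `/view/{service}/{user}/{repo}`, plus the
/// index, generator, CSS and favicon handlers. Blocks until the server shuts down.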
fn start_server() -> Result<()> {
    let interface = format!("{}:{}", OPT.host, OPT.port);
    let state = Arc::new(State {
        repos: OPT.outdir.display().to_string(),
        cache: OPT.cachedir.display().to_string(),
    });
    Ok(HttpServer::new(move || {
        App::new()
            .data(state.clone())
            .wrap(middleware::Logger::default())
            .service(index)
            .service(css)
            .service(favicon32)
            .service(generate)
            .service(web::resource("/github/{user}/{repo}").to_async(calculate_hoc::<GitHub>))
            .service(web::resource("/gitlab/{user}/{repo}").to_async(calculate_hoc::<Gitlab>))
            .service(web::resource("/bitbucket/{user}/{repo}").to_async(calculate_hoc::<Bitbucket>))
            .service(web::resource("/github/{user}/{repo}/json").to_async(json_hoc::<GitHub>))
            .service(web::resource("/gitlab/{user}/{repo}/json").to_async(json_hoc::<Gitlab>))
            .service(web::resource("/bitbucket/{user}/{repo}/json").to_async(json_hoc::<Bitbucket>))
            .service(web::resource("/view/github/{user}/{repo}").to_async(overview::<GitHub>))
            .service(web::resource("/view/gitlab/{user}/{repo}").to_async(overview::<Gitlab>))
            .service(web::resource("/view/bitbucket/{user}/{repo}").to_async(overview::<Bitbucket>))
            .default_service(web::resource("").route(web::get().to_async(p404)))
    })
    .workers(OPT.workers)
    .bind(interface)?
    .run()?)
}
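/// Cache migration for `Migration::CacheCommitCount`: moves the existing cache directory
/// aside (renamed with a `.bak` extension) and recomputes every cache entry by walking the
/// cloned repositories under the output directory.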
fn migrate_cache() -> Result<()> {
    let mut backup_cache = OPT.cachedir.clone();
    backup_cache.set_extension("bak");
    rename(&OPT.cachedir, backup_cache)?;
    let outdir = OPT.outdir.display().to_string();
    let cachedir = OPT.cachedir.display().to_string();
    for service in read_dir(&OPT.outdir)? {
        let service = service?;
        for namespace in read_dir(service.path())? {
            let namespace = namespace?;
            for repo in read_dir(namespace.path())? {
                let repo_path = repo?.path().display().to_string();
                let repo_path: String =
                    repo_path
                        .split(&outdir)
                        .fold(String::new(), |mut acc, next| {
                            acc.push_str(next);
                            acc
                        });
                println!("{}", repo_path);
                hoc(&repo_path, &outdir, &cachedir)?;
            }
        }
    }
    Ok(())
}
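/// Reads the configuration, then either runs the requested cache migration or starts the server.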
fn main() -> Result<()> {
    config::init()?;
    match &OPT.migrate {
        None => start_server(),
        Some(migration) => match migration {
            Migration::CacheCommitCount => migrate_cache(),
        },
    }
}