Compare commits
296 Commits
v0.9.2
...
feature/st
Author | SHA1 | Date | |
---|---|---|---|
c2d496f2b4 | |||
19d37806f2 | |||
b4bd9b8830 | |||
3c8227d0e9 | |||
d6409c21ec | |||
990b5acbda | |||
f9e14e2ffd | |||
a73afe6851 | |||
20544b27d9 | |||
288573b1a4 | |||
a12755d7be | |||
a248531ce2 | |||
de7919a031 | |||
3913039010 | |||
078d3cdcf9 | |||
c552a84870 | |||
7c1a14b6ad | |||
c69b8207b8 | |||
a319f400e9 | |||
ddcb041f3f | |||
2a73370c9f | |||
689a2109fa | |||
e82146c912 | |||
568398f1c6 | |||
1cc6363cba | |||
dcfca96051 | |||
981c347acf | |||
22aad6ed77 | |||
b06bbbba8f | |||
e06e6736c0 | |||
0abfba8b4f | |||
ca87bf9282 | |||
4f37b101ae | |||
e16f02d729 | |||
1256996d19 | |||
c1d1551283 | |||
26bdb3dbb5 | |||
c9d959d4c5 | |||
15ca2f10b8 | |||
2a81fbe9f2 | |||
fe276458db | |||
73d034ff00 | |||
63e6a3d2db | |||
fbb5bbd20e | |||
7ff238fea6 | |||
06e9926cb5 | |||
c9a54eda49 | |||
382cd73bae | |||
8e5270a1d8 | |||
fdf3fda24a | |||
219099779b | |||
7393ba54f1 | |||
c14cf28a3e | |||
68a0604365 | |||
8dc07554f6 | |||
64bb6c2922 | |||
4be0cdeb5f | |||
0c9da804bb | |||
f95a2a7923 | |||
8eff7db8ae | |||
57df8ab209 | |||
ffea4f21ef | |||
0b0e88c604 | |||
c48e27c649 | |||
dfa353c356 | |||
2f1209d1e0 | |||
195815fc8a | |||
4e393fdd57 | |||
75f14f7878 | |||
349640acc5 | |||
1d0eca90a7 | |||
81bb65db4e | |||
6295477ccf | |||
b7324b3b38 | |||
e90fb7f54c | |||
253de8cea2 | |||
bd014301fd | |||
45c0d26e33 | |||
0edceb6a7d | |||
056d798d61 | |||
96f2e9422c | |||
df19951729 | |||
bfc97dad32 | |||
d28ac95aa2 | |||
8a0df3e52c | |||
e2c42a5287 | |||
f638124930 | |||
9cabea63c3 | |||
20074ac4e1 | |||
0ec7bd93d8 | |||
c1a1af0109 | |||
2c0094670b | |||
58fdc32627 | |||
cbeca19467 | |||
4cc8bd4385 | |||
fac1efeb7d | |||
1abc2d6333 | |||
a69c523e3c | |||
766c67f723 | |||
2f70e42f9a | |||
763910b515 | |||
dca32e4317 | |||
c301978f0b | |||
97fcb5a420 | |||
724c49d056 | |||
7b5d225701 | |||
90fc0ee584 | |||
978321e6ef | |||
7e5adbee24 | |||
803f95cde8 | |||
31fe058879 | |||
421c1a4164 | |||
06fa568225 | |||
520ac2442f | |||
61c4b18bf7 | |||
a3ccfdc4a3 | |||
70ce0f71e2 | |||
ce6150c48b | |||
95b60f3753 | |||
ec9f7a3635 | |||
9fef178a27 | |||
7af58de9fa | |||
6c414bf07d | |||
d39a81720a | |||
9ed5a5b257 | |||
f3c138b7d4 | |||
51b8519fc2 | |||
b1bce024de | |||
89a5e52857 | |||
da9f9263d1 | |||
62accd6e43 | |||
210a1f4592 | |||
3708768f34 | |||
94265ba39f | |||
7e3ca9c620 | |||
a3c978e0fc | |||
67db10460f | |||
615f71bfd7 | |||
1010d5cf0d | |||
6896a22409 | |||
3c2f06ebae | |||
3c945f5762 | |||
903abdefdc | |||
01689fc89e | |||
ede937d651 | |||
1805d86048 | |||
a81de6f89c | |||
dfc49c6b64 | |||
c7cde6222a | |||
fb889aa0ec | |||
36f29a410b | |||
e65efb5ea1 | |||
2127dbe879 | |||
9fe0c080c7 | |||
5f132ed0c8 | |||
5fe25b83e1 | |||
72655edcfc | |||
2ef4412f0a | |||
3863a870c5 | |||
7e29eab063 | |||
303445f3ad | |||
6a03ae9fff | |||
9ac60b8a26 | |||
d2c23ebf4f | |||
d0ddc7f62d | |||
164c97f453 | |||
c702bec8e2 | |||
2c002a1865 | |||
95efa4d875 | |||
65f1575045 | |||
9f95ae74a4 | |||
37159f8909 | |||
0142425f1e | |||
b72641221b | |||
50d985ffd4 | |||
8b0cc49724 | |||
6d0d53af1b | |||
b666a3e3b3 | |||
f5b37ba700 | |||
beb3c8330c | |||
34ee7e3853 | |||
84cf8b9a6f | |||
9ef0337605 | |||
bbf5bba490 | |||
9991f6c545 | |||
37e3c5ee72 | |||
edab6fd3dd | |||
97197190f5 | |||
4959fabd29 | |||
fecd5d0b6a | |||
1060edaebe | |||
bc9f411297 | |||
7fc920cddd | |||
6a4c1edf9e | |||
c1ea45d517 | |||
0b729da42f | |||
c0d0aa2176 | |||
41159c4ba4 | |||
b40b6bcad2 | |||
6e86f49585 | |||
c8c73b6b83 | |||
c6cc956f4d | |||
e37489d2cb | |||
8246be996e | |||
1f32f3eda4 | |||
ab6c317241 | |||
1223d429db | |||
9613aa7848 | |||
334fa8ced5 | |||
062343dd32 | |||
bdb2a151fa | |||
7e66393966 | |||
32ac5ca0d8 | |||
b3b4cd0e42 | |||
be08b87bbb | |||
cf4d427b96 | |||
b5ebc54372 | |||
231741629e | |||
135c5756b1 | |||
79cfee5fa5 | |||
1c9b5db4e1 | |||
6e0e452395 | |||
9e44731650 | |||
f673d71387 | |||
eda1822dce | |||
fb64af3456 | |||
0837284798 | |||
a7039f15b5 | |||
b1be281982 | |||
2795995f58 | |||
913cc9dc75 | |||
645d6e4a2b | |||
e455a89de4 | |||
f2cae46624 | |||
f47cbbe81a | |||
85f90c0bdb | |||
9143e5dbf8 | |||
865e3c7bbd | |||
5ebdd4067c | |||
55a1405e71 | |||
1436336b6a | |||
5a74306e49 | |||
614e5a6da5 | |||
0128e267cc | |||
c3dffac5da | |||
aaaff22907 | |||
ddfcbf11f1 | |||
3bee454548 | |||
9cff6f1f13 | |||
c946c5d0b4 | |||
88ae90af95 | |||
9f783dffa8 | |||
2cdf64ed80 | |||
9e024b530e | |||
0db7b26541 | |||
009d2c4813 | |||
f0ed6c2e23 | |||
d16a21e74e | |||
7e721ac212 | |||
4eb5d06cd1 | |||
b71207ca51 | |||
f3902f800f | |||
b8e454762e | |||
268734efdd | |||
c71925f61e | |||
c5bbd14a05 | |||
a239a3f80b | |||
00b4e60341 | |||
ed1cafafd0 | |||
bdb10fd54a | |||
1f01c3b964 | |||
3f94789b8b | |||
e4f8604861 | |||
25a179dad6 | |||
6ee44f7ba3 | |||
e9ebbee957 | |||
5211481226 | |||
fd230db0cb | |||
92a95f33d4 | |||
7e13f93ee3 | |||
36afc8732e | |||
abeed1f971 | |||
03a0743517 | |||
cbfd56b33a | |||
9c5208d6e4 | |||
92d7b6668a | |||
97510fa325 | |||
65ba104de1 | |||
9599d934a2 | |||
61e2dd174a | |||
fe00cddd47 | |||
9b73c99922 | |||
ed3da3a2c3 | |||
504476b145 | |||
9bc1b42750 | |||
140265b713 |
31
.github/workflows/audit.yml
vendored
Normal file
31
.github/workflows/audit.yml
vendored
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
name: Security audit
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 1 * * *'
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- 'Cargo.toml'
|
||||||
|
- 'Cargo.lock'
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security_audit:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
|
||||||
|
- name: Cache cargo registry
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: /usr/share/rust/.cargo/registry
|
||||||
|
key: ${{ runner.os }}-cargo-registry
|
||||||
|
|
||||||
|
- name: Cache cargo index
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: /usr/share/rust/.cargo/git
|
||||||
|
key: ${{ runner.os }}-cargo-index
|
||||||
|
|
||||||
|
- uses: actions-rs/audit-check@v1
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
20
.github/workflows/nix-build.yml
vendored
Normal file
20
.github/workflows/nix-build.yml
vendored
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
name: "Nix Build"
|
||||||
|
|
||||||
|
on: [push]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: cachix/install-nix-action@v8
|
||||||
|
- name: Cache nix store
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: /nix
|
||||||
|
key: ${{ runner.os }}-nix-store
|
||||||
|
- uses: cachix/cachix-action@v5
|
||||||
|
with:
|
||||||
|
name: hitsofcode
|
||||||
|
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
|
||||||
|
attributes: package
|
94
.github/workflows/rust.yml
vendored
Normal file
94
.github/workflows/rust.yml
vendored
Normal file
@ -0,0 +1,94 @@
|
|||||||
|
name: Rust
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
name: Linting and Formatting Checks
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout sources
|
||||||
|
uses: actions/checkout@v1
|
||||||
|
|
||||||
|
- name: Install stable toolchain
|
||||||
|
uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
|
||||||
|
- name: Install rustfmt
|
||||||
|
run: rustup component add rustfmt
|
||||||
|
|
||||||
|
- name: Cache cargo registry
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/registry
|
||||||
|
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Cache cargo index
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/git
|
||||||
|
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Cache cargo build
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: target
|
||||||
|
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Check Formatting
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: fmt
|
||||||
|
args: --all -- --check
|
||||||
|
|
||||||
|
- name: Install clippy
|
||||||
|
run: rustup component add clippy
|
||||||
|
|
||||||
|
- name: Clippy Linting
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: clippy
|
||||||
|
args: -- -D warnings
|
||||||
|
|
||||||
|
test:
|
||||||
|
name: Run Tests
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
# add windows-latest when it is clear why tests are failing
|
||||||
|
os: [ubuntu-latest, macos-latest]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout sources
|
||||||
|
uses: actions/checkout@v1
|
||||||
|
|
||||||
|
- name: Install stable toolchain
|
||||||
|
uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
|
||||||
|
- name: Cache cargo registry
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/registry
|
||||||
|
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Cache cargo index
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/git
|
||||||
|
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Cache cargo build
|
||||||
|
uses: actions/cache@v1
|
||||||
|
with:
|
||||||
|
path: target
|
||||||
|
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Run Tests
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: test
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -3,3 +3,4 @@
|
|||||||
repos
|
repos
|
||||||
cache
|
cache
|
||||||
hoc.log
|
hoc.log
|
||||||
|
result
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
image: docker:stable
|
image: docker:19.03
|
||||||
services:
|
services:
|
||||||
- docker:dind
|
- docker:19.03-dind
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- build
|
- build
|
||||||
@ -9,6 +9,8 @@ stages:
|
|||||||
variables:
|
variables:
|
||||||
DOCKER_HOST: tcp://docker:2375
|
DOCKER_HOST: tcp://docker:2375
|
||||||
DOCKER_DRIVER: overlay2
|
DOCKER_DRIVER: overlay2
|
||||||
|
# DOCKER_TLS_CERTDIR: "/certs"
|
||||||
|
DOCKER_TLS_CERTDIR: ""
|
||||||
CONTAINER_BUILDER_IMAGE: $CI_REGISTRY_IMAGE:builder-latest
|
CONTAINER_BUILDER_IMAGE: $CI_REGISTRY_IMAGE:builder-latest
|
||||||
CONTAINER_TEST_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
|
CONTAINER_TEST_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
|
||||||
CONTAINER_RELEASE_IMAGE: $CI_REGISTRY_IMAGE:latest
|
CONTAINER_RELEASE_IMAGE: $CI_REGISTRY_IMAGE:latest
|
||||||
|
3261
Cargo.lock
generated
3261
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
41
Cargo.toml
41
Cargo.toml
@ -1,27 +1,34 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "hoc"
|
name = "hoc"
|
||||||
version = "0.9.2"
|
version = "0.14.3"
|
||||||
authors = ["Valentin Brandl <vbrandl@riseup.net>"]
|
authors = ["Valentin Brandl <vbrandl@riseup.net>"]
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
build = "build.rs"
|
build = "build.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-web = "1.0.0"
|
actix-rt = "1.1.1"
|
||||||
badge = "0.2.0"
|
actix-slog = "0.2.1"
|
||||||
bytes = "0.4.12"
|
actix-web = "3.1.0"
|
||||||
futures = "0.1.27"
|
badge = "0.3.0"
|
||||||
git2 = "0.9.1"
|
bytes = "0.6.0"
|
||||||
lazy_static = "1.3.0"
|
futures = "0.3.7"
|
||||||
log = "0.4.6"
|
git2 = "0.13.12"
|
||||||
log4rs = "0.8.3"
|
lazy_static = "1.4.0"
|
||||||
number_prefix = "0.3.0"
|
number_prefix = "0.4.0"
|
||||||
openssl-probe = "0.1.2"
|
openssl-probe = "0.1.2"
|
||||||
reqwest = "0.9.17"
|
reqwest = "0.10.8"
|
||||||
serde = "1.0.91"
|
serde = "1.0.117"
|
||||||
serde_derive = "1.0.91"
|
serde_derive = "1.0.103"
|
||||||
serde_json = "1.0.39"
|
serde_json = "1.0.59"
|
||||||
structopt = "0.2.16"
|
slog = "2.5.2"
|
||||||
|
slog-async = "2.5.0"
|
||||||
|
slog-atomic = "3.0.0"
|
||||||
|
slog-term = "2.6.0"
|
||||||
|
structopt = "0.3.20"
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
ructe = "0.6.2"
|
ructe = "0.12.0"
|
||||||
vergen = "3.0.4"
|
vergen = "3.1.0"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
tempfile = "3.1.0"
|
||||||
|
@ -29,6 +29,10 @@ https://<host>/<service>/<user>/<repo>/json
|
|||||||
|
|
||||||
There is also an overview page available via `https://<host>/view/<service>/<user>/<repo>`
|
There is also an overview page available via `https://<host>/view/<service>/<user>/<repo>`
|
||||||
|
|
||||||
|
To delete a repository and the cache from the server, send a `POST` request to
|
||||||
|
`https://<host>/<service>/<user>/<repo>/delete`. On the overview page, there is a button to perform this operation. It
|
||||||
|
will respond with a redirect to the overview page so the cache is rebuilt directly.
|
||||||
|
|
||||||
## Building
|
## Building
|
||||||
|
|
||||||
The code can be built as a standalone binary, using `cargo` or as a Docker container. Run either
|
The code can be built as a standalone binary, using `cargo` or as a Docker container. Run either
|
||||||
@ -45,6 +49,10 @@ $ docker build .
|
|||||||
|
|
||||||
inside the repository.
|
inside the repository.
|
||||||
|
|
||||||
|
I'm currently working on migrating to [nix](https://nixos.org/nix). To get a
|
||||||
|
development shell, run `nix-shell`, to build the package run `nix-build --attr
|
||||||
|
package` and to build the Docker image, run `nix-build --attr dockerImage`.
|
||||||
|
|
||||||
|
|
||||||
## Running
|
## Running
|
||||||
|
|
||||||
|
1
crate-hashes.json
Normal file
1
crate-hashes.json
Normal file
@ -0,0 +1 @@
|
|||||||
|
{}
|
56
default.nix
Normal file
56
default.nix
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
{ sources ? import ./nix/sources.nix
|
||||||
|
, system ? builtins.currentSystem
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
rustOverlay = import "${sources.nixpkgs-mozilla}/rust-overlay.nix";
|
||||||
|
cargo2nixOverlay = import "${sources.cargo2nix}/overlay";
|
||||||
|
|
||||||
|
pkgs = import sources.nixpkgs {
|
||||||
|
# pkgs = import <nixpkgs> {
|
||||||
|
inherit system;
|
||||||
|
overlays = [ cargo2nixOverlay rustOverlay ];
|
||||||
|
};
|
||||||
|
|
||||||
|
rustPkgs = pkgs.rustBuilder.makePackageSet' {
|
||||||
|
rustChannel = "stable";
|
||||||
|
packageFun = import ./Cargo.nix;
|
||||||
|
localPatterns =
|
||||||
|
[
|
||||||
|
''^(src|tests)(/.*)?''
|
||||||
|
''[^/]*\.(rs|toml)$''
|
||||||
|
# include other directory from the project repository
|
||||||
|
''^templates(/.*)?''
|
||||||
|
''^static(/.*)?''
|
||||||
|
''^.git.*(/.*)?''
|
||||||
|
];
|
||||||
|
# packageOverrides
|
||||||
|
};
|
||||||
|
in
|
||||||
|
rec {
|
||||||
|
inherit rustPkgs;
|
||||||
|
shell = pkgs.mkShell {
|
||||||
|
inputsFrom = pkgs.lib.mapAttrsToList (_: pkg: pkg { }) rustPkgs.noBuild.workspace;
|
||||||
|
nativeBuildInputs = with rustPkgs; [ cargo rustc ];
|
||||||
|
};
|
||||||
|
package = (rustPkgs.workspace.hoc {}).overrideAttrs (drv: {
|
||||||
|
buildInputs = drv.buildInputs or [ ] ++ [ pkgs.git ];
|
||||||
|
});
|
||||||
|
dockerImage =
|
||||||
|
pkgs.dockerTools.buildImage {
|
||||||
|
name = "vbrandl/hits-of-code";
|
||||||
|
tag = package.version;
|
||||||
|
|
||||||
|
contents =
|
||||||
|
[
|
||||||
|
package
|
||||||
|
pkgs.cacert
|
||||||
|
pkgs.gitMinimal
|
||||||
|
];
|
||||||
|
|
||||||
|
config = {
|
||||||
|
Cmd = [ "/bin/hoc" ];
|
||||||
|
WorkingDir = "/home/hoc";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
50
nix/sources.json
Normal file
50
nix/sources.json
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
{
|
||||||
|
"cargo2nix": {
|
||||||
|
"branch": "master",
|
||||||
|
"description": "Convert a Cargo.lock to mkRustCrate statements for import in Nix",
|
||||||
|
"homepage": "",
|
||||||
|
"owner": "tenx-tech",
|
||||||
|
"repo": "cargo2nix",
|
||||||
|
"rev": "7bc062ccffc41dc7d3759b8b797e8b4f8dd23a15",
|
||||||
|
"sha256": "1z7xwk1hbp26aydsk3y07riy0ivwqss06n1470mvdl7allfcd1w5",
|
||||||
|
"type": "tarball",
|
||||||
|
"url": "https://github.com/tenx-tech/cargo2nix/archive/7bc062ccffc41dc7d3759b8b797e8b4f8dd23a15.tar.gz",
|
||||||
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
|
},
|
||||||
|
"niv": {
|
||||||
|
"branch": "master",
|
||||||
|
"description": "Easy dependency management for Nix projects",
|
||||||
|
"homepage": "https://github.com/nmattia/niv",
|
||||||
|
"owner": "nmattia",
|
||||||
|
"repo": "niv",
|
||||||
|
"rev": "98c74a80934123cb4c3bf3314567f67311eb711a",
|
||||||
|
"sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn",
|
||||||
|
"type": "tarball",
|
||||||
|
"url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz",
|
||||||
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"branch": "nixpkgs-unstable",
|
||||||
|
"description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to",
|
||||||
|
"homepage": "https://github.com/NixOS/nixpkgs",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs-channels",
|
||||||
|
"rev": "f6bfb371cba2b5a02f200c2747c1fe2c72bd782f",
|
||||||
|
"sha256": "0y3hlbyvznrpr1d2vxj2511hkjg733wdnxfaib3fgy9i9jr8ivzn",
|
||||||
|
"type": "tarball",
|
||||||
|
"url": "https://github.com/NixOS/nixpkgs-channels/archive/f6bfb371cba2b5a02f200c2747c1fe2c72bd782f.tar.gz",
|
||||||
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
|
},
|
||||||
|
"nixpkgs-mozilla": {
|
||||||
|
"branch": "master",
|
||||||
|
"description": "mozilla related nixpkgs (extends nixos/nixpkgs repo)",
|
||||||
|
"homepage": null,
|
||||||
|
"owner": "mozilla",
|
||||||
|
"repo": "nixpkgs-mozilla",
|
||||||
|
"rev": "e912ed483e980dfb4666ae0ed17845c4220e5e7c",
|
||||||
|
"sha256": "08fvzb8w80bkkabc1iyhzd15f4sm7ra10jn32kfch5klgl0gj3j3",
|
||||||
|
"type": "tarball",
|
||||||
|
"url": "https://github.com/mozilla/nixpkgs-mozilla/archive/e912ed483e980dfb4666ae0ed17845c4220e5e7c.tar.gz",
|
||||||
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
|
}
|
||||||
|
}
|
134
nix/sources.nix
Normal file
134
nix/sources.nix
Normal file
@ -0,0 +1,134 @@
|
|||||||
|
# This file has been generated by Niv.
|
||||||
|
|
||||||
|
let
|
||||||
|
|
||||||
|
#
|
||||||
|
# The fetchers. fetch_<type> fetches specs of type <type>.
|
||||||
|
#
|
||||||
|
|
||||||
|
fetch_file = pkgs: spec:
|
||||||
|
if spec.builtin or true then
|
||||||
|
builtins_fetchurl { inherit (spec) url sha256; }
|
||||||
|
else
|
||||||
|
pkgs.fetchurl { inherit (spec) url sha256; };
|
||||||
|
|
||||||
|
fetch_tarball = pkgs: spec:
|
||||||
|
if spec.builtin or true then
|
||||||
|
builtins_fetchTarball { inherit (spec) url sha256; }
|
||||||
|
else
|
||||||
|
pkgs.fetchzip { inherit (spec) url sha256; };
|
||||||
|
|
||||||
|
fetch_git = spec:
|
||||||
|
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
|
||||||
|
|
||||||
|
fetch_builtin-tarball = spec:
|
||||||
|
builtins.trace
|
||||||
|
''
|
||||||
|
WARNING:
|
||||||
|
The niv type "builtin-tarball" will soon be deprecated. You should
|
||||||
|
instead use `builtin = true`.
|
||||||
|
|
||||||
|
$ niv modify <package> -a type=tarball -a builtin=true
|
||||||
|
''
|
||||||
|
builtins_fetchTarball { inherit (spec) url sha256; };
|
||||||
|
|
||||||
|
fetch_builtin-url = spec:
|
||||||
|
builtins.trace
|
||||||
|
''
|
||||||
|
WARNING:
|
||||||
|
The niv type "builtin-url" will soon be deprecated. You should
|
||||||
|
instead use `builtin = true`.
|
||||||
|
|
||||||
|
$ niv modify <package> -a type=file -a builtin=true
|
||||||
|
''
|
||||||
|
(builtins_fetchurl { inherit (spec) url sha256; });
|
||||||
|
|
||||||
|
#
|
||||||
|
# Various helpers
|
||||||
|
#
|
||||||
|
|
||||||
|
# The set of packages used when specs are fetched using non-builtins.
|
||||||
|
mkPkgs = sources:
|
||||||
|
let
|
||||||
|
sourcesNixpkgs =
|
||||||
|
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
|
||||||
|
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
|
||||||
|
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
|
||||||
|
in
|
||||||
|
if builtins.hasAttr "nixpkgs" sources
|
||||||
|
then sourcesNixpkgs
|
||||||
|
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
|
||||||
|
import <nixpkgs> {}
|
||||||
|
else
|
||||||
|
abort
|
||||||
|
''
|
||||||
|
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
|
||||||
|
add a package called "nixpkgs" to your sources.json.
|
||||||
|
'';
|
||||||
|
|
||||||
|
# The actual fetching function.
|
||||||
|
fetch = pkgs: name: spec:
|
||||||
|
|
||||||
|
if ! builtins.hasAttr "type" spec then
|
||||||
|
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
|
||||||
|
else if spec.type == "file" then fetch_file pkgs spec
|
||||||
|
else if spec.type == "tarball" then fetch_tarball pkgs spec
|
||||||
|
else if spec.type == "git" then fetch_git spec
|
||||||
|
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
|
||||||
|
else if spec.type == "builtin-url" then fetch_builtin-url spec
|
||||||
|
else
|
||||||
|
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
|
||||||
|
|
||||||
|
# Ports of functions for older nix versions
|
||||||
|
|
||||||
|
# a Nix version of mapAttrs if the built-in doesn't exist
|
||||||
|
mapAttrs = builtins.mapAttrs or (
|
||||||
|
f: set: with builtins;
|
||||||
|
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
|
||||||
|
);
|
||||||
|
|
||||||
|
# fetchTarball version that is compatible between all the versions of Nix
|
||||||
|
builtins_fetchTarball = { url, sha256 }@attrs:
|
||||||
|
let
|
||||||
|
inherit (builtins) lessThan nixVersion fetchTarball;
|
||||||
|
in
|
||||||
|
if lessThan nixVersion "1.12" then
|
||||||
|
fetchTarball { inherit url; }
|
||||||
|
else
|
||||||
|
fetchTarball attrs;
|
||||||
|
|
||||||
|
# fetchurl version that is compatible between all the versions of Nix
|
||||||
|
builtins_fetchurl = { url, sha256 }@attrs:
|
||||||
|
let
|
||||||
|
inherit (builtins) lessThan nixVersion fetchurl;
|
||||||
|
in
|
||||||
|
if lessThan nixVersion "1.12" then
|
||||||
|
fetchurl { inherit url; }
|
||||||
|
else
|
||||||
|
fetchurl attrs;
|
||||||
|
|
||||||
|
# Create the final "sources" from the config
|
||||||
|
mkSources = config:
|
||||||
|
mapAttrs (
|
||||||
|
name: spec:
|
||||||
|
if builtins.hasAttr "outPath" spec
|
||||||
|
then abort
|
||||||
|
"The values in sources.json should not have an 'outPath' attribute"
|
||||||
|
else
|
||||||
|
spec // { outPath = fetch config.pkgs name spec; }
|
||||||
|
) config.sources;
|
||||||
|
|
||||||
|
# The "config" used by the fetchers
|
||||||
|
mkConfig =
|
||||||
|
{ sourcesFile ? ./sources.json
|
||||||
|
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
|
||||||
|
, pkgs ? mkPkgs sources
|
||||||
|
}: rec {
|
||||||
|
# The sources, i.e. the attribute set of spec name to spec
|
||||||
|
inherit sources;
|
||||||
|
|
||||||
|
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
|
||||||
|
inherit pkgs;
|
||||||
|
};
|
||||||
|
in
|
||||||
|
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
|
10
scripts/find-active.sh
Executable file
10
scripts/find-active.sh
Executable file
@ -0,0 +1,10 @@
|
|||||||
|
#!/usr/bin/env sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
DIR=${1:-repos}
|
||||||
|
|
||||||
|
for url in $(./scripts/list.sh "${DIR}")
|
||||||
|
do
|
||||||
|
(curl "${url}" --silent | grep -q hitsofcode) && echo "${url}" &
|
||||||
|
done
|
21
scripts/load-active.sh
Executable file
21
scripts/load-active.sh
Executable file
@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/env sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
ACTIVE=${1}
|
||||||
|
|
||||||
|
if [ -z "${ACTIVE}" ]
|
||||||
|
then
|
||||||
|
echo "Usage: $0 <list of active repos>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
while IFS= read -r url
|
||||||
|
do
|
||||||
|
imgs=$(curl "${url}" --silent | grep hitsofcode | grep -o -P 'https://camo.githubusercontent.com/[a-z0-9]+/[a-z0-9]+')
|
||||||
|
[ -z "${imgs}" ] || echo "${url}"
|
||||||
|
for img in ${imgs}
|
||||||
|
do
|
||||||
|
curl "$img" --silent > /dev/null &
|
||||||
|
done
|
||||||
|
done < "${ACTIVE}"
|
102
src/cache.rs
102
src/cache.rs
@ -1,6 +1,7 @@
|
|||||||
use crate::Error;
|
use crate::error::{Error, Result};
|
||||||
use std::{
|
use std::{
|
||||||
borrow::Cow,
|
borrow::Cow,
|
||||||
|
collections::HashMap,
|
||||||
fs::{create_dir_all, File, OpenOptions},
|
fs::{create_dir_all, File, OpenOptions},
|
||||||
io::BufReader,
|
io::BufReader,
|
||||||
path::Path,
|
path::Path,
|
||||||
@ -9,47 +10,116 @@ use std::{
|
|||||||
/// Enum to indicate the state of the cache
|
/// Enum to indicate the state of the cache
|
||||||
pub(crate) enum CacheState<'a> {
|
pub(crate) enum CacheState<'a> {
|
||||||
/// Current head and cached head are the same
|
/// Current head and cached head are the same
|
||||||
Current(u64),
|
Current {
|
||||||
|
count: u64,
|
||||||
|
commits: u64,
|
||||||
|
cache: Cache<'a>,
|
||||||
|
},
|
||||||
/// Cached head is older than current head
|
/// Cached head is older than current head
|
||||||
Old(Cache<'a>),
|
Old {
|
||||||
|
head: String,
|
||||||
|
cache: Cache<'a>,
|
||||||
|
},
|
||||||
|
NoneForBranch(Cache<'a>),
|
||||||
/// No cache was found
|
/// No cache was found
|
||||||
No,
|
No,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> CacheState<'a> {
|
impl<'a> CacheState<'a> {
|
||||||
pub(crate) fn read_from_file(path: impl AsRef<Path>, head: &str) -> Result<CacheState, Error> {
|
pub(crate) fn read_from_file(
|
||||||
|
path: impl AsRef<Path>,
|
||||||
|
branch: &str,
|
||||||
|
head: &str,
|
||||||
|
) -> Result<CacheState<'a>> {
|
||||||
if path.as_ref().exists() {
|
if path.as_ref().exists() {
|
||||||
let cache: Cache = serde_json::from_reader(BufReader::new(File::open(path)?))?;
|
let cache: Cache = serde_json::from_reader(BufReader::new(File::open(path)?))?;
|
||||||
if cache.head == head {
|
Ok(cache
|
||||||
Ok(CacheState::Current(cache.count))
|
.entries
|
||||||
} else {
|
.get(branch)
|
||||||
Ok(CacheState::Old(cache))
|
.map(|c| {
|
||||||
}
|
if c.head == head {
|
||||||
|
CacheState::Current {
|
||||||
|
count: c.count,
|
||||||
|
commits: c.commits,
|
||||||
|
// TODO: get rid of clone
|
||||||
|
cache: cache.clone(),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
CacheState::Old {
|
||||||
|
head: c.head.to_string(),
|
||||||
|
// TODO: get rid of clone
|
||||||
|
cache: cache.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
// TODO: get rid of clone
|
||||||
|
.unwrap_or_else(|| CacheState::NoneForBranch(cache.clone())))
|
||||||
} else {
|
} else {
|
||||||
Ok(CacheState::No)
|
Ok(CacheState::No)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn calculate_new_cache(self, count: u64, head: Cow<'a, str>) -> Cache {
|
pub(crate) fn calculate_new_cache(
|
||||||
|
self,
|
||||||
|
count: u64,
|
||||||
|
commits: u64,
|
||||||
|
head: Cow<'a, str>,
|
||||||
|
branch: &'a str,
|
||||||
|
) -> Cache<'a> {
|
||||||
match self {
|
match self {
|
||||||
CacheState::Old(mut cache) => {
|
CacheState::Old { mut cache, .. } => {
|
||||||
cache.head = head;
|
if let Some(mut cache) = cache.entries.get_mut(branch) {
|
||||||
cache.count += count;
|
cache.head = head;
|
||||||
|
cache.count += count;
|
||||||
|
cache.commits += commits;
|
||||||
|
}
|
||||||
cache
|
cache
|
||||||
}
|
}
|
||||||
CacheState::No | CacheState::Current(_) => Cache { head, count },
|
CacheState::Current { cache, .. } => cache,
|
||||||
|
CacheState::NoneForBranch(mut cache) => {
|
||||||
|
cache.entries.insert(
|
||||||
|
branch.into(),
|
||||||
|
CacheEntry {
|
||||||
|
head,
|
||||||
|
count,
|
||||||
|
commits,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
cache
|
||||||
|
}
|
||||||
|
CacheState::No => {
|
||||||
|
let mut entries = HashMap::with_capacity(1);
|
||||||
|
entries.insert(
|
||||||
|
branch.into(),
|
||||||
|
CacheEntry {
|
||||||
|
commits,
|
||||||
|
head,
|
||||||
|
count,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
Cache { entries }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize, Clone)]
|
||||||
pub(crate) struct Cache<'a> {
|
pub(crate) struct Cache<'a> {
|
||||||
|
pub entries: HashMap<Cow<'a, str>, CacheEntry<'a>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Clone)]
|
||||||
|
pub(crate) struct CacheEntry<'a> {
|
||||||
|
/// HEAD commit ref
|
||||||
pub head: Cow<'a, str>,
|
pub head: Cow<'a, str>,
|
||||||
|
/// HoC value
|
||||||
pub count: u64,
|
pub count: u64,
|
||||||
|
/// Number of commits
|
||||||
|
pub commits: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Cache<'a> {
|
impl<'a> Cache<'a> {
|
||||||
pub(crate) fn write_to_file(&self, path: impl AsRef<Path>) -> Result<(), Error> {
|
pub(crate) fn write_to_file(&self, path: impl AsRef<Path>) -> Result<()> {
|
||||||
create_dir_all(path.as_ref().parent().ok_or(Error::Internal)?)?;
|
create_dir_all(path.as_ref().parent().ok_or(Error::Internal)?)?;
|
||||||
serde_json::to_writer(
|
serde_json::to_writer(
|
||||||
OpenOptions::new()
|
OpenOptions::new()
|
||||||
|
@ -1,10 +1,5 @@
|
|||||||
use crate::{error::Result, statics::OPT};
|
use slog::{Drain, Logger};
|
||||||
use log::LevelFilter;
|
use slog_atomic::AtomicSwitch;
|
||||||
use log4rs::{
|
|
||||||
append::{console::ConsoleAppender, file::FileAppender},
|
|
||||||
config::{Appender, Config, Root},
|
|
||||||
encode::pattern::PatternEncoder,
|
|
||||||
};
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use structopt::StructOpt;
|
use structopt::StructOpt;
|
||||||
|
|
||||||
@ -38,34 +33,28 @@ pub(crate) struct Opt {
|
|||||||
#[structopt(short = "w", long = "workers", default_value = "4")]
|
#[structopt(short = "w", long = "workers", default_value = "4")]
|
||||||
/// Number of worker threads
|
/// Number of worker threads
|
||||||
pub(crate) workers: usize,
|
pub(crate) workers: usize,
|
||||||
#[structopt(
|
// #[structopt(
|
||||||
short = "l",
|
// short = "l",
|
||||||
long = "logfile",
|
// long = "logfile",
|
||||||
parse(from_os_str),
|
// parse(from_os_str),
|
||||||
default_value = "./hoc.log"
|
// default_value = "./hoc.log"
|
||||||
)]
|
// )]
|
||||||
/// The logfile
|
// /// The logfile
|
||||||
pub(crate) logfile: PathBuf,
|
// pub(crate) logfile: PathBuf,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn init() -> Result<()> {
|
pub(crate) fn init() -> Logger {
|
||||||
std::env::set_var("RUST_LOG", "actix_web=info,hoc=info");
|
std::env::set_var("RUST_LOG", "actix_web=info,hoc=info");
|
||||||
// pretty_env_logger::init();
|
|
||||||
openssl_probe::init_ssl_cert_env_vars();
|
openssl_probe::init_ssl_cert_env_vars();
|
||||||
let stdout = ConsoleAppender::builder().build();
|
|
||||||
let file = FileAppender::builder()
|
let decorator = slog_term::PlainDecorator::new(std::io::stdout());
|
||||||
.encoder(Box::new(PatternEncoder::new("{d} - {m}{n}")))
|
let drain = slog_term::FullFormat::new(decorator).build().fuse();
|
||||||
.build(&OPT.logfile)
|
let drain = slog_async::Async::new(drain).build().fuse();
|
||||||
.unwrap();
|
let drain = AtomicSwitch::new(drain);
|
||||||
let config = Config::builder()
|
|
||||||
.appender(Appender::builder().build("stdout", Box::new(stdout)))
|
let root = Logger::root(drain, o!("version" => env!("CARGO_PKG_VERSION")));
|
||||||
.appender(Appender::builder().build("file", Box::new(file)))
|
|
||||||
.build(
|
info!(root, "Logging initialized");
|
||||||
Root::builder()
|
|
||||||
.appender("stdout")
|
root
|
||||||
.appender("file")
|
|
||||||
.build(LevelFilter::Info),
|
|
||||||
)?;
|
|
||||||
log4rs::init_config(config)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
@ -5,6 +5,7 @@ pub(crate) fn count_repositories<P>(repo_path: P) -> Result<usize>
|
|||||||
where
|
where
|
||||||
P: AsRef<Path>,
|
P: AsRef<Path>,
|
||||||
{
|
{
|
||||||
|
std::fs::create_dir_all(&repo_path)?;
|
||||||
Ok(read_dir(repo_path)?
|
Ok(read_dir(repo_path)?
|
||||||
.filter_map(StdResult::ok)
|
.filter_map(StdResult::ok)
|
||||||
.filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
|
.filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
|
||||||
|
46
src/error.rs
46
src/error.rs
@ -14,9 +14,9 @@ pub(crate) enum Error {
|
|||||||
Git(git2::Error),
|
Git(git2::Error),
|
||||||
Internal,
|
Internal,
|
||||||
Io(std::io::Error),
|
Io(std::io::Error),
|
||||||
Log(log::SetLoggerError),
|
Parse(std::num::ParseIntError),
|
||||||
LogBuilder(log4rs::config::Errors),
|
|
||||||
Serial(serde_json::Error),
|
Serial(serde_json::Error),
|
||||||
|
BranchNotFound,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
impl fmt::Display for Error {
|
||||||
@ -27,9 +27,9 @@ impl fmt::Display for Error {
|
|||||||
Error::Git(e) => write!(fmt, "Git({})", e),
|
Error::Git(e) => write!(fmt, "Git({})", e),
|
||||||
Error::Internal => write!(fmt, "Internal Error"),
|
Error::Internal => write!(fmt, "Internal Error"),
|
||||||
Error::Io(e) => write!(fmt, "Io({})", e),
|
Error::Io(e) => write!(fmt, "Io({})", e),
|
||||||
Error::Log(e) => write!(fmt, "Log({})", e),
|
Error::Parse(e) => write!(fmt, "Parse({})", e),
|
||||||
Error::LogBuilder(e) => write!(fmt, "LogBuilder({})", e),
|
|
||||||
Error::Serial(e) => write!(fmt, "Serial({})", e),
|
Error::Serial(e) => write!(fmt, "Serial({})", e),
|
||||||
|
Error::BranchNotFound => write!(fmt, "Repo doesn't have master branch"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -37,14 +37,24 @@ impl fmt::Display for Error {
|
|||||||
impl ResponseError for Error {
|
impl ResponseError for Error {
|
||||||
fn error_response(&self) -> HttpResponse {
|
fn error_response(&self) -> HttpResponse {
|
||||||
let mut buf = Vec::new();
|
let mut buf = Vec::new();
|
||||||
templates::p500(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed)).unwrap();
|
match self {
|
||||||
HttpResponse::InternalServerError()
|
Error::BranchNotFound => {
|
||||||
.content_type("text/html")
|
templates::p404_no_master(
|
||||||
.body(buf)
|
&mut buf,
|
||||||
}
|
VERSION_INFO,
|
||||||
|
REPO_COUNT.load(Ordering::Relaxed),
|
||||||
fn render_response(&self) -> HttpResponse {
|
)
|
||||||
self.error_response()
|
.unwrap();
|
||||||
|
HttpResponse::NotFound().content_type("text/html").body(buf)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
templates::p500(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed))
|
||||||
|
.unwrap();
|
||||||
|
HttpResponse::InternalServerError()
|
||||||
|
.content_type("text/html")
|
||||||
|
.body(buf)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -62,12 +72,6 @@ impl From<git2::Error> for Error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<log::SetLoggerError> for Error {
|
|
||||||
fn from(err: log::SetLoggerError) -> Self {
|
|
||||||
Error::Log(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<std::io::Error> for Error {
|
impl From<std::io::Error> for Error {
|
||||||
fn from(err: std::io::Error) -> Self {
|
fn from(err: std::io::Error) -> Self {
|
||||||
Error::Io(err)
|
Error::Io(err)
|
||||||
@ -86,8 +90,8 @@ impl From<reqwest::Error> for Error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<log4rs::config::Errors> for Error {
|
impl From<std::num::ParseIntError> for Error {
|
||||||
fn from(err: log4rs::config::Errors) -> Self {
|
fn from(err: std::num::ParseIntError) -> Self {
|
||||||
Error::LogBuilder(err)
|
Error::Parse(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
358
src/main.rs
358
src/main.rs
@ -4,10 +4,12 @@
|
|||||||
extern crate actix_web;
|
extern crate actix_web;
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate lazy_static;
|
extern crate lazy_static;
|
||||||
#[macro_use]
|
// #[macro_use]
|
||||||
extern crate log;
|
// extern crate log;
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate serde_derive;
|
extern crate serde_derive;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate slog;
|
||||||
|
|
||||||
mod cache;
|
mod cache;
|
||||||
mod config;
|
mod config;
|
||||||
@ -15,26 +17,31 @@ mod count;
|
|||||||
mod error;
|
mod error;
|
||||||
mod service;
|
mod service;
|
||||||
mod statics;
|
mod statics;
|
||||||
|
mod template;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
cache::CacheState,
|
cache::CacheState,
|
||||||
error::{Error, Result},
|
error::{Error, Result},
|
||||||
service::{Bitbucket, FormService, GitHub, Gitlab, Service},
|
service::{Bitbucket, FormService, GitHub, Gitlab, Service},
|
||||||
statics::{CLIENT, CSS, FAVICON, OPT, REPO_COUNT, VERSION_INFO},
|
statics::{CLIENT, CSS, FAVICON, OPT, REPO_COUNT, VERSION_INFO},
|
||||||
|
template::RepoInfo,
|
||||||
};
|
};
|
||||||
use actix_web::{
|
use actix_web::{
|
||||||
error::ErrorBadRequest,
|
http::header::{CacheControl, CacheDirective, Expires, LOCATION},
|
||||||
http::header::{CacheControl, CacheDirective, Expires},
|
middleware::{self, normalize::TrailingSlash},
|
||||||
middleware, web, App, HttpResponse, HttpServer,
|
web, App, HttpResponse, HttpServer, Responder,
|
||||||
};
|
};
|
||||||
use badge::{Badge, BadgeOptions};
|
use badge::{Badge, BadgeOptions};
|
||||||
use bytes::Bytes;
|
use git2::{BranchType, Repository};
|
||||||
use futures::{unsync::mpsc, Future, Stream};
|
use number_prefix::NumberPrefix;
|
||||||
use git2::Repository;
|
use slog::Logger;
|
||||||
use number_prefix::{NumberPrefix, Prefixed, Standalone};
|
|
||||||
use std::{
|
use std::{
|
||||||
borrow::Cow,
|
borrow::Cow,
|
||||||
fs::create_dir_all,
|
fs::create_dir_all,
|
||||||
|
io,
|
||||||
path::Path,
|
path::Path,
|
||||||
process::Command,
|
process::Command,
|
||||||
sync::atomic::Ordering,
|
sync::atomic::Ordering,
|
||||||
@ -51,15 +58,24 @@ struct GeneratorForm<'a> {
|
|||||||
repo: Cow<'a, str>,
|
repo: Cow<'a, str>,
|
||||||
}
|
}
|
||||||
|
|
||||||
struct State {
|
#[derive(Debug)]
|
||||||
|
pub(crate) struct State {
|
||||||
repos: String,
|
repos: String,
|
||||||
cache: String,
|
cache: String,
|
||||||
|
logger: Logger,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct JsonResponse<'a> {
|
struct JsonResponse<'a> {
|
||||||
head: &'a str,
|
head: &'a str,
|
||||||
|
branch: &'a str,
|
||||||
count: u64,
|
count: u64,
|
||||||
|
commits: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BranchQuery {
|
||||||
|
branch: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn pull(path: impl AsRef<Path>) -> Result<()> {
|
fn pull(path: impl AsRef<Path>) -> Result<()> {
|
||||||
@ -69,17 +85,24 @@ fn pull(path: impl AsRef<Path>) -> Result<()> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
fn hoc(
|
||||||
|
repo: &str,
|
||||||
|
repo_dir: &str,
|
||||||
|
cache_dir: &str,
|
||||||
|
branch: &str,
|
||||||
|
logger: &Logger,
|
||||||
|
) -> Result<(u64, String, u64)> {
|
||||||
let repo_dir = format!("{}/{}", repo_dir, repo);
|
let repo_dir = format!("{}/{}", repo_dir, repo);
|
||||||
let cache_dir = format!("{}/{}.json", cache_dir, repo);
|
let cache_dir = format!("{}/{}.json", cache_dir, repo);
|
||||||
let cache_dir = Path::new(&cache_dir);
|
let cache_dir = Path::new(&cache_dir);
|
||||||
let head = format!(
|
let repo = Repository::open_bare(&repo_dir)?;
|
||||||
"{}",
|
// TODO: do better...
|
||||||
Repository::open_bare(&repo_dir)?
|
let head = repo
|
||||||
.head()?
|
.find_branch(branch, BranchType::Local)
|
||||||
.target()
|
.map_err(|_| Error::BranchNotFound)?
|
||||||
.ok_or(Error::Internal)?
|
.into_reference();
|
||||||
);
|
let head = format!("{}", head.target().ok_or(Error::BranchNotFound)?);
|
||||||
|
let mut arg_commit_count = vec!["rev-list".to_string(), "--count".to_string()];
|
||||||
let mut arg = vec![
|
let mut arg = vec![
|
||||||
"log".to_string(),
|
"log".to_string(),
|
||||||
"--pretty=tformat:".to_string(),
|
"--pretty=tformat:".to_string(),
|
||||||
@ -92,18 +115,21 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
|||||||
"-M".to_string(),
|
"-M".to_string(),
|
||||||
"--diff-filter=ACDM".to_string(),
|
"--diff-filter=ACDM".to_string(),
|
||||||
];
|
];
|
||||||
let cache = CacheState::read_from_file(&cache_dir, &head)?;
|
let cache = CacheState::read_from_file(&cache_dir, branch, &head)?;
|
||||||
match &cache {
|
match &cache {
|
||||||
CacheState::Current(res) => {
|
CacheState::Current { count, commits, .. } => {
|
||||||
info!("Using cache for {}", repo_dir);
|
info!(logger, "Using cache");
|
||||||
return Ok((*res, head));
|
return Ok((*count, head, *commits));
|
||||||
}
|
}
|
||||||
CacheState::Old(cache) => {
|
CacheState::Old { head, .. } => {
|
||||||
info!("Updating cache for {}", repo_dir);
|
info!(logger, "Updating cache");
|
||||||
arg.push(format!("{}..HEAD", cache.head));
|
arg.push(format!("{}..{}", head, branch));
|
||||||
|
arg_commit_count.push(format!("{}..{}", head, branch));
|
||||||
}
|
}
|
||||||
CacheState::No => {
|
CacheState::No | CacheState::NoneForBranch(..) => {
|
||||||
info!("Creating cache for {}", repo_dir);
|
info!(logger, "Creating cache");
|
||||||
|
arg.push(branch.to_string());
|
||||||
|
arg_commit_count.push(branch.to_string());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
arg.push("--".to_string());
|
arg.push("--".to_string());
|
||||||
@ -114,6 +140,13 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
|||||||
.output()?
|
.output()?
|
||||||
.stdout;
|
.stdout;
|
||||||
let output = String::from_utf8_lossy(&output);
|
let output = String::from_utf8_lossy(&output);
|
||||||
|
let output_commits = Command::new("git")
|
||||||
|
.args(&arg_commit_count)
|
||||||
|
.current_dir(&repo_dir)
|
||||||
|
.output()?
|
||||||
|
.stdout;
|
||||||
|
let output_commits = String::from_utf8_lossy(&output_commits);
|
||||||
|
let commits: u64 = output_commits.trim().parse()?;
|
||||||
let count: u64 = output
|
let count: u64 = output
|
||||||
.lines()
|
.lines()
|
||||||
.map(|s| {
|
.map(|s| {
|
||||||
@ -125,19 +158,21 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
|||||||
})
|
})
|
||||||
.sum();
|
.sum();
|
||||||
|
|
||||||
let cache = cache.calculate_new_cache(count, (&head).into());
|
let cache = cache.calculate_new_cache(count, commits, (&head).into(), branch);
|
||||||
cache.write_to_file(cache_dir)?;
|
cache.write_to_file(cache_dir)?;
|
||||||
|
|
||||||
Ok((cache.count, head))
|
Ok((count, head, commits))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remote_exists(url: &str) -> Result<bool> {
|
async fn remote_exists(url: &str) -> Result<bool> {
|
||||||
Ok(CLIENT.head(url).send()?.status() == reqwest::StatusCode::OK)
|
let resp = CLIENT.head(url).send().await?;
|
||||||
|
Ok(resp.status() == reqwest::StatusCode::OK)
|
||||||
}
|
}
|
||||||
|
|
||||||
enum HocResult {
|
enum HocResult {
|
||||||
Hoc {
|
Hoc {
|
||||||
hoc: u64,
|
hoc: u64,
|
||||||
|
commits: u64,
|
||||||
hoc_pretty: String,
|
hoc_pretty: String,
|
||||||
head: String,
|
head: String,
|
||||||
url: String,
|
url: String,
|
||||||
@ -147,71 +182,126 @@ enum HocResult {
|
|||||||
NotFound,
|
NotFound,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn handle_hoc_request<T, F>(
|
async fn delete_repo_and_cache<T>(
|
||||||
state: web::Data<Arc<State>>,
|
state: web::Data<Arc<State>>,
|
||||||
data: web::Path<(String, String)>,
|
data: web::Path<(String, String)>,
|
||||||
|
) -> Result<impl Responder>
|
||||||
|
where
|
||||||
|
T: Service,
|
||||||
|
{
|
||||||
|
let data = data.into_inner();
|
||||||
|
let logger = state
|
||||||
|
.logger
|
||||||
|
.new(o!("service" => T::domain(), "user" => data.0.clone(), "repo" => data.1.clone()));
|
||||||
|
let repo = format!(
|
||||||
|
"{}/{}/{}",
|
||||||
|
T::domain(),
|
||||||
|
data.0.to_lowercase(),
|
||||||
|
data.1.to_lowercase()
|
||||||
|
);
|
||||||
|
info!(logger, "Deleting cache and repository");
|
||||||
|
let cache_dir = format!("{}/{}.json", &state.cache, repo);
|
||||||
|
let repo_dir = format!("{}/{}", &state.repos, repo);
|
||||||
|
std::fs::remove_file(&cache_dir).or_else(|e| {
|
||||||
|
if e.kind() == io::ErrorKind::NotFound {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(e)
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
std::fs::remove_dir_all(&repo_dir).or_else(|e| {
|
||||||
|
if e.kind() == io::ErrorKind::NotFound {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(e)
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
REPO_COUNT.fetch_sub(1, Ordering::Relaxed);
|
||||||
|
Ok(HttpResponse::TemporaryRedirect()
|
||||||
|
.header(
|
||||||
|
LOCATION,
|
||||||
|
format!("/view/{}/{}/{}", T::url_path(), data.0, data.1),
|
||||||
|
)
|
||||||
|
.finish())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_hoc_request<T, F>(
|
||||||
|
state: web::Data<Arc<State>>,
|
||||||
|
data: web::Path<(String, String)>,
|
||||||
|
branch: &str,
|
||||||
mapper: F,
|
mapper: F,
|
||||||
) -> impl Future<Item = HttpResponse, Error = Error>
|
) -> Result<HttpResponse>
|
||||||
where
|
where
|
||||||
T: Service,
|
T: Service,
|
||||||
F: Fn(HocResult) -> Result<HttpResponse>,
|
F: Fn(HocResult) -> Result<HttpResponse>,
|
||||||
{
|
{
|
||||||
futures::future::result(Ok(()))
|
let data = data.into_inner();
|
||||||
.and_then(move |_| {
|
let logger = state
|
||||||
let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
|
.logger
|
||||||
let service_path = format!("{}/{}", T::domain(), repo);
|
.new(o!("service" => T::domain(), "user" => data.0.clone(), "repo" => data.1.clone(), "branch" => branch.to_string()));
|
||||||
let path = format!("{}/{}", state.repos, service_path);
|
let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
|
||||||
let file = Path::new(&path);
|
let service_path = format!("{}/{}", T::url_path(), repo);
|
||||||
let url = format!("https://{}", service_path);
|
let service_url = format!("{}/{}", T::domain(), repo);
|
||||||
if !file.exists() {
|
let path = format!("{}/{}", state.repos, service_url);
|
||||||
if !remote_exists(&url)? {
|
let url = format!("https://{}", service_url);
|
||||||
warn!("Repository does not exist: {}", url);
|
let remote_exists = remote_exists(&url).await?;
|
||||||
return Ok(HocResult::NotFound);
|
let file = Path::new(&path);
|
||||||
}
|
if !file.exists() {
|
||||||
info!("Cloning {} for the first time", url);
|
if !remote_exists {
|
||||||
create_dir_all(file)?;
|
warn!(logger, "Repository does not exist");
|
||||||
let repo = Repository::init_bare(file)?;
|
return mapper(HocResult::NotFound);
|
||||||
repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
|
}
|
||||||
repo.remote_set_url("origin", &url)?;
|
info!(logger, "Cloning for the first time");
|
||||||
REPO_COUNT.fetch_add(1, Ordering::Relaxed);
|
create_dir_all(file)?;
|
||||||
}
|
let repo = Repository::init_bare(file)?;
|
||||||
pull(&path)?;
|
repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
|
||||||
let (hoc, head) = hoc(&service_path, &state.repos, &state.cache)?;
|
repo.remote_set_url("origin", &url)?;
|
||||||
let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
|
REPO_COUNT.fetch_add(1, Ordering::Relaxed);
|
||||||
Standalone(hoc) => hoc.to_string(),
|
}
|
||||||
Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
|
pull(&path)?;
|
||||||
};
|
let (hoc, head, commits) = hoc(&service_url, &state.repos, &state.cache, branch, &logger)?;
|
||||||
Ok(HocResult::Hoc {
|
let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
|
||||||
hoc,
|
NumberPrefix::Standalone(hoc) => hoc.to_string(),
|
||||||
hoc_pretty,
|
NumberPrefix::Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
|
||||||
head,
|
};
|
||||||
url,
|
let res = HocResult::Hoc {
|
||||||
repo,
|
hoc,
|
||||||
service_path,
|
commits,
|
||||||
})
|
hoc_pretty,
|
||||||
})
|
head,
|
||||||
.and_then(mapper)
|
url,
|
||||||
|
repo,
|
||||||
|
service_path,
|
||||||
|
};
|
||||||
|
mapper(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn json_hoc<T: Service>(
|
pub(crate) async fn json_hoc<T: Service>(
|
||||||
state: web::Data<Arc<State>>,
|
state: web::Data<Arc<State>>,
|
||||||
data: web::Path<(String, String)>,
|
data: web::Path<(String, String)>,
|
||||||
) -> impl Future<Item = HttpResponse, Error = Error> {
|
branch: web::Query<BranchQuery>,
|
||||||
|
) -> Result<HttpResponse> {
|
||||||
|
let branch = branch.branch.as_deref().unwrap_or("master");
|
||||||
let mapper = |r| match r {
|
let mapper = |r| match r {
|
||||||
HocResult::NotFound => p404(),
|
HocResult::NotFound => p404(),
|
||||||
HocResult::Hoc { hoc, head, .. } => Ok(HttpResponse::Ok().json(JsonResponse {
|
HocResult::Hoc {
|
||||||
|
hoc, head, commits, ..
|
||||||
|
} => Ok(HttpResponse::Ok().json(JsonResponse {
|
||||||
|
branch,
|
||||||
head: &head,
|
head: &head,
|
||||||
count: hoc,
|
count: hoc,
|
||||||
|
commits,
|
||||||
})),
|
})),
|
||||||
};
|
};
|
||||||
handle_hoc_request::<T, _>(state, data, mapper)
|
handle_hoc_request::<T, _>(state, data, branch, mapper).await
|
||||||
}
|
}
|
||||||
|
|
||||||
fn calculate_hoc<T: Service>(
|
pub(crate) async fn calculate_hoc<T: Service>(
|
||||||
state: web::Data<Arc<State>>,
|
state: web::Data<Arc<State>>,
|
||||||
data: web::Path<(String, String)>,
|
data: web::Path<(String, String)>,
|
||||||
) -> impl Future<Item = HttpResponse, Error = Error> {
|
branch: web::Query<BranchQuery>,
|
||||||
let mapper = |r| match r {
|
) -> Result<HttpResponse> {
|
||||||
|
let mapper = move |r| match r {
|
||||||
HocResult::NotFound => p404(),
|
HocResult::NotFound => p404(),
|
||||||
HocResult::Hoc { hoc_pretty, .. } => {
|
HocResult::Hoc { hoc_pretty, .. } => {
|
||||||
let badge_opt = BadgeOptions {
|
let badge_opt = BadgeOptions {
|
||||||
@ -220,9 +310,8 @@ fn calculate_hoc<T: Service>(
|
|||||||
status: hoc_pretty,
|
status: hoc_pretty,
|
||||||
};
|
};
|
||||||
let badge = Badge::new(badge_opt)?;
|
let badge = Badge::new(badge_opt)?;
|
||||||
|
// TODO: remove clone
|
||||||
let (tx, rx_body) = mpsc::unbounded();
|
let body = badge.to_svg().as_bytes().to_vec();
|
||||||
let _ = tx.unbounded_send(Bytes::from(badge.to_svg().as_bytes()));
|
|
||||||
|
|
||||||
let expiration = SystemTime::now() + Duration::from_secs(30);
|
let expiration = SystemTime::now() + Duration::from_secs(30);
|
||||||
Ok(HttpResponse::Ok()
|
Ok(HttpResponse::Ok()
|
||||||
@ -234,20 +323,24 @@ fn calculate_hoc<T: Service>(
|
|||||||
CacheDirective::NoCache,
|
CacheDirective::NoCache,
|
||||||
CacheDirective::NoStore,
|
CacheDirective::NoStore,
|
||||||
]))
|
]))
|
||||||
.streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
|
.body(body))
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
handle_hoc_request::<T, _>(state, data, mapper)
|
let branch = branch.branch.as_deref().unwrap_or("master");
|
||||||
|
handle_hoc_request::<T, _>(state, data, branch, mapper).await
|
||||||
}
|
}
|
||||||
|
|
||||||
fn overview<T: Service>(
|
async fn overview<T: Service>(
|
||||||
state: web::Data<Arc<State>>,
|
state: web::Data<Arc<State>>,
|
||||||
data: web::Path<(String, String)>,
|
data: web::Path<(String, String)>,
|
||||||
) -> impl Future<Item = HttpResponse, Error = Error> {
|
branch: web::Query<BranchQuery>,
|
||||||
|
) -> Result<HttpResponse> {
|
||||||
|
let branch = branch.branch.as_deref().unwrap_or("master");
|
||||||
let mapper = |r| match r {
|
let mapper = |r| match r {
|
||||||
HocResult::NotFound => p404(),
|
HocResult::NotFound => p404(),
|
||||||
HocResult::Hoc {
|
HocResult::Hoc {
|
||||||
hoc,
|
hoc,
|
||||||
|
commits,
|
||||||
hoc_pretty,
|
hoc_pretty,
|
||||||
url,
|
url,
|
||||||
head,
|
head,
|
||||||
@ -255,32 +348,32 @@ fn overview<T: Service>(
|
|||||||
service_path,
|
service_path,
|
||||||
} => {
|
} => {
|
||||||
let mut buf = Vec::new();
|
let mut buf = Vec::new();
|
||||||
|
let repo_info = RepoInfo {
|
||||||
|
commit_url: &T::commit_url(&repo, &head),
|
||||||
|
commits,
|
||||||
|
domain: &OPT.domain,
|
||||||
|
head: &head,
|
||||||
|
hoc,
|
||||||
|
hoc_pretty: &hoc_pretty,
|
||||||
|
path: &service_path,
|
||||||
|
url: &url,
|
||||||
|
branch,
|
||||||
|
};
|
||||||
templates::overview(
|
templates::overview(
|
||||||
&mut buf,
|
&mut buf,
|
||||||
VERSION_INFO,
|
VERSION_INFO,
|
||||||
REPO_COUNT.load(Ordering::Relaxed),
|
REPO_COUNT.load(Ordering::Relaxed),
|
||||||
&OPT.domain,
|
repo_info,
|
||||||
&service_path,
|
|
||||||
&url,
|
|
||||||
hoc,
|
|
||||||
&hoc_pretty,
|
|
||||||
&head,
|
|
||||||
&T::commit_url(&repo, &head),
|
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let (tx, rx_body) = mpsc::unbounded();
|
Ok(HttpResponse::Ok().content_type("text/html").body(buf))
|
||||||
let _ = tx.unbounded_send(Bytes::from(buf));
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok()
|
|
||||||
.content_type("text/html")
|
|
||||||
.streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
handle_hoc_request::<T, _>(state, data, mapper)
|
handle_hoc_request::<T, _>(state, data, branch, mapper).await
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("/")]
|
#[get("/")]
|
||||||
fn index() -> Result<HttpResponse> {
|
async fn index() -> Result<HttpResponse> {
|
||||||
let mut buf = Vec::new();
|
let mut buf = Vec::new();
|
||||||
templates::index(
|
templates::index(
|
||||||
&mut buf,
|
&mut buf,
|
||||||
@ -292,7 +385,7 @@ fn index() -> Result<HttpResponse> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[post("/generate")]
|
#[post("/generate")]
|
||||||
fn generate(params: web::Form<GeneratorForm>) -> Result<HttpResponse> {
|
async fn generate(params: web::Form<GeneratorForm<'_>>) -> Result<HttpResponse> {
|
||||||
let repo = format!("{}/{}", params.user, params.repo);
|
let repo = format!("{}/{}", params.user, params.repo);
|
||||||
let mut buf = Vec::new();
|
let mut buf = Vec::new();
|
||||||
templates::generate(
|
templates::generate(
|
||||||
@ -304,12 +397,8 @@ fn generate(params: web::Form<GeneratorForm>) -> Result<HttpResponse> {
|
|||||||
params.service.service(),
|
params.service.service(),
|
||||||
&repo,
|
&repo,
|
||||||
)?;
|
)?;
|
||||||
let (tx, rx_body) = mpsc::unbounded();
|
|
||||||
let _ = tx.unbounded_send(Bytes::from(buf));
|
|
||||||
|
|
||||||
Ok(HttpResponse::Ok()
|
Ok(HttpResponse::Ok().content_type("text/html").body(buf))
|
||||||
.content_type("text/html")
|
|
||||||
.streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn p404() -> Result<HttpResponse> {
|
fn p404() -> Result<HttpResponse> {
|
||||||
@ -318,43 +407,68 @@ fn p404() -> Result<HttpResponse> {
|
|||||||
Ok(HttpResponse::NotFound().content_type("text/html").body(buf))
|
Ok(HttpResponse::NotFound().content_type("text/html").body(buf))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("/tacit-css.min.css")]
|
async fn async_p404() -> Result<HttpResponse> {
|
||||||
|
p404()
|
||||||
|
}
|
||||||
|
|
||||||
fn css() -> HttpResponse {
|
fn css() -> HttpResponse {
|
||||||
HttpResponse::Ok().content_type("text/css").body(CSS)
|
HttpResponse::Ok().content_type("text/css").body(CSS)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("/favicon.ico")]
|
|
||||||
fn favicon32() -> HttpResponse {
|
fn favicon32() -> HttpResponse {
|
||||||
HttpResponse::Ok().content_type("image/png").body(FAVICON)
|
HttpResponse::Ok().content_type("image/png").body(FAVICON)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() -> Result<()> {
|
async fn start_server(logger: Logger) -> std::io::Result<()> {
|
||||||
config::init()?;
|
|
||||||
let interface = format!("{}:{}", OPT.host, OPT.port);
|
let interface = format!("{}:{}", OPT.host, OPT.port);
|
||||||
let state = Arc::new(State {
|
let state = Arc::new(State {
|
||||||
repos: OPT.outdir.display().to_string(),
|
repos: OPT.outdir.display().to_string(),
|
||||||
cache: OPT.cachedir.display().to_string(),
|
cache: OPT.cachedir.display().to_string(),
|
||||||
|
logger,
|
||||||
});
|
});
|
||||||
Ok(HttpServer::new(move || {
|
HttpServer::new(move || {
|
||||||
App::new()
|
App::new()
|
||||||
.data(state.clone())
|
.data(state.clone())
|
||||||
.wrap(middleware::Logger::default())
|
.wrap(actix_slog::StructuredLogger::new(
|
||||||
|
state.logger.new(o!("log_type" => "access")),
|
||||||
|
))
|
||||||
|
.wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
|
||||||
.service(index)
|
.service(index)
|
||||||
.service(css)
|
.service(web::resource("/tacit-css.min.css").route(web::get().to(css)))
|
||||||
.service(favicon32)
|
// TODO
|
||||||
|
.service(web::resource("/favicon.ico").route(web::get().to(favicon32)))
|
||||||
.service(generate)
|
.service(generate)
|
||||||
.service(web::resource("/github/{user}/{repo}").to_async(calculate_hoc::<GitHub>))
|
.service(web::resource("/github/{user}/{repo}").to(calculate_hoc::<GitHub>))
|
||||||
.service(web::resource("/gitlab/{user}/{repo}").to_async(calculate_hoc::<Gitlab>))
|
.service(web::resource("/gitlab/{user}/{repo}").to(calculate_hoc::<Gitlab>))
|
||||||
.service(web::resource("/bitbucket/{user}/{repo}").to_async(calculate_hoc::<Bitbucket>))
|
.service(web::resource("/bitbucket/{user}/{repo}").to(calculate_hoc::<Bitbucket>))
|
||||||
.service(web::resource("/github/{user}/{repo}/json").to_async(json_hoc::<GitHub>))
|
.service(
|
||||||
.service(web::resource("/gitlab/{user}/{repo}/json").to_async(json_hoc::<Gitlab>))
|
web::resource("/github/{user}/{repo}/delete")
|
||||||
.service(web::resource("/bitbucket/{user}/{repo}/json").to_async(json_hoc::<Bitbucket>))
|
.route(web::post().to(delete_repo_and_cache::<GitHub>)),
|
||||||
.service(web::resource("/view/github/{user}/{repo}").to_async(overview::<GitHub>))
|
)
|
||||||
.service(web::resource("/view/gitlab/{user}/{repo}").to_async(overview::<Gitlab>))
|
.service(
|
||||||
.service(web::resource("/view/bitbucket/{user}/{repo}").to_async(overview::<Bitbucket>))
|
web::resource("/gitlab/{user}/{repo}/delete")
|
||||||
.default_service(web::resource("").route(web::get().to_async(p404)))
|
.route(web::post().to(delete_repo_and_cache::<Gitlab>)),
|
||||||
|
)
|
||||||
|
.service(
|
||||||
|
web::resource("/bitbucket/{user}/{repo}/delete")
|
||||||
|
.route(web::post().to(delete_repo_and_cache::<Bitbucket>)),
|
||||||
|
)
|
||||||
|
.service(web::resource("/github/{user}/{repo}/json").to(json_hoc::<GitHub>))
|
||||||
|
.service(web::resource("/gitlab/{user}/{repo}/json").to(json_hoc::<Gitlab>))
|
||||||
|
.service(web::resource("/bitbucket/{user}/{repo}/json").to(json_hoc::<Bitbucket>))
|
||||||
|
.service(web::resource("/view/github/{user}/{repo}").to(overview::<GitHub>))
|
||||||
|
.service(web::resource("/view/gitlab/{user}/{repo}").to(overview::<Gitlab>))
|
||||||
|
.service(web::resource("/view/bitbucket/{user}/{repo}").to(overview::<Bitbucket>))
|
||||||
|
.default_service(web::resource("").route(web::get().to(async_p404)))
|
||||||
})
|
})
|
||||||
.workers(OPT.workers)
|
.workers(OPT.workers)
|
||||||
.bind(interface)?
|
.bind(interface)?
|
||||||
.run()?)
|
.run()
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::main]
|
||||||
|
async fn main() -> std::io::Result<()> {
|
||||||
|
let logger = config::init();
|
||||||
|
start_server(logger).await
|
||||||
}
|
}
|
||||||
|
11
src/template.rs
Normal file
11
src/template.rs
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
pub struct RepoInfo<'a> {
|
||||||
|
pub commit_url: &'a str,
|
||||||
|
pub commits: u64,
|
||||||
|
pub domain: &'a str,
|
||||||
|
pub head: &'a str,
|
||||||
|
pub hoc: u64,
|
||||||
|
pub hoc_pretty: &'a str,
|
||||||
|
pub path: &'a str,
|
||||||
|
pub url: &'a str,
|
||||||
|
pub branch: &'a str,
|
||||||
|
}
|
70
src/tests.rs
Normal file
70
src/tests.rs
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
use crate::{
|
||||||
|
calculate_hoc, index, json_hoc,
|
||||||
|
service::{Bitbucket, GitHub, Gitlab, Service},
|
||||||
|
State,
|
||||||
|
};
|
||||||
|
|
||||||
|
use actix_web::{http, test, web, App};
|
||||||
|
use tempfile::tempdir;
|
||||||
|
|
||||||
|
macro_rules! test_app {
|
||||||
|
($path: expr) => {
|
||||||
|
test::init_service(App::new().service($path)).await
|
||||||
|
};
|
||||||
|
($state: expr, $path: expr) => {
|
||||||
|
test::init_service(App::new().data($state).service($path)).await
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! test_service {
|
||||||
|
($name: ident, $path: tt, $what: ident) => {
|
||||||
|
async fn $name<T: 'static + Service>(req_path: &str) {
|
||||||
|
let repo_dir = dbg!(tempdir().unwrap());
|
||||||
|
let cache_dir = dbg!(tempdir().unwrap());
|
||||||
|
let repos = format!("{}/", repo_dir.path().display());
|
||||||
|
let cache = format!("{}/", cache_dir.path().display());
|
||||||
|
let state = dbg!(State {
|
||||||
|
repos,
|
||||||
|
cache,
|
||||||
|
logger: crate::config::init(),
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut app = test_app!(state, web::resource($path).to($what::<T>));
|
||||||
|
|
||||||
|
let req = dbg!(test::TestRequest::with_uri(req_path).to_request());
|
||||||
|
let resp = dbg!(test::call_service(&mut app, req).await);
|
||||||
|
|
||||||
|
assert_eq!(resp.status(), http::StatusCode::OK);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_index() {
|
||||||
|
let mut app = test::init_service(App::new().service(index)).await;
|
||||||
|
|
||||||
|
let req = dbg!(test::TestRequest::with_uri("/").to_request());
|
||||||
|
let resp = dbg!(test::call_service(&mut app, req).await);
|
||||||
|
|
||||||
|
assert_eq!(resp.status(), http::StatusCode::OK);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: fix this test
|
||||||
|
// #[actix_rt::test]
|
||||||
|
async fn test_json() {
|
||||||
|
test_service!(test_json_service, "/service/{user}/{repo}/json", json_hoc);
|
||||||
|
|
||||||
|
test_json_service::<Gitlab>("/service/vbrandl/hoc/json").await;
|
||||||
|
test_json_service::<GitHub>("/service/vbrandl/hoc/json").await;
|
||||||
|
test_json_service::<Bitbucket>("/service/vbrandl/hoc/json").await;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: fix this test
|
||||||
|
// #[actix_rt::test]
|
||||||
|
async fn test_badge() {
|
||||||
|
test_service!(test_badge_service, "/service/{user}/{repo}", calculate_hoc);
|
||||||
|
|
||||||
|
test_badge_service::<Gitlab>("/service/vbrandl/hoc").await;
|
||||||
|
test_badge_service::<GitHub>("/service/vbrandl/hoc").await;
|
||||||
|
test_badge_service::<Bitbucket>("/service/vbrandl/hoc").await;
|
||||||
|
}
|
@ -46,17 +46,24 @@ alt="example badge" /></a>
|
|||||||
</pre>
|
</pre>
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
You can also request the HoC as JSON by appending <code>/json</code> to the request path. This will return a JSON
|
By default, this service assumes the existence of a branch named <code>master</code>. If no branch with that name exists
|
||||||
object with two fields: <code>count</code> and <code>head</code> with count being the HoC value and head being the
|
in your repository or you want a badge for another branch of your repository, just append
|
||||||
commit ref of <code>HEAD</code>. Requesting
|
<code>?branch=<branch-name></code> to the URL.
|
||||||
<a href="https://@domain/github/vbrandl/hoc/json">https://@domain/github/vbrandl/hoc/json</a> might return something
|
</p>
|
||||||
along the lines of
|
|
||||||
|
<p>
|
||||||
|
You can also request the HoC as JSON by appending <code>/json</code> to the request path. This will return a JSON object
|
||||||
|
with three fields: <code>count</code> (the HoC value), <code>commits</code> (the number of commits) and
|
||||||
|
<code>head</code> (the commit ref of HEAD). Requesting <a
|
||||||
|
href="https://@domain/github/vbrandl/hoc/json">https://@domain/github/vbrandl/hoc/json</a> might return something along
|
||||||
|
the lines of
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<pre>
|
<pre>
|
||||||
{
|
{
|
||||||
"head" : "05736ee3ba256ec9a7227c436aef2bf43db109ab",
|
"head": "1f01c3b964b018fb0c0c2c5b572bf4ace2968546",
|
||||||
"count": 7582
|
"count": 8324,
|
||||||
|
"commits": 223
|
||||||
}
|
}
|
||||||
</pre>
|
</pre>
|
||||||
|
|
||||||
|
@ -1,12 +1,17 @@
|
|||||||
@use super::base;
|
@use super::base;
|
||||||
@use crate::statics::VersionInfo;
|
@use crate::statics::VersionInfo;
|
||||||
|
@use crate::template::RepoInfo;
|
||||||
|
|
||||||
@(version_info: VersionInfo, repo_count: usize, domain: &str, path: &str, url: &str, hoc: u64, hoc_pretty: &str, head: &str, commit_url: &str)
|
@(version_info: VersionInfo, repo_count: usize, repo_info: RepoInfo)
|
||||||
|
|
||||||
@:base("Hits-of-Code Badges", "Overview", {
|
@:base("Hits-of-Code Badges", "Overview", {
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
The project <a href="@url">@url</a> has <strong>@hoc_pretty</strong> (exactly @hoc) hits of code at <a href="@commit_url">@head</a>.
|
The project <a href="@repo_info.url">@repo_info.url</a> has
|
||||||
|
<strong>@repo_info.hoc_pretty</strong> (exactly @repo_info.hoc) hits of code at
|
||||||
|
<a href="@repo_info.commit_url">@repo_info.head</a> on the
|
||||||
|
<code>@repo_info.branch</code> branch. The repository contains
|
||||||
|
<strong>@repo_info.commits</strong> commits.
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
@ -14,6 +19,11 @@ To include the badge in your readme, use the following markdown:
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<pre>
|
<pre>
|
||||||
[](https://@domain/view/@path)
|
[](https://@repo_info.domain/view/@repo_info.path?branch=@repo_info.branch)
|
||||||
</pre>
|
</pre>
|
||||||
|
|
||||||
|
|
||||||
|
<form method="post" action="/@repo_info.path/delete">
|
||||||
|
<button type="submit">Rebuild Cache</button>
|
||||||
|
</form>
|
||||||
}, version_info, repo_count)
|
}, version_info, repo_count)
|
||||||
|
16
templates/p404_no_master.rs.html
Normal file
16
templates/p404_no_master.rs.html
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
@use super::base;
|
||||||
|
@use crate::statics::VersionInfo;
|
||||||
|
|
||||||
|
@(version_info: VersionInfo, repo_count: usize)
|
||||||
|
|
||||||
|
@:base("Branch not Found - Hits-of-Code Badges", "404 - Branch not Found", {
|
||||||
|
<p>
|
||||||
|
<big>Sorry</big>. I couldn't find the requested branch of your repository. Currently this service assumes the
|
||||||
|
existence of a branch named <code>master</code>. If you'd like to request a badge for another branch, you can do so by
|
||||||
|
attaching <code>?branch=<branch-name></code> to the request.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
If you think this is a mistake on my side, please <a href="mailto:mail+hoc@@vbrandl.net">drop me a mail</a>.
|
||||||
|
</p>
|
||||||
|
}, version_info, repo_count)
|
80
vm.nix
Normal file
80
vm.nix
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
# Nix configuration for a VM to run the hoc service
|
||||||
|
#
|
||||||
|
# It is intended as an example of building a VM that builds hoc for testing
|
||||||
|
# and evaluation purposes. It does not represent a production or secure
|
||||||
|
# deployment.
|
||||||
|
|
||||||
|
{ sources ? import ./nix/sources.nix
|
||||||
|
, pkgs ? import sources.nixpkgs { }
|
||||||
|
, callPackage ? pkgs.callPackage
|
||||||
|
, config
|
||||||
|
, lib
|
||||||
|
, ...
|
||||||
|
}:
|
||||||
|
# config, pkgs, lib, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
hoc = pkgs.callPackage ./default.nix { };
|
||||||
|
|
||||||
|
# hoc = cargoNix.rootCrate.build;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
environment = {
|
||||||
|
systemPackages = with pkgs; [
|
||||||
|
(
|
||||||
|
hoc
|
||||||
|
# import ./default.nix
|
||||||
|
)
|
||||||
|
];
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
networking.hostName = "hoc"; # Define your hostname.
|
||||||
|
|
||||||
|
system.stateVersion = "19.09"; # The version of NixOS originally installed
|
||||||
|
|
||||||
|
# Set security options:
|
||||||
|
security = {
|
||||||
|
sudo = {
|
||||||
|
enable = true; # Enable sudo
|
||||||
|
wheelNeedsPassword = false; # Allow wheel members to run sudo without a password
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
networking.firewall.allowedTCPPorts = [ 80 ];
|
||||||
|
|
||||||
|
# List services that you want to enable:
|
||||||
|
services.openssh = {
|
||||||
|
enable = true; # Enable the OpenSSH daemon.
|
||||||
|
#permitRootLogin = "yes"; # Probably want to change this in production
|
||||||
|
#challengeResponseAuthentication = true; # Probably want to change this in production
|
||||||
|
#passwordAuthentication = true; # Probably want to change this in production
|
||||||
|
openFirewall = true;
|
||||||
|
hostKeys = [
|
||||||
|
{
|
||||||
|
path = "/etc/ssh/ssh_host_ed25519_key"; # Generate a key for the vm
|
||||||
|
type = "ed25519"; # Use the current best key type
|
||||||
|
}
|
||||||
|
];
|
||||||
|
};
|
||||||
|
|
||||||
|
# Users of the hoc VM:
|
||||||
|
users.mutableUsers = false; # Remove any users not defined in here
|
||||||
|
|
||||||
|
users.users.root = {
|
||||||
|
password = "123456"; # Probably want to change this in production
|
||||||
|
};
|
||||||
|
|
||||||
|
# Misc groups:
|
||||||
|
users.groups.nixos.gid = 1000;
|
||||||
|
|
||||||
|
# NixOS users
|
||||||
|
users.users.nixos = {
|
||||||
|
isNormalUser = true;
|
||||||
|
uid = 1000;
|
||||||
|
group = "nixos";
|
||||||
|
extraGroups = [ "wheel" ];
|
||||||
|
password = "123456"; # Probably want to change this in production
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
Reference in New Issue
Block a user