Compare commits
300 Commits
SHA1 | Author | Date | |
---|---|---|---|
e267a4fc8a | |||
441ab76ca6 | |||
155d79a019 | |||
1fe39fbd3c | |||
1fae322993 | |||
13c193d04a | |||
701299ab5b | |||
2277931d3c | |||
6c59acc0ab | |||
79d947fb36 | |||
a1133259c2 | |||
9eb47bd94b | |||
8f6c52a6f5 | |||
b50112c7c0 | |||
62c80a81de | |||
04f1437d0c | |||
e706176d84 | |||
a2a0047e03 | |||
be7320e24e | |||
55f937aac2 | |||
d929a559b6 | |||
5e7374deea | |||
0821559471 | |||
a9ede0b2c7 | |||
be48ebd2d9 | |||
c80bef7cbd | |||
c9b67eb6e6 | |||
1ab4cb6c71 | |||
9785b38ce0 | |||
492e17b997 | |||
43586e534b | |||
70bca967f3 | |||
61013e43ab | |||
0c100e76ad | |||
000c7457f5 | |||
70361e8b70 | |||
4237f6cc06 | |||
04a90469fe | |||
117a052aa5 | |||
b2bc7fc217 | |||
cd2624ae11 | |||
fa9601bb9a | |||
ae9d31b82c | |||
2afe5fc172 | |||
ea7f074661 | |||
d5e3cad299 | |||
6570daef02 | |||
6ac389e3ee | |||
36196975ac | |||
1dd83829b2 | |||
1b0817bc25 | |||
f57dc8e890 | |||
bdda6292d2 | |||
a2826c9922 | |||
3b2a368df0 | |||
8b7444bd7c | |||
9d719e7d85 | |||
584379e047 | |||
951124e038 | |||
e574f4bdd0 | |||
5c8e3fa35d | |||
4c0df1fa19 | |||
f6b46a1e90 | |||
5eb16ac38e | |||
3486d44bc5 | |||
1311e724ce | |||
5da9e74992 | |||
8dadc0ddea | |||
cba235eadf | |||
5284249acf | |||
d28f56dac0 | |||
ab02a45bf6 | |||
837c1c160b | |||
3d26823425 | |||
d9553871fa | |||
d2bc9c2149 | |||
b0b97653a9 | |||
0b1bc60790 | |||
e93588a6d9 | |||
cd5046a276 | |||
919348d49d | |||
8378b0b51b | |||
0394ae52e9 | |||
fb34f6bf2f | |||
36622ad322 | |||
89bb27e137 | |||
7d4385764c | |||
5df97f0e64 | |||
a3d98a326e | |||
76bf5f2f98 | |||
e827d2ef39 | |||
65576140ce | |||
cea54cf2f3 | |||
8c28c68c17 | |||
b86a080ead | |||
1b0c959a8d | |||
806baf7460 | |||
080f7379ea | |||
7409de5dba | |||
07f4ef518a | |||
820ebdd899 | |||
527d793914 | |||
7df6c3fc75 | |||
17e1af1dd7 | |||
baade8fbc2 | |||
cfc07664fc | |||
52938c9106 | |||
b64f573a38 | |||
ebbd1ea8cb | |||
93a0147a78 | |||
2040e718d7 | |||
5679f67de0 | |||
8babdb978e | |||
b3388912c2 | |||
bce511ce0b | |||
50e36769d1 | |||
9477338331 | |||
c1d0930354 | |||
688067e5c5 | |||
529f36dc40 | |||
d8fdabfca6 | |||
8060b80ae6 | |||
15965c5b5f | |||
1eade03b63 | |||
f41fbc8baf | |||
14cd21dc1c | |||
1bdee4ee36 | |||
9e33742d81 | |||
8e78d13443 | |||
391fa39470 | |||
e6e90214e2 | |||
6f734b103a | |||
413bb824e1 | |||
58ad13dbad | |||
9f8b781f7b | |||
175b7c828b | |||
002119324f | |||
df78b6f1e5 | |||
e1bff0d280 | |||
4c9454aa9e | |||
e9e57495a6 | |||
b6505f6a37 | |||
a2463bf657 | |||
53cb73cd9b | |||
423a3aa0b0 | |||
7a2c6b6f06 | |||
ffb306a7a8 | |||
cef2ae2299 | |||
b91de72d19 | |||
fd08489587 | |||
ce0d6041ea | |||
3af60a82ce | |||
bcdf7db549 | |||
9b2f1f4ebb | |||
84e47237de | |||
909f6585b5 | |||
b48d7f1492 | |||
f1e9d1806f | |||
8c62d01f3c | |||
26a5025a32 | |||
6f931ce46f | |||
2d46592c4a | |||
c2d496f2b4 | |||
19d37806f2 | |||
b4bd9b8830 | |||
3c8227d0e9 | |||
d6409c21ec | |||
990b5acbda | |||
f9e14e2ffd | |||
a73afe6851 | |||
20544b27d9 | |||
288573b1a4 | |||
a12755d7be | |||
a248531ce2 | |||
de7919a031 | |||
3913039010 | |||
078d3cdcf9 | |||
c552a84870 | |||
7c1a14b6ad | |||
c69b8207b8 | |||
a319f400e9 | |||
ddcb041f3f | |||
2a73370c9f | |||
689a2109fa | |||
e82146c912 | |||
568398f1c6 | |||
1cc6363cba | |||
dcfca96051 | |||
981c347acf | |||
22aad6ed77 | |||
b06bbbba8f | |||
e06e6736c0 | |||
0abfba8b4f | |||
ca87bf9282 | |||
4f37b101ae | |||
e16f02d729 | |||
1256996d19 | |||
c1d1551283 | |||
26bdb3dbb5 | |||
c9d959d4c5 | |||
15ca2f10b8 | |||
2a81fbe9f2 | |||
fe276458db | |||
73d034ff00 | |||
63e6a3d2db | |||
fbb5bbd20e | |||
7ff238fea6 | |||
06e9926cb5 | |||
c9a54eda49 | |||
382cd73bae | |||
8e5270a1d8 | |||
fdf3fda24a | |||
219099779b | |||
7393ba54f1 | |||
c14cf28a3e | |||
68a0604365 | |||
8dc07554f6 | |||
64bb6c2922 | |||
4be0cdeb5f | |||
0c9da804bb | |||
f95a2a7923 | |||
8eff7db8ae | |||
57df8ab209 | |||
ffea4f21ef | |||
0b0e88c604 | |||
c48e27c649 | |||
dfa353c356 | |||
2f1209d1e0 | |||
195815fc8a | |||
4e393fdd57 | |||
75f14f7878 | |||
349640acc5 | |||
1d0eca90a7 | |||
81bb65db4e | |||
6295477ccf | |||
b7324b3b38 | |||
e90fb7f54c | |||
253de8cea2 | |||
bd014301fd | |||
45c0d26e33 | |||
0edceb6a7d | |||
056d798d61 | |||
96f2e9422c | |||
df19951729 | |||
bfc97dad32 | |||
d28ac95aa2 | |||
8a0df3e52c | |||
e2c42a5287 | |||
f638124930 | |||
9cabea63c3 | |||
20074ac4e1 | |||
0ec7bd93d8 | |||
c1a1af0109 | |||
2c0094670b | |||
58fdc32627 | |||
cbeca19467 | |||
4cc8bd4385 | |||
fac1efeb7d | |||
1abc2d6333 | |||
a69c523e3c | |||
766c67f723 | |||
2f70e42f9a | |||
763910b515 | |||
dca32e4317 | |||
c301978f0b | |||
97fcb5a420 | |||
724c49d056 | |||
7b5d225701 | |||
90fc0ee584 | |||
978321e6ef | |||
7e5adbee24 | |||
803f95cde8 | |||
31fe058879 | |||
421c1a4164 | |||
06fa568225 | |||
520ac2442f | |||
61c4b18bf7 | |||
a3ccfdc4a3 | |||
70ce0f71e2 | |||
ce6150c48b | |||
95b60f3753 | |||
ec9f7a3635 | |||
9fef178a27 | |||
7af58de9fa | |||
6c414bf07d | |||
d39a81720a | |||
9ed5a5b257 | |||
f3c138b7d4 | |||
51b8519fc2 | |||
b1bce024de | |||
89a5e52857 | |||
da9f9263d1 | |||
62accd6e43 | |||
210a1f4592 | |||
3708768f34 | |||
94265ba39f | |||
7e3ca9c620 | |||
a3c978e0fc | |||
67db10460f | |||
615f71bfd7 |
7  .env.example  Normal file
@@ -0,0 +1,7 @@
+HOC_REPODIR='./repos'
+HOC_CACHEDIR='./cache'
+HOC_PORT=8080
+HOC_HOST='0.0.0.0'
+HOC_WORKERS=4
+
+HOC_BASE_URL='http://0.0.0.0:8080'
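The variables above are read through the `dotenv` and `config` crates that this compare adds to `Cargo.toml`. The following is a minimal sketch (assuming `serde` with its derive feature; the struct and function names are illustrative, not the project's actual `Settings` type) of how the `HOC_` prefix maps onto typed fields:

```rust
// Minimal sketch (not the project's code): reading HOC_*-prefixed variables
// like the ones in .env.example into a typed struct with the `config` crate.
use config::{Config, ConfigError, Environment};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct EnvSettings {
    port: u16,        // from HOC_PORT
    host: String,     // from HOC_HOST
    base_url: String, // from HOC_BASE_URL
}

fn load_from_env() -> Result<EnvSettings, ConfigError> {
    dotenv::dotenv().ok(); // load a local .env file if present
    let mut cfg = Config::new();
    cfg.merge(Environment::with_prefix("hoc"))?; // HOC_PORT -> port, ...
    cfg.try_into()
}
```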
10  .github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: cargo
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "04:00"
+    open-pull-requests-limit: 10
+    assignees:
+      - vbrandl
23  .github/workflows/audit.yml  vendored
@@ -4,27 +4,24 @@ on:
     - cron: '0 1 * * *'
   push:
     paths:
-      - 'Cargo.toml'
-      - 'Cargo.lock'
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
   pull_request:

 jobs:
   security_audit:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2

-      - name: Cache cargo registry
-        uses: actions/cache@v1
-        with:
-          path: /usr/share/rust/.cargo/registry
-          key: ${{ runner.os }}-cargo-registry
-      - name: Cache cargo index
-        uses: actions/cache@v1
-        with:
-          path: /usr/share/rust/.cargo/git
-          key: ${{ runner.os }}-cargo-index
+      - name: Cache cargo registry, index and build directory
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.cargo/registry
+            ~/.cargo/git
+            ./target
+          key: audit-${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

       - uses: actions-rs/audit-check@v1
         with:
76  .github/workflows/release.yml  vendored  Normal file
@@ -0,0 +1,76 @@
+name: Release
+
+on:
+  push:
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+
+jobs:
+  publish:
+    name: Publishing for ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [macos-latest, ubuntu-latest, windows-latest]
+        rust: [stable]
+        include:
+          - os: macos-latest
+            artifact_prefix: macos
+            target: x86_64-apple-darwin
+            binary_postfix: ""
+          - os: ubuntu-latest
+            artifact_prefix: linux
+            target: x86_64-unknown-linux-gnu
+            binary_postfix: ""
+          - os: windows-latest
+            artifact_prefix: windows
+            target: x86_64-pc-windows-msvc
+            binary_postfix: ".exe"
+
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v2
+
+      - name: Install stable toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: ${{ matrix.rust }}
+          override: true
+
+      - name: Cache cargo registry, index and build directory
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.cargo/registry
+            ~/.cargo/git
+            ./target
+          key: release-${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+
+      - name: Cargo build
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          toolchain: ${{ matrix.rust }}
+          args: --release --target ${{ matrix.target }}
+
+      - name: Packaging final binary
+        shell: bash
+        run: |
+          cd target/${{ matrix.target }}/release
+          strip hoc${{ matrix.binary_postfix }}
+          tar czvf hoc-${{ matrix.artifact_prefix }}.tar.gz hoc${{ matrix.binary_postfix }}
+
+          if [[ ${{ runner.os }} == 'Windows' ]]; then
+            certutil -hashfile hoc-${{ matrix.artifact_prefix }}.tar.gz sha256 | grep -E [A-Fa-f0-9]{64} > hoc-${{ matrix.artifact_prefix }}.sha256
+          else
+            shasum -a 256 hoc-${{ matrix.artifact_prefix }}.tar.gz > hoc-${{ matrix.artifact_prefix }}.sha256
+          fi
+
+      - name: Releasing assets
+        uses: softprops/action-gh-release@v1
+        with:
+          files: |
+            target/${{ matrix.target }}/release/hoc-${{ matrix.artifact_prefix }}.tar.gz
+            target/${{ matrix.target }}/release/hoc-${{ matrix.artifact_prefix }}.sha256
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
94  .github/workflows/rust.yml  vendored
@ -1,41 +1,30 @@
|
|||||||
name: Rust
|
|
||||||
|
|
||||||
on: [push, pull_request]
|
on: [push, pull_request]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
|
||||||
name: Linting and Formatting Checks
|
rustfmt:
|
||||||
|
name: Rustfmt
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout sources
|
- name: Checkout sources
|
||||||
uses: actions/checkout@v1
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Install stable toolchain
|
- name: Install stable toolchain
|
||||||
uses: actions-rs/toolchain@v1
|
uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: stable
|
toolchain: stable
|
||||||
|
profile: minimal
|
||||||
override: true
|
override: true
|
||||||
|
components: rustfmt
|
||||||
|
|
||||||
- name: Install rustfmt
|
- name: Cache cargo registry, index and build directory
|
||||||
run: rustup component add rustfmt
|
uses: actions/cache@v2
|
||||||
|
|
||||||
- name: Cache cargo registry
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
with:
|
||||||
path: ~/.cargo/registry
|
path: |
|
||||||
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
~/.cargo/registry
|
||||||
|
~/.cargo/git
|
||||||
- name: Cache cargo index
|
./target
|
||||||
uses: actions/cache@v1
|
key: rustfmt-${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
with:
|
|
||||||
path: ~/.cargo/git
|
|
||||||
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
|
||||||
|
|
||||||
- name: Cache cargo build
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: target
|
|
||||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
|
||||||
|
|
||||||
- name: Check Formatting
|
- name: Check Formatting
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
@ -43,8 +32,29 @@ jobs:
|
|||||||
command: fmt
|
command: fmt
|
||||||
args: --all -- --check
|
args: --all -- --check
|
||||||
|
|
||||||
- name: Install clippy
|
clippy:
|
||||||
run: rustup component add clippy
|
name: Clippy
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout sources
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Install stable toolchain
|
||||||
|
uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
|
profile: minimal
|
||||||
|
override: true
|
||||||
|
components: rustfmt
|
||||||
|
|
||||||
|
- name: Cache cargo registry, index and build directory
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cargo/registry
|
||||||
|
~/.cargo/git
|
||||||
|
./target
|
||||||
|
key: clippy-${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
- name: Clippy Linting
|
- name: Clippy Linting
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
@ -52,41 +62,35 @@ jobs:
|
|||||||
command: clippy
|
command: clippy
|
||||||
args: -- -D warnings
|
args: -- -D warnings
|
||||||
|
|
||||||
|
|
||||||
test:
|
test:
|
||||||
name: Run Tests
|
name: Test Suite
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
# add windows-latest when it is clear why tests are failing
|
# add windows-latest when it is clear why tests are failing
|
||||||
os: [ubuntu-latest, macos-latest]
|
# os: [ubuntu-latest, macos-latest]
|
||||||
|
os: [ubuntu-latest]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout sources
|
- name: Checkout sources
|
||||||
uses: actions/checkout@v1
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Install stable toolchain
|
- name: Install stable toolchain
|
||||||
uses: actions-rs/toolchain@v1
|
uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: stable
|
toolchain: stable
|
||||||
|
profile: minimal
|
||||||
override: true
|
override: true
|
||||||
|
|
||||||
- name: Cache cargo registry
|
- name: Cache cargo registry, index and build directory
|
||||||
uses: actions/cache@v1
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: ~/.cargo/registry
|
path: |
|
||||||
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
~/.cargo/registry
|
||||||
|
~/.cargo/git
|
||||||
- name: Cache cargo index
|
./target
|
||||||
uses: actions/cache@v1
|
key: test-${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||||
with:
|
|
||||||
path: ~/.cargo/git
|
|
||||||
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
|
||||||
|
|
||||||
- name: Cache cargo build
|
|
||||||
uses: actions/cache@v1
|
|
||||||
with:
|
|
||||||
path: target
|
|
||||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
|
||||||
|
|
||||||
- name: Run Tests
|
- name: Run Tests
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
|
2  .gitignore  vendored
@@ -3,3 +3,5 @@
 repos
 cache
 hoc.log
+result
+.env
@@ -1,53 +0,0 @@
-image: docker:19.03
-services:
-  - docker:19.03-dind
-
-stages:
-  - build
-  - release
-
-variables:
-  DOCKER_HOST: tcp://docker:2375
-  DOCKER_DRIVER: overlay2
-  # DOCKER_TLS_CERTDIR: "/certs"
-  DOCKER_TLS_CERTDIR: ""
-  CONTAINER_BUILDER_IMAGE: $CI_REGISTRY_IMAGE:builder-latest
-  CONTAINER_TEST_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
-  CONTAINER_RELEASE_IMAGE: $CI_REGISTRY_IMAGE:latest
-
-before_script:
-  - export CONTAINER_TAG_IMAGE="$CI_REGISTRY_IMAGE:${CI_COMMIT_TAG:1}"
-  - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-
-build:
-  stage: build
-  script:
-    - docker pull $CONTAINER_TEST_IMAGE || true
-    - docker pull $CONTAINER_RELEASE_IMAGE || true
-    - docker pull $CONTAINER_TAG_IMAGE || true
-    - docker pull $CONTAINER_BUILDER_IMAGE || true
-    - docker build --pull -t $CONTAINER_BUILDER_IMAGE --target builder .
-    - docker build --pull -t $CONTAINER_TEST_IMAGE .
-    - docker push $CONTAINER_BUILDER_IMAGE
-    - docker push $CONTAINER_TEST_IMAGE
-
-release-image:
-  stage: release
-  script:
-    - docker pull $CONTAINER_TEST_IMAGE
-    - docker tag $CONTAINER_TEST_IMAGE $CONTAINER_RELEASE_IMAGE
-    - docker push $CONTAINER_RELEASE_IMAGE
-  only:
-    - master
-
-release-tag:
-  stage: release
-  script:
-    - docker pull $CONTAINER_TEST_IMAGE
-    - docker tag $CONTAINER_TEST_IMAGE $CONTAINER_TAG_IMAGE
-    - docker push $CONTAINER_TAG_IMAGE
-  only:
-    - /^v\d+\.\d+\.\d+/
-  except:
-    - branch
17  .travis.yml
@@ -1,17 +0,0 @@
-language: rust
-
-rust:
-  - stable
-  - beta
-  - nightly
-
-cache:
-  - cargo
-
-matrix:
-  allow_failures:
-    - rust: nightly
-
-notifications:
-  email:
-    on_failure: always
1922  Cargo.lock  generated
File diff suppressed because it is too large
53  Cargo.toml
@@ -1,28 +1,45 @@
 [package]
 name = "hoc"
-version = "0.11.5"
+version = "0.17.4"
 authors = ["Valentin Brandl <vbrandl@riseup.net>"]
 edition = "2018"
 build = "build.rs"

+[lib]
+path = "src/lib.rs"
+
+[[bin]]
+path = "src/main.rs"
+name = "hoc"
+
 [dependencies]
-actix-web = "2.0.0"
-badge = "0.2.0"
-bytes = "0.5.4"
-futures = "0.3.1"
-git2 = "0.11.0"
+actix-rt = "1.1.1"
+actix-web = "3.3.2"
+badge = "0.3.0"
+bytes = "1.0.1"
+config = { version = "0.11.0", features = ["toml"] }
+dotenv = "0.15.0"
+futures = "0.3.15"
+git2 = "0.13.20"
 lazy_static = "1.4.0"
-log = "0.4.8"
-log4rs = "0.10.0"
-number_prefix = "0.3.0"
-openssl-probe = "0.1.2"
-reqwest = "0.10.1"
-serde = "1.0.104"
-serde_derive = "1.0.103"
-serde_json = "1.0.47"
-structopt = "0.3.9"
-actix-rt = "1.0.0"
+number_prefix = "0.4.0"
+openssl-probe = "0.1.4"
+reqwest = "0.10.10"
+serde = "1.0.126"
+serde_derive = "1.0.123"
+serde_json = "1.0.64"
+tracing = "0.1.26"
+tracing-actix-web = "0.2.1"
+tracing-bunyan-formatter = "0.2.4"
+tracing-futures = "0.2.5"
+tracing-log = "0.1.2"
+tracing-subscriber = { version = "0.2.19", features = ["registry", "env-filter"] }

 [build-dependencies]
-ructe = "0.9.2"
-vergen = "3.0.4"
+ructe = "0.13.4"
+vergen = { version = "5.1.2", default-features = false, features = ["git"] }
+
+[dev-dependencies]
+ructe = "0.13.4"
+tempfile = "3.2.0"
+tokio = "0.2.25"
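The dependency changes swap the `log`/`log4rs` stack for `tracing`. The new `src/telemetry.rs` module referenced in `src/lib.rs` is not shown in this compare, so the following is only a hedged sketch of how the added crates (`tracing-subscriber` with the `registry`/`env-filter` features, `tracing-bunyan-formatter`, `tracing-log`) are conventionally wired together; the function name is illustrative:

```rust
// Hedged sketch, not the project's telemetry module: one common way to
// combine the tracing crates added in this Cargo.toml change.
use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
use tracing_log::LogTracer;
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};

fn init_telemetry() {
    // Forward records emitted through the `log` facade into tracing.
    LogTracer::init().expect("failed to set logger");

    // Respect RUST_LOG if set, otherwise default to "info".
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
    let formatting = BunyanFormattingLayer::new("hoc".into(), std::io::stdout);
    let subscriber = Registry::default()
        .with(filter)
        .with(JsonStorageLayer)
        .with(formatting);

    tracing::subscriber::set_global_default(subscriber).expect("failed to set subscriber");
}
```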
@@ -1,7 +1,8 @@
 FROM ekidd/rust-musl-builder:stable as builder

 # create new cargo project
-RUN USER=rust cargo init --bin
+RUN USER=rust cargo init --lib
+RUN echo 'fn main() { println!("Hello, world!"); }' >> src/main.rs
 # copy build config
 COPY --chown=rust ./Cargo.lock ./Cargo.lock
 COPY ./Cargo.toml ./Cargo.toml
29  README.md
@@ -1,9 +1,8 @@
 # Hits-of-Code

-[](https://hitsofcode.com/view/github/vbrandl/hoc)
+[](https://hitsofcode.com/github/vbrandl/hoc/view)
 [](https://drone.vbrandl.net/vbrandl/hoc)
 [](https://gitlab.com/vbrandl/hoc/pipelines)
-[](https://travis-ci.org/vbrandl/hoc)
 [](https://deps.rs/repo/github/vbrandl/hoc)

 Small webservice, that returns a badge of the Hits-of-Code of a git repository, as described by [Yegor
@@ -27,7 +26,11 @@ where `<service>` is one of `gitub`, `gitlab` or `bitbucket`. The HoC data can a
 https://<host>/<service>/<user>/<repo>/json
 ```

-There is also an overview page available via `https://<host>/view/<service>/<user>/<repo>`
+There is also an overview page available via `https://<host>/<service>/<user>/<repo>/view`
+
+To delete a repository and the cache from the server, send a `POST` request to
+`https://<host>/<service>/<user>/<repo>/delete`. On the overview page, there is a button to perform this operation. It
+will respond with a redirect to the overview page so the cache is rebuilt directly.

 ## Building

@@ -45,17 +48,29 @@ $ docker build .

 inside the repository.

+I'm currently working on migrating to [nix](https://nixos.org/nix). To get a development shell, run `nix-shell`, to
+build the package run `nix-build --attr package` and to build the Docker image, run `nix-build --attr dockerImage`.
+

 ## Running

-Run either the binary produced by cargo, the Docker container you just built (using docker-compose) or pull the image
-from [Docker Hub](https://hub.docker.com/r/vbrandl/hits-of-code)
+Rename [`hoc.toml.example`](./hoc.toml.example) to `hoc.toml` or [`.env.example`](./.env.example) to `.env` and set the
+correct value for `base_url`/`HOC_BASE_URL`. If you don't want to use a configuration or dotenv file, you can pass all
+parameters directly via environment variables. For variable names see [`.env.example`](./.env.example).
+
+To start a local instance of the service just run:
+
 ```
-$ docker run -it --rm vbrandl/hits-of-code --help
+$ HOC_BASE_URL='http://0.0.0.0:8080' ./hoc
 ```

-When running the binary directly, you need a git binary in your `PATH`.
+You can also use the Docker image:
+
+```
+$ docker run -p 8080:8080 --env HOC_BASE_URL='http://0.0.0.0:8080' -it --rm vbrandl/hits-of-code
+```
+
+When running the binary directly, you need a `git` binary in your `PATH`.
+

 ## License
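The `/json` endpoint mentioned in the README returns the fields of the `JsonResponse` struct added in `src/lib.rs` (`head`, `branch`, `count`, `commits`). A hedged client sketch follows, assuming `reqwest` with its `json` feature and a `tokio` runtime; the URL is just an example:

```rust
// Illustrative client for the documented /json endpoint; the struct mirrors
// the JsonResponse fields from src/lib.rs in this compare.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct HocJson {
    head: String,
    branch: String,
    count: u64,
    commits: u64,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let url = "https://hitsofcode.com/github/vbrandl/hoc/json";
    let info: HocJson = reqwest::get(url).await?.json().await?;
    println!(
        "{} hits of code over {} commits at {} ({})",
        info.count, info.commits, info.head, info.branch
    );
    Ok(())
}
```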
7  build.rs
@@ -2,11 +2,12 @@ extern crate ructe;
 extern crate vergen;

 use ructe::Ructe;
-use vergen::{generate_cargo_keys, ConstantsFlags};
+use vergen::{vergen, Config, ShaKind};

 fn main() {
-    let flags = ConstantsFlags::SHA_SHORT;
-    generate_cargo_keys(flags).expect("Unable to generate the cargo keys!");
+    let mut config = Config::default();
+    *config.git_mut().sha_kind_mut() = ShaKind::Short;
+    vergen(config).expect("Unable to generate static repo info");
     Ructe::from_env()
         .expect("ructe")
         .compile_templates("templates")
1  crate-hashes.json  Normal file
@@ -0,0 +1 @@
+{}
56  default.nix  Normal file
@@ -0,0 +1,56 @@
+{ sources ? import ./nix/sources.nix
+, system ? builtins.currentSystem
+}:
+
+let
+  rustOverlay = import "${sources.nixpkgs-mozilla}/rust-overlay.nix";
+  cargo2nixOverlay = import "${sources.cargo2nix}/overlay";
+
+  pkgs = import sources.nixpkgs {
+    # pkgs = import <nixpkgs> {
+    inherit system;
+    overlays = [ cargo2nixOverlay rustOverlay ];
+  };
+
+  rustPkgs = pkgs.rustBuilder.makePackageSet' {
+    rustChannel = "stable";
+    packageFun = import ./Cargo.nix;
+    localPatterns =
+      [
+        ''^(src|tests)(/.*)?''
+        ''[^/]*\.(rs|toml)$''
+        # include other directory from the project repository
+        ''^templates(/.*)?''
+        ''^static(/.*)?''
+        ''^.git.*(/.*)?''
+      ];
+    # packageOverrides
+  };
+in
+rec {
+  inherit rustPkgs;
+  shell = pkgs.mkShell {
+    inputsFrom = pkgs.lib.mapAttrsToList (_: pkg: pkg { }) rustPkgs.noBuild.workspace;
+    nativeBuildInputs = with rustPkgs; [ cargo rustc ];
+  };
+  package = (rustPkgs.workspace.hoc {}).overrideAttrs (drv: {
+    buildInputs = drv.buildInputs or [ ] ++ [ pkgs.git ];
+  });
+  dockerImage =
+    pkgs.dockerTools.buildImage {
+      name = "vbrandl/hits-of-code";
+      tag = package.version;
+
+      contents =
+        [
+          package
+          pkgs.cacert
+          pkgs.gitMinimal
+        ];
+
+      config = {
+        Cmd = [ "/bin/hoc" ];
+        WorkingDir = "/home/hoc";
+      };
+    };
+}
@@ -1,4 +1,4 @@
-version: "2"
+version: "3"

 services:
   hoc:
@@ -9,3 +9,5 @@ services:
     # ports:
     #   - "127.0.0.1:8080:8080"
     restart: always
+    env_file:
+      - ./.env
15  hoc.toml  Normal file
@@ -0,0 +1,15 @@
+# every parameter can also be set (or overwritten) by passing an environment
+# variable namend `HOC_<PARAMETERNAME>`, e.g.
+# `HOC_BASE_URL='https://hitsofcode.com' ./hoc`
+
+# these config parameters have default values and must not explicitly be set
+repodir = "./repos"
+cachedir = "./cache"
+port = 8080
+host = "0.0.0.0"
+workers = 4
+
+# these parameters don't have default values and must be set
+
+# this should be the public base URL of the service, e.g. `https://hitsofcode.com`
+base_url = "http://0.0.0.0:8080"
15  hoc.toml.example  Normal file
@@ -0,0 +1,15 @@
+# every parameter can also be set (or overwritten) by passing an environment
+# variable namend `HOC_<PARAMETERNAME>`, e.g.
+# `HOC_BASE_URL='https://hitsofcode.com' ./hoc`
+
+# these config parameters have default values and must not explicitly be set
+repodir = "./repos"
+cachedir = "./cache"
+port = 8080
+host = "0.0.0.0"
+workers = 4
+
+# these parameters don't have default values and must be set
+
+# this should be the public base URL of the service, e.g. `https://hitsofcode.com`
+base_url = "http://0.0.0.0:8080"
50  nix/sources.json  Normal file
@@ -0,0 +1,50 @@
+{
+    "cargo2nix": {
+        "branch": "master",
+        "description": "Convert a Cargo.lock to mkRustCrate statements for import in Nix",
+        "homepage": "",
+        "owner": "tenx-tech",
+        "repo": "cargo2nix",
+        "rev": "7bc062ccffc41dc7d3759b8b797e8b4f8dd23a15",
+        "sha256": "1z7xwk1hbp26aydsk3y07riy0ivwqss06n1470mvdl7allfcd1w5",
+        "type": "tarball",
+        "url": "https://github.com/tenx-tech/cargo2nix/archive/7bc062ccffc41dc7d3759b8b797e8b4f8dd23a15.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "niv": {
+        "branch": "master",
+        "description": "Easy dependency management for Nix projects",
+        "homepage": "https://github.com/nmattia/niv",
+        "owner": "nmattia",
+        "repo": "niv",
+        "rev": "98c74a80934123cb4c3bf3314567f67311eb711a",
+        "sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn",
+        "type": "tarball",
+        "url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "nixpkgs": {
+        "branch": "nixpkgs-unstable",
+        "description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to",
+        "homepage": "https://github.com/NixOS/nixpkgs",
+        "owner": "NixOS",
+        "repo": "nixpkgs-channels",
+        "rev": "f6bfb371cba2b5a02f200c2747c1fe2c72bd782f",
+        "sha256": "0y3hlbyvznrpr1d2vxj2511hkjg733wdnxfaib3fgy9i9jr8ivzn",
+        "type": "tarball",
+        "url": "https://github.com/NixOS/nixpkgs-channels/archive/f6bfb371cba2b5a02f200c2747c1fe2c72bd782f.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    },
+    "nixpkgs-mozilla": {
+        "branch": "master",
+        "description": "mozilla related nixpkgs (extends nixos/nixpkgs repo)",
+        "homepage": null,
+        "owner": "mozilla",
+        "repo": "nixpkgs-mozilla",
+        "rev": "e912ed483e980dfb4666ae0ed17845c4220e5e7c",
+        "sha256": "08fvzb8w80bkkabc1iyhzd15f4sm7ra10jn32kfch5klgl0gj3j3",
+        "type": "tarball",
+        "url": "https://github.com/mozilla/nixpkgs-mozilla/archive/e912ed483e980dfb4666ae0ed17845c4220e5e7c.tar.gz",
+        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+    }
+}
134  nix/sources.nix  Normal file
@ -0,0 +1,134 @@
|
|||||||
|
# This file has been generated by Niv.
|
||||||
|
|
||||||
|
let
|
||||||
|
|
||||||
|
#
|
||||||
|
# The fetchers. fetch_<type> fetches specs of type <type>.
|
||||||
|
#
|
||||||
|
|
||||||
|
fetch_file = pkgs: spec:
|
||||||
|
if spec.builtin or true then
|
||||||
|
builtins_fetchurl { inherit (spec) url sha256; }
|
||||||
|
else
|
||||||
|
pkgs.fetchurl { inherit (spec) url sha256; };
|
||||||
|
|
||||||
|
fetch_tarball = pkgs: spec:
|
||||||
|
if spec.builtin or true then
|
||||||
|
builtins_fetchTarball { inherit (spec) url sha256; }
|
||||||
|
else
|
||||||
|
pkgs.fetchzip { inherit (spec) url sha256; };
|
||||||
|
|
||||||
|
fetch_git = spec:
|
||||||
|
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
|
||||||
|
|
||||||
|
fetch_builtin-tarball = spec:
|
||||||
|
builtins.trace
|
||||||
|
''
|
||||||
|
WARNING:
|
||||||
|
The niv type "builtin-tarball" will soon be deprecated. You should
|
||||||
|
instead use `builtin = true`.
|
||||||
|
|
||||||
|
$ niv modify <package> -a type=tarball -a builtin=true
|
||||||
|
''
|
||||||
|
builtins_fetchTarball { inherit (spec) url sha256; };
|
||||||
|
|
||||||
|
fetch_builtin-url = spec:
|
||||||
|
builtins.trace
|
||||||
|
''
|
||||||
|
WARNING:
|
||||||
|
The niv type "builtin-url" will soon be deprecated. You should
|
||||||
|
instead use `builtin = true`.
|
||||||
|
|
||||||
|
$ niv modify <package> -a type=file -a builtin=true
|
||||||
|
''
|
||||||
|
(builtins_fetchurl { inherit (spec) url sha256; });
|
||||||
|
|
||||||
|
#
|
||||||
|
# Various helpers
|
||||||
|
#
|
||||||
|
|
||||||
|
# The set of packages used when specs are fetched using non-builtins.
|
||||||
|
mkPkgs = sources:
|
||||||
|
let
|
||||||
|
sourcesNixpkgs =
|
||||||
|
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
|
||||||
|
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
|
||||||
|
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
|
||||||
|
in
|
||||||
|
if builtins.hasAttr "nixpkgs" sources
|
||||||
|
then sourcesNixpkgs
|
||||||
|
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
|
||||||
|
import <nixpkgs> {}
|
||||||
|
else
|
||||||
|
abort
|
||||||
|
''
|
||||||
|
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
|
||||||
|
add a package called "nixpkgs" to your sources.json.
|
||||||
|
'';
|
||||||
|
|
||||||
|
# The actual fetching function.
|
||||||
|
fetch = pkgs: name: spec:
|
||||||
|
|
||||||
|
if ! builtins.hasAttr "type" spec then
|
||||||
|
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
|
||||||
|
else if spec.type == "file" then fetch_file pkgs spec
|
||||||
|
else if spec.type == "tarball" then fetch_tarball pkgs spec
|
||||||
|
else if spec.type == "git" then fetch_git spec
|
||||||
|
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
|
||||||
|
else if spec.type == "builtin-url" then fetch_builtin-url spec
|
||||||
|
else
|
||||||
|
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
|
||||||
|
|
||||||
|
# Ports of functions for older nix versions
|
||||||
|
|
||||||
|
# a Nix version of mapAttrs if the built-in doesn't exist
|
||||||
|
mapAttrs = builtins.mapAttrs or (
|
||||||
|
f: set: with builtins;
|
||||||
|
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
|
||||||
|
);
|
||||||
|
|
||||||
|
# fetchTarball version that is compatible between all the versions of Nix
|
||||||
|
builtins_fetchTarball = { url, sha256 }@attrs:
|
||||||
|
let
|
||||||
|
inherit (builtins) lessThan nixVersion fetchTarball;
|
||||||
|
in
|
||||||
|
if lessThan nixVersion "1.12" then
|
||||||
|
fetchTarball { inherit url; }
|
||||||
|
else
|
||||||
|
fetchTarball attrs;
|
||||||
|
|
||||||
|
# fetchurl version that is compatible between all the versions of Nix
|
||||||
|
builtins_fetchurl = { url, sha256 }@attrs:
|
||||||
|
let
|
||||||
|
inherit (builtins) lessThan nixVersion fetchurl;
|
||||||
|
in
|
||||||
|
if lessThan nixVersion "1.12" then
|
||||||
|
fetchurl { inherit url; }
|
||||||
|
else
|
||||||
|
fetchurl attrs;
|
||||||
|
|
||||||
|
# Create the final "sources" from the config
|
||||||
|
mkSources = config:
|
||||||
|
mapAttrs (
|
||||||
|
name: spec:
|
||||||
|
if builtins.hasAttr "outPath" spec
|
||||||
|
then abort
|
||||||
|
"The values in sources.json should not have an 'outPath' attribute"
|
||||||
|
else
|
||||||
|
spec // { outPath = fetch config.pkgs name spec; }
|
||||||
|
) config.sources;
|
||||||
|
|
||||||
|
# The "config" used by the fetchers
|
||||||
|
mkConfig =
|
||||||
|
{ sourcesFile ? ./sources.json
|
||||||
|
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
|
||||||
|
, pkgs ? mkPkgs sources
|
||||||
|
}: rec {
|
||||||
|
# The sources, i.e. the attribute set of spec name to spec
|
||||||
|
inherit sources;
|
||||||
|
|
||||||
|
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
|
||||||
|
inherit pkgs;
|
||||||
|
};
|
||||||
|
in
|
||||||
|
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
|
113  src/cache.rs
@ -1,57 +1,124 @@
|
|||||||
use crate::error::{Error, Result};
|
use crate::error::{Error, Result};
|
||||||
use std::{
|
use std::{
|
||||||
borrow::Cow,
|
borrow::Cow,
|
||||||
|
collections::HashMap,
|
||||||
fs::{create_dir_all, File, OpenOptions},
|
fs::{create_dir_all, File, OpenOptions},
|
||||||
io::BufReader,
|
io::BufReader,
|
||||||
path::Path,
|
path::Path,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Enum to indicate the state of the cache
|
/// Enum to indicate the state of the cache
|
||||||
|
#[derive(Debug)]
|
||||||
pub(crate) enum CacheState<'a> {
|
pub(crate) enum CacheState<'a> {
|
||||||
/// Current head and cached head are the same
|
/// Current head and cached head are the same
|
||||||
Current { count: u64, commits: u64 },
|
Current {
|
||||||
|
count: u64,
|
||||||
|
commits: u64,
|
||||||
|
cache: Cache<'a>,
|
||||||
|
},
|
||||||
/// Cached head is older than current head
|
/// Cached head is older than current head
|
||||||
Old(Cache<'a>),
|
Old {
|
||||||
|
head: String,
|
||||||
|
cache: Cache<'a>,
|
||||||
|
},
|
||||||
|
NoneForBranch(Cache<'a>),
|
||||||
/// No cache was found
|
/// No cache was found
|
||||||
No,
|
No,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> CacheState<'a> {
|
impl<'a> CacheState<'a> {
|
||||||
pub(crate) fn read_from_file(path: impl AsRef<Path>, head: &str) -> Result<CacheState> {
|
#[instrument]
|
||||||
|
pub(crate) fn read_from_file(
|
||||||
|
path: impl AsRef<Path> + std::fmt::Debug,
|
||||||
|
branch: &str,
|
||||||
|
head: &str,
|
||||||
|
) -> Result<CacheState<'a>> {
|
||||||
|
trace!("Reading cache");
|
||||||
if path.as_ref().exists() {
|
if path.as_ref().exists() {
|
||||||
let cache: Cache = serde_json::from_reader(BufReader::new(File::open(path)?))?;
|
let cache: Cache = serde_json::from_reader(BufReader::new(File::open(path)?))?;
|
||||||
if cache.head == head {
|
Ok(cache
|
||||||
Ok(CacheState::Current {
|
.entries
|
||||||
count: cache.count,
|
.get(branch)
|
||||||
commits: cache.commits,
|
.map(|c| {
|
||||||
|
if c.head == head {
|
||||||
|
trace!("Cache is up to date");
|
||||||
|
CacheState::Current {
|
||||||
|
count: c.count,
|
||||||
|
commits: c.commits,
|
||||||
|
// TODO: get rid of clone
|
||||||
|
cache: cache.clone(),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
trace!("Cache is out of date");
|
||||||
|
CacheState::Old {
|
||||||
|
head: c.head.to_string(),
|
||||||
|
// TODO: get rid of clone
|
||||||
|
cache: cache.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
})
|
})
|
||||||
} else {
|
// TODO: get rid of clone
|
||||||
Ok(CacheState::Old(cache))
|
.unwrap_or_else(|| CacheState::NoneForBranch(cache.clone())))
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
Ok(CacheState::No)
|
Ok(CacheState::No)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn calculate_new_cache(self, count: u64, commits: u64, head: Cow<'a, str>) -> Cache {
|
#[instrument]
|
||||||
|
pub(crate) fn calculate_new_cache(
|
||||||
|
self,
|
||||||
|
count: u64,
|
||||||
|
commits: u64,
|
||||||
|
head: Cow<'a, str>,
|
||||||
|
branch: &'a str,
|
||||||
|
) -> Cache<'a> {
|
||||||
|
trace!("Calculating new cache");
|
||||||
match self {
|
match self {
|
||||||
CacheState::Old(mut cache) => {
|
CacheState::Old { mut cache, .. } => {
|
||||||
cache.head = head;
|
if let Some(mut cache) = cache.entries.get_mut(branch) {
|
||||||
cache.count += count;
|
cache.head = head;
|
||||||
cache.commits += commits;
|
cache.count += count;
|
||||||
|
cache.commits += commits;
|
||||||
|
}
|
||||||
cache
|
cache
|
||||||
}
|
}
|
||||||
CacheState::No | CacheState::Current { .. } => Cache {
|
CacheState::Current { cache, .. } => cache,
|
||||||
head,
|
CacheState::NoneForBranch(mut cache) => {
|
||||||
count,
|
trace!("Creating new cache for branch");
|
||||||
commits,
|
cache.entries.insert(
|
||||||
},
|
branch.into(),
|
||||||
|
CacheEntry {
|
||||||
|
head,
|
||||||
|
count,
|
||||||
|
commits,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
cache
|
||||||
|
}
|
||||||
|
CacheState::No => {
|
||||||
|
trace!("Creating new cache file");
|
||||||
|
let mut entries = HashMap::with_capacity(1);
|
||||||
|
entries.insert(
|
||||||
|
branch.into(),
|
||||||
|
CacheEntry {
|
||||||
|
head,
|
||||||
|
count,
|
||||||
|
commits,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
Cache { entries }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||||
pub(crate) struct Cache<'a> {
|
pub(crate) struct Cache<'a> {
|
||||||
|
pub entries: HashMap<Cow<'a, str>, CacheEntry<'a>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||||
|
pub(crate) struct CacheEntry<'a> {
|
||||||
/// HEAD commit ref
|
/// HEAD commit ref
|
||||||
pub head: Cow<'a, str>,
|
pub head: Cow<'a, str>,
|
||||||
/// HoC value
|
/// HoC value
|
||||||
@ -61,7 +128,9 @@ pub(crate) struct Cache<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Cache<'a> {
|
impl<'a> Cache<'a> {
|
||||||
pub(crate) fn write_to_file(&self, path: impl AsRef<Path>) -> Result<()> {
|
#[instrument]
|
||||||
|
pub(crate) fn write_to_file(&self, path: impl AsRef<Path> + std::fmt::Debug) -> Result<()> {
|
||||||
|
trace!("Persisting cache to disk");
|
||||||
create_dir_all(path.as_ref().parent().ok_or(Error::Internal)?)?;
|
create_dir_all(path.as_ref().parent().ok_or(Error::Internal)?)?;
|
||||||
serde_json::to_writer(
|
serde_json::to_writer(
|
||||||
OpenOptions::new()
|
OpenOptions::new()
|
||||||
|
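The `src/cache.rs` diff above replaces the flat `head`/`count`/`commits` cache with a map of per-branch entries. A minimal sketch of the resulting on-disk shape follows (field names follow the diff; the values and the standalone `main` are illustrative, and `serde`'s derive feature is assumed):

```rust
// Hedged sketch of the per-branch cache layout from src/cache.rs: a Cache
// holds one CacheEntry per branch name.
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, collections::HashMap};

#[derive(Serialize, Deserialize, Clone, Debug)]
struct CacheEntry<'a> {
    head: Cow<'a, str>, // HEAD commit of the branch when the cache was written
    count: u64,         // hits-of-code value
    commits: u64,       // number of commits counted
}

#[derive(Serialize, Deserialize, Clone, Debug)]
struct Cache<'a> {
    entries: HashMap<Cow<'a, str>, CacheEntry<'a>>, // keyed by branch name
}

fn main() -> serde_json::Result<()> {
    let mut entries = HashMap::new();
    entries.insert(
        Cow::from("master"),
        CacheEntry { head: Cow::from("1a2b3c4d"), count: 12_345, commits: 300 },
    );
    // Prints: {"entries":{"master":{"head":"1a2b3c4d","count":12345,"commits":300}}}
    println!("{}", serde_json::to_string(&Cache { entries })?);
    Ok(())
}
```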
@@ -1,71 +1,38 @@
-use crate::{error::Result, statics::OPT};
-use log::LevelFilter;
-use log4rs::{
-    append::{console::ConsoleAppender, file::FileAppender},
-    config::{Appender, Config, Root},
-    encode::pattern::PatternEncoder,
-};
+use config::{Config, ConfigError, Environment, File};
 use std::path::PathBuf;
-use structopt::StructOpt;

-#[derive(StructOpt, Debug)]
-pub(crate) struct Opt {
-    #[structopt(
-        short = "o",
-        long = "outdir",
-        parse(from_os_str),
-        default_value = "./repos"
-    )]
+#[derive(Debug, Deserialize)]
+pub struct Settings {
     /// Path to store cloned repositories
-    pub(crate) outdir: PathBuf,
-    #[structopt(
-        short = "c",
-        long = "cachedir",
-        parse(from_os_str),
-        default_value = "./cache"
-    )]
+    pub repodir: PathBuf,
     /// Path to store cache
-    pub(crate) cachedir: PathBuf,
-    #[structopt(short = "p", long = "port", default_value = "8080")]
+    pub cachedir: PathBuf,
     /// Port to listen on
-    pub(crate) port: u16,
-    #[structopt(short = "h", long = "host", default_value = "0.0.0.0")]
+    pub port: u16,
     /// Interface to listen on
-    pub(crate) host: String,
-    #[structopt(short = "d", long = "domain", default_value = "hitsofcode.com")]
-    /// Interface to listen on
-    pub(crate) domain: String,
-    #[structopt(short = "w", long = "workers", default_value = "4")]
+    pub host: String,
+    /// Base URL
+    pub base_url: String,
     /// Number of worker threads
-    pub(crate) workers: usize,
-    #[structopt(
-        short = "l",
-        long = "logfile",
-        parse(from_os_str),
-        default_value = "./hoc.log"
-    )]
-    /// The logfile
-    pub(crate) logfile: PathBuf,
+    pub workers: usize,
 }

-pub(crate) async fn init() -> Result<()> {
-    std::env::set_var("RUST_LOG", "actix_web=info,hoc=info");
-    // pretty_env_logger::init();
-    openssl_probe::init_ssl_cert_env_vars();
-    let stdout = ConsoleAppender::builder().build();
-    let file = FileAppender::builder()
-        .encoder(Box::new(PatternEncoder::new("{d} - {m}{n}")))
-        .build(&OPT.logfile)
-        .unwrap();
-    let config = Config::builder()
-        .appender(Appender::builder().build("stdout", Box::new(stdout)))
-        .appender(Appender::builder().build("file", Box::new(file)))
-        .build(
-            Root::builder()
-                .appender("stdout")
-                .appender("file")
-                .build(LevelFilter::Info),
-        )?;
-    log4rs::init_config(config)?;
-    Ok(())
+impl Settings {
+    #[deprecated]
+    pub fn new() -> Result<Self, ConfigError> {
+        Self::load()
+    }
+
+    pub fn load() -> Result<Self, ConfigError> {
+        let mut config = Config::new();
+        config
+            .merge(File::with_name("hoc.toml").required(false))?
+            .merge(Environment::with_prefix("hoc"))?
+            .set_default("repodir", "./repos")?
+            .set_default("cachedir", "./cache")?
+            .set_default("workers", 4)?
+            .set_default("port", 8080)?
+            .set_default("host", "0.0.0.0")?;
+        config.try_into()
+    }
 }
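`Settings::load()` merges an optional `hoc.toml`, `HOC_`-prefixed environment variables (which override the file), and the built-in defaults; only `base_url` has no default and must be provided. `src/main.rs` is not part of this compare, so the consumer below is only a usage sketch:

```rust
// Illustrative consumer of the new Settings type (not taken from the diff).
fn main() -> Result<(), config::ConfigError> {
    // Environment variables override hoc.toml; defaults fill anything unset.
    let settings = hoc::config::Settings::load()?;
    println!(
        "listening on {}:{} with {} workers, serving {}",
        settings.host, settings.port, settings.workers, settings.base_url
    );
    Ok(())
}
```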
@@ -1,10 +1,13 @@
 use crate::error::Result;
 use std::{fs::read_dir, path::Path, result::Result as StdResult};

+#[instrument]
 pub(crate) fn count_repositories<P>(repo_path: P) -> Result<usize>
 where
-    P: AsRef<Path>,
+    P: AsRef<Path> + std::fmt::Debug,
 {
+    trace!("Counting repositories");
+    std::fs::create_dir_all(&repo_path)?;
     Ok(read_dir(repo_path)?
         .filter_map(StdResult::ok)
         .filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
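The added `create_dir_all` call means `count_repositories` no longer fails when the repository directory does not exist yet. A hedged test sketch using the `tempfile` dev-dependency added in `Cargo.toml` (illustrative, not part of the diff, and assumed to live inside the crate next to the `pub(crate)` function):

```rust
// Illustrative in-crate test: a missing repo directory is created and
// counted as empty instead of producing an I/O error.
#[cfg(test)]
mod tests {
    use super::count_repositories;

    #[test]
    fn creates_missing_repo_dir() {
        let dir = tempfile::tempdir().expect("tempdir");
        let missing = dir.path().join("repos");
        let count = count_repositories(&missing).expect("should create the directory and succeed");
        assert_eq!(count, 0);
        assert!(missing.exists());
    }
}
```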
48  src/error.rs
@@ -1,9 +1,6 @@
-use crate::{
-    statics::{REPO_COUNT, VERSION_INFO},
-    templates,
-};
-use actix_web::{HttpResponse, ResponseError};
-use std::{fmt, sync::atomic::Ordering};
+use crate::{statics::VERSION_INFO, templates};
+use actix_web::{http::StatusCode, HttpResponse, ResponseError};
+use std::fmt;

 pub(crate) type Result<T> = std::result::Result<T, Error>;

@@ -14,11 +11,9 @@ pub(crate) enum Error {
     Git(git2::Error),
     Internal,
     Io(std::io::Error),
-    Log(log::SetLoggerError),
-    LogBuilder(log4rs::config::Errors),
     Parse(std::num::ParseIntError),
     Serial(serde_json::Error),
-    GitNoMaster,
+    BranchNotFound,
 }

 impl fmt::Display for Error {
@@ -29,27 +24,30 @@ impl fmt::Display for Error {
             Error::Git(e) => write!(fmt, "Git({})", e),
             Error::Internal => write!(fmt, "Internal Error"),
             Error::Io(e) => write!(fmt, "Io({})", e),
-            Error::Log(e) => write!(fmt, "Log({})", e),
-            Error::LogBuilder(e) => write!(fmt, "LogBuilder({})", e),
             Error::Parse(e) => write!(fmt, "Parse({})", e),
             Error::Serial(e) => write!(fmt, "Serial({})", e),
-            Error::GitNoMaster => write!(fmt, "Repo doesn't have master branch"),
+            Error::BranchNotFound => write!(fmt, "Repo doesn't have master branch"),
         }
     }
 }

 impl ResponseError for Error {
+    fn status_code(&self) -> StatusCode {
+        match self {
+            Error::BranchNotFound => StatusCode::NOT_FOUND,
+            _ => StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+
     fn error_response(&self) -> HttpResponse {
         let mut buf = Vec::new();
         match self {
-            Error::GitNoMaster => {
-                templates::p404_no_master(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed)).unwrap();
-                HttpResponse::NotFound()
-                    .content_type("text/html")
-                    .body(buf)
-            },
+            Error::BranchNotFound => {
+                templates::p404_no_master(&mut buf, VERSION_INFO, 0).unwrap();
+                HttpResponse::NotFound().content_type("text/html").body(buf)
+            }
             _ => {
-                templates::p500(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed)).unwrap();
+                templates::p500(&mut buf, VERSION_INFO, 0).unwrap();
                 HttpResponse::InternalServerError()
                     .content_type("text/html")
                     .body(buf)
@@ -72,12 +70,6 @@ impl From<git2::Error> for Error {
     }
 }

-impl From<log::SetLoggerError> for Error {
-    fn from(err: log::SetLoggerError) -> Self {
-        Error::Log(err)
-    }
-}
-
 impl From<std::io::Error> for Error {
     fn from(err: std::io::Error) -> Self {
         Error::Io(err)
@@ -96,12 +88,6 @@ impl From<reqwest::Error> for Error {
     }
 }

-impl From<log4rs::config::Errors> for Error {
-    fn from(err: log4rs::config::Errors) -> Self {
-        Error::LogBuilder(err)
-    }
-}
-
 impl From<std::num::ParseIntError> for Error {
     fn from(err: std::num::ParseIntError) -> Self {
         Error::Parse(err)
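With the new `status_code` implementation, a handler only needs to return `Err(Error::BranchNotFound)` and actix-web answers with the rendered 404 page instead of a generic 500. The route below is an illustrative in-crate sketch, not taken from the diff; `Result` is the crate's alias from `src/error.rs`:

```rust
// Hedged sketch: bubbling the new error variant out of a handler. The
// ResponseError impl above converts it into a 404 with the p404_no_master
// template; every other variant still maps to 500.
use actix_web::{web, HttpResponse};

async fn badge_for_branch(branch: web::Path<String>) -> Result<HttpResponse> {
    let branch = branch.into_inner();
    if branch != "master" && branch != "main" {
        // Rendered via templates::p404_no_master by error_response().
        return Err(Error::BranchNotFound);
    }
    Ok(HttpResponse::Ok().body(format!("badge for {}", branch)))
}
```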
525  src/lib.rs  Normal file
@ -0,0 +1,525 @@
|
|||||||
|
#![type_length_limit = "2257138"]
|
||||||
|
|
||||||
|
#[macro_use]
|
||||||
|
extern crate actix_web;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate lazy_static;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate serde_derive;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate tracing;
|
||||||
|
|
||||||
|
mod cache;
|
||||||
|
pub mod config;
|
||||||
|
mod count;
|
||||||
|
mod error;
|
||||||
|
mod service;
|
||||||
|
mod statics;
|
||||||
|
pub mod telemetry;
|
||||||
|
mod template;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
cache::CacheState,
|
||||||
|
config::Settings,
|
||||||
|
error::{Error, Result},
|
||||||
|
service::{Bitbucket, FormService, GitHub, Gitlab, Service},
|
||||||
|
statics::{CLIENT, CSS, FAVICON, VERSION_INFO},
|
||||||
|
template::RepoInfo,
|
||||||
|
};
|
||||||
|
use actix_web::{
|
||||||
|
dev::Server,
|
||||||
|
http::header::{CacheControl, CacheDirective, Expires, LOCATION},
|
||||||
|
middleware::{self, normalize::TrailingSlash},
|
||||||
|
web, App, HttpResponse, HttpServer, Responder,
|
||||||
|
};
|
||||||
|
use badge::{Badge, BadgeOptions};
|
||||||
|
use git2::{BranchType, Repository};
|
||||||
|
use number_prefix::NumberPrefix;
|
||||||
|
use std::{
|
||||||
|
borrow::Cow,
|
||||||
|
fs::create_dir_all,
|
||||||
|
io,
|
||||||
|
net::TcpListener,
|
||||||
|
path::Path,
|
||||||
|
process::Command,
|
||||||
|
sync::atomic::AtomicUsize,
|
||||||
|
sync::atomic::Ordering,
|
||||||
|
time::{Duration, SystemTime},
|
||||||
|
};
|
||||||
|
use tracing::Instrument;
|
||||||
|
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/templates.rs"));
|
||||||
|
|
||||||
|
#[derive(Deserialize, Serialize)]
|
||||||
|
struct GeneratorForm<'a> {
|
||||||
|
service: FormService,
|
||||||
|
user: Cow<'a, str>,
|
||||||
|
repo: Cow<'a, str>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub(crate) struct State {
|
||||||
|
settings: Settings,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl State {
|
||||||
|
fn repos(&self) -> String {
|
||||||
|
self.settings.repodir.display().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn cache(&self) -> String {
|
||||||
|
self.settings.cachedir.display().to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct JsonResponse<'a> {
|
||||||
|
head: &'a str,
|
||||||
|
branch: &'a str,
|
||||||
|
count: u64,
|
||||||
|
commits: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BranchQuery {
|
||||||
|
branch: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn pull(path: impl AsRef<Path>) -> Result<()> {
|
||||||
|
let repo = Repository::open_bare(path)?;
|
||||||
|
let mut origin = repo.find_remote("origin")?;
|
||||||
|
origin.fetch(&["refs/heads/*:refs/heads/*"], None, None)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn hoc(repo: &str, repo_dir: &str, cache_dir: &str, branch: &str) -> Result<(u64, String, u64)> {
|
||||||
|
let repo_dir = format!("{}/{}", repo_dir, repo);
|
||||||
|
let cache_dir = format!("{}/{}.json", cache_dir, repo);
|
||||||
|
let cache_dir = Path::new(&cache_dir);
|
||||||
|
let repo = Repository::open_bare(&repo_dir)?;
|
||||||
|
// TODO: do better...
|
||||||
|
let head = repo
|
||||||
|
.find_branch(branch, BranchType::Local)
|
||||||
|
.map_err(|_| Error::BranchNotFound)?
|
||||||
|
.into_reference();
|
||||||
|
let head = format!("{}", head.target().ok_or(Error::BranchNotFound)?);
|
||||||
|
let mut arg_commit_count = vec!["rev-list".to_string(), "--count".to_string()];
|
||||||
|
let mut arg = vec![
|
||||||
|
"log".to_string(),
|
||||||
|
"--pretty=tformat:".to_string(),
|
||||||
|
"--numstat".to_string(),
|
||||||
|
"--ignore-space-change".to_string(),
|
||||||
|
"--ignore-all-space".to_string(),
|
||||||
|
"--ignore-submodules".to_string(),
|
||||||
|
"--no-color".to_string(),
|
||||||
|
"--find-copies-harder".to_string(),
|
||||||
|
"-M".to_string(),
|
||||||
|
"--diff-filter=ACDM".to_string(),
|
||||||
|
];
|
||||||
|
let cache = CacheState::read_from_file(&cache_dir, branch, &head)?;
|
||||||
|
match &cache {
|
||||||
|
CacheState::Current { count, commits, .. } => {
|
||||||
|
info!("Using cache");
|
||||||
|
return Ok((*count, head, *commits));
|
||||||
|
}
|
||||||
|
CacheState::Old { head, .. } => {
|
||||||
|
info!("Updating cache");
|
||||||
|
arg.push(format!("{}..{}", head, branch));
|
||||||
|
arg_commit_count.push(format!("{}..{}", head, branch));
|
||||||
|
}
|
||||||
|
CacheState::No | CacheState::NoneForBranch(..) => {
|
||||||
|
info!("Creating cache");
|
||||||
|
arg.push(branch.to_string());
|
||||||
|
arg_commit_count.push(branch.to_string());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
arg.push("--".to_string());
|
||||||
|
arg.push(".".to_string());
|
||||||
|
let output = Command::new("git")
|
||||||
|
.args(&arg)
|
||||||
|
.current_dir(&repo_dir)
|
||||||
|
.output()?
|
||||||
|
.stdout;
|
||||||
|
let output = String::from_utf8_lossy(&output);
|
||||||
|
let output_commits = Command::new("git")
|
||||||
|
.args(&arg_commit_count)
|
||||||
|
.current_dir(&repo_dir)
|
||||||
|
.output()?
|
||||||
|
.stdout;
|
||||||
|
let output_commits = String::from_utf8_lossy(&output_commits);
|
||||||
|
let commits: u64 = output_commits.trim().parse()?;
|
||||||
|
let count: u64 = output
|
||||||
|
.lines()
|
||||||
|
.map(|s| {
|
||||||
|
s.split_whitespace()
|
||||||
|
.take(2)
|
||||||
|
.map(str::parse::<u64>)
|
||||||
|
.filter_map(std::result::Result::ok)
|
||||||
|
.sum::<u64>()
|
||||||
|
})
|
||||||
|
.sum();
|
||||||
|
|
||||||
|
let cache = cache.calculate_new_cache(count, commits, (&head).into(), branch);
|
||||||
|
cache.write_to_file(cache_dir)?;
|
||||||
|
|
||||||
|
Ok((count, head, commits))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn remote_exists(url: &str) -> Result<bool> {
|
||||||
|
let resp = CLIENT.head(url).send().await?;
|
||||||
|
Ok(resp.status() == reqwest::StatusCode::OK)
|
||||||
|
}
|
||||||
|
|
||||||
|
enum HocResult {
|
||||||
|
Hoc {
|
||||||
|
hoc: u64,
|
||||||
|
commits: u64,
|
||||||
|
hoc_pretty: String,
|
||||||
|
head: String,
|
||||||
|
url: String,
|
||||||
|
repo: String,
|
||||||
|
service_path: String,
|
||||||
|
},
|
||||||
|
NotFound,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deletes the cached JSON and the bare repository for `{user}/{repo}`,
/// decrements the repository counter and redirects back to the view page;
/// the data is cloned again on the next badge request.
async fn delete_repo_and_cache<T>(
    state: web::Data<State>,
    repo_count: web::Data<AtomicUsize>,
    data: web::Path<(String, String)>,
) -> Result<impl Responder>
where
    T: Service,
{
    let data = data.into_inner();
    let span = info_span!(
        "deleting repository and cache",
        service = T::domain(),
        user = data.0.as_str(),
        repo = data.1.as_str()
    );
    let future = async {
        let repo = format!(
            "{}/{}/{}",
            T::domain(),
            data.0.to_lowercase(),
            data.1.to_lowercase()
        );
        info!("Deleting cache and repository");
        let cache_dir = format!("{}/{}.json", &state.cache(), repo);
        let repo_dir = format!("{}/{}", &state.repos(), repo);
        std::fs::remove_file(&cache_dir).or_else(|e| {
            if e.kind() == io::ErrorKind::NotFound {
                Ok(())
            } else {
                Err(e)
            }
        })?;
        std::fs::remove_dir_all(&repo_dir).or_else(|e| {
            if e.kind() == io::ErrorKind::NotFound {
                Ok(())
            } else {
                Err(e)
            }
        })?;
        repo_count.fetch_sub(1, Ordering::Relaxed);
        Ok(HttpResponse::TemporaryRedirect()
            .header(
                LOCATION,
                format!("/{}/{}/{}/view", T::url_path(), data.0, data.1),
            )
            .finish())
    };
    future.instrument(span).await
}

/// Shared handler: makes sure the bare repository exists locally (cloning it on
/// the first request), fetches the latest refs, computes the hits-of-code for
/// `branch` and passes the result to `mapper`, which builds the concrete response.
async fn handle_hoc_request<T, F>(
    state: web::Data<State>,
    repo_count: web::Data<AtomicUsize>,
    data: web::Path<(String, String)>,
    branch: &str,
    mapper: F,
) -> Result<HttpResponse>
where
    T: Service,
    F: FnOnce(HocResult) -> Result<HttpResponse>,
{
    let data = data.into_inner();
    let span = info_span!(
        "handling hoc calculation",
        service = T::domain(),
        user = data.0.as_str(),
        repo = data.1.as_str(),
        branch
    );
    let future = async {
        let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
        let service_path = format!("{}/{}", T::url_path(), repo);
        let service_url = format!("{}/{}", T::domain(), repo);
        let path = format!("{}/{}", state.repos(), service_url);
        let url = format!("https://{}", service_url);
        let remote_exists = remote_exists(&url).await?;
        let file = Path::new(&path);
        if !file.exists() {
            if !remote_exists {
                warn!("Repository does not exist");
                return mapper(HocResult::NotFound);
            }
            info!("Cloning for the first time");
            create_dir_all(file)?;
            let repo = Repository::init_bare(file)?;
            repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
            repo.remote_set_url("origin", &url)?;
            repo_count.fetch_add(1, Ordering::Relaxed);
        }
        pull(&path)?;
        let (hoc, head, commits) = hoc(&service_url, &state.repos(), &state.cache(), branch)?;
        let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
            NumberPrefix::Standalone(hoc) => hoc.to_string(),
            NumberPrefix::Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
        };
        let res = HocResult::Hoc {
            hoc,
            commits,
            hoc_pretty,
            head,
            url,
            repo,
            service_path,
        };
        mapper(res)
    };
    future.instrument(span).await
}

pub(crate) async fn json_hoc<T: Service>(
    state: web::Data<State>,
    repo_count: web::Data<AtomicUsize>,
    data: web::Path<(String, String)>,
    branch: web::Query<BranchQuery>,
) -> Result<HttpResponse> {
    let branch = branch.branch.as_deref().unwrap_or("master");
    let rc_clone = repo_count.clone();
    let mapper = move |r| match r {
        HocResult::NotFound => p404(rc_clone),
        HocResult::Hoc {
            hoc, head, commits, ..
        } => Ok(HttpResponse::Ok().json(JsonResponse {
            branch,
            head: &head,
            count: hoc,
            commits,
        })),
    };
    handle_hoc_request::<T, _>(state, repo_count, data, branch, mapper).await
}

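// For reference (values illustrative, not real output): a request such as
// `GET /github/vbrandl/hoc/json`, optionally with `?branch=<name>`, is answered
// with the `JsonResponse` fields serialized as JSON, i.e. a body of the shape
// `{"branch": "master", "head": "<commit id>", "count": 12345, "commits": 678}`.
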
/// Wraps a badge SVG in a response whose headers (max-age=0, must-revalidate,
/// no-cache, no-store) ask clients not to cache it, so a fresh badge is fetched
/// on every view.
fn no_cache_response(body: Vec<u8>) -> HttpResponse {
    let expiration = SystemTime::now() + Duration::from_secs(30);
    HttpResponse::Ok()
        .content_type("image/svg+xml")
        .set(Expires(expiration.into()))
        .set(CacheControl(vec![
            CacheDirective::MaxAge(0u32),
            CacheDirective::MustRevalidate,
            CacheDirective::NoCache,
            CacheDirective::NoStore,
        ]))
        .body(body)
}

/// Renders the badge for a repository; if the calculation fails, a red "error"
/// badge is returned instead of an error status.
pub(crate) async fn calculate_hoc<T: Service>(
    state: web::Data<State>,
    repo_count: web::Data<AtomicUsize>,
    data: web::Path<(String, String)>,
    branch: web::Query<BranchQuery>,
) -> HttpResponse {
    let rc_clone = repo_count.clone();
    let mapper = move |r| match r {
        HocResult::NotFound => p404(rc_clone),
        HocResult::Hoc { hoc_pretty, .. } => {
            let badge_opt = BadgeOptions {
                subject: "Hits-of-Code".to_string(),
                color: "#007ec6".to_string(),
                status: hoc_pretty,
            };
            let badge = Badge::new(badge_opt)?;
            // TODO: remove clone
            let body = badge.to_svg().as_bytes().to_vec();

            Ok(no_cache_response(body))
        }
    };
    let branch = branch.branch.as_deref().unwrap_or("master");
    let error_badge = |_| {
        let error_badge = Badge::new(BadgeOptions {
            subject: "Hits-of-Code".to_string(),
            color: "#ff0000".to_string(),
            status: "error".to_string(),
        })
        .unwrap();
        let body = error_badge.to_svg().as_bytes().to_vec();
        no_cache_response(body)
    };
    handle_hoc_request::<T, _>(state, repo_count, data, branch, mapper)
        .await
        .unwrap_or_else(error_badge)
}

async fn overview<T: Service>(
    state: web::Data<State>,
    repo_count: web::Data<AtomicUsize>,
    data: web::Path<(String, String)>,
    branch: web::Query<BranchQuery>,
) -> Result<HttpResponse> {
    let branch = branch.branch.as_deref().unwrap_or("master");
    let base_url = state.settings.base_url.clone();
    let rc_clone = repo_count.clone();
    let mapper = move |r| match r {
        HocResult::NotFound => p404(rc_clone),
        HocResult::Hoc {
            hoc,
            commits,
            hoc_pretty,
            url,
            head,
            repo,
            service_path,
        } => {
            let mut buf = Vec::new();
            let repo_info = RepoInfo {
                commit_url: &T::commit_url(&repo, &head),
                commits,
                base_url: &base_url,
                head: &head,
                hoc,
                hoc_pretty: &hoc_pretty,
                path: &service_path,
                url: &url,
                branch,
            };
            templates::overview(
                &mut buf,
                VERSION_INFO,
                rc_clone.load(Ordering::Relaxed),
                repo_info,
            )?;

            Ok(HttpResponse::Ok().content_type("text/html").body(buf))
        }
    };
    handle_hoc_request::<T, _>(state, repo_count, data, branch, mapper).await
}

#[get("/health_check")]
|
||||||
|
async fn health_check() -> HttpResponse {
|
||||||
|
HttpResponse::Ok().finish()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/")]
|
||||||
|
async fn index(
|
||||||
|
state: web::Data<State>,
|
||||||
|
repo_count: web::Data<AtomicUsize>,
|
||||||
|
) -> Result<HttpResponse> {
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
templates::index(
|
||||||
|
&mut buf,
|
||||||
|
VERSION_INFO,
|
||||||
|
repo_count.load(Ordering::Relaxed),
|
||||||
|
&state.settings.base_url,
|
||||||
|
)?;
|
||||||
|
Ok(HttpResponse::Ok().content_type("text/html").body(buf))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/generate")]
|
||||||
|
async fn generate(
|
||||||
|
params: web::Form<GeneratorForm<'_>>,
|
||||||
|
state: web::Data<State>,
|
||||||
|
repo_count: web::Data<AtomicUsize>,
|
||||||
|
) -> Result<HttpResponse> {
|
||||||
|
let repo = format!("{}/{}", params.user, params.repo);
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
templates::generate(
|
||||||
|
&mut buf,
|
||||||
|
VERSION_INFO,
|
||||||
|
repo_count.load(Ordering::Relaxed),
|
||||||
|
&state.settings.base_url,
|
||||||
|
params.service.url(),
|
||||||
|
params.service.service(),
|
||||||
|
&repo,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok().content_type("text/html").body(buf))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn p404(repo_count: web::Data<AtomicUsize>) -> Result<HttpResponse> {
    let mut buf = Vec::new();
    templates::p404(&mut buf, VERSION_INFO, repo_count.load(Ordering::Relaxed))?;
    Ok(HttpResponse::NotFound().content_type("text/html").body(buf))
}

async fn async_p404(repo_count: web::Data<AtomicUsize>) -> Result<HttpResponse> {
    p404(repo_count)
}

fn css() -> HttpResponse {
    HttpResponse::Ok().content_type("text/css").body(CSS)
}

fn favicon32() -> HttpResponse {
    HttpResponse::Ok().content_type("image/png").body(FAVICON)
}

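// Route overview, as registered in `start_server` below: `/` and `/health_check`,
// the static assets `/tacit-css.min.css` and `/favicon.ico`, the badge generator
// form under `/generate`, and per-service endpoints for github, gitlab and
// bitbucket: `/{service}/{user}/{repo}` (SVG badge), the same path plus `/json`
// (JSON payload), plus `/view` as well as the older `/view/{service}/{user}/{repo}`
// (HTML overview), and plus `/delete` (POST, removes the cached repository).
// The badge, JSON and view handlers all accept an optional `?branch=<name>` query.
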
async fn start_server(listener: TcpListener, settings: Settings) -> std::io::Result<Server> {
    let workers = settings.workers;
    let repo_count =
        // TODO: errorhandling
        web::Data::new(AtomicUsize::new(count::count_repositories(&settings.repodir).unwrap()));
    let state = web::Data::new(State { settings });
    Ok(HttpServer::new(move || {
        App::new()
            .app_data(state.clone())
            .app_data(repo_count.clone())
            .wrap(tracing_actix_web::TracingLogger)
            .wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
            .service(index)
            .service(health_check)
            .service(web::resource("/tacit-css.min.css").route(web::get().to(css)))
            .service(web::resource("/favicon.ico").route(web::get().to(favicon32)))
            .service(generate)
            .service(web::resource("/github/{user}/{repo}").to(calculate_hoc::<GitHub>))
            .service(web::resource("/gitlab/{user}/{repo}").to(calculate_hoc::<Gitlab>))
            .service(web::resource("/bitbucket/{user}/{repo}").to(calculate_hoc::<Bitbucket>))
            .service(
                web::resource("/github/{user}/{repo}/delete")
                    .route(web::post().to(delete_repo_and_cache::<GitHub>)),
            )
            .service(
                web::resource("/gitlab/{user}/{repo}/delete")
                    .route(web::post().to(delete_repo_and_cache::<Gitlab>)),
            )
            .service(
                web::resource("/bitbucket/{user}/{repo}/delete")
                    .route(web::post().to(delete_repo_and_cache::<Bitbucket>)),
            )
            .service(web::resource("/github/{user}/{repo}/json").to(json_hoc::<GitHub>))
            .service(web::resource("/gitlab/{user}/{repo}/json").to(json_hoc::<Gitlab>))
            .service(web::resource("/bitbucket/{user}/{repo}/json").to(json_hoc::<Bitbucket>))
            .service(web::resource("/view/github/{user}/{repo}").to(overview::<GitHub>))
            .service(web::resource("/view/gitlab/{user}/{repo}").to(overview::<Gitlab>))
            .service(web::resource("/view/bitbucket/{user}/{repo}").to(overview::<Bitbucket>))
            .service(web::resource("/github/{user}/{repo}/view").to(overview::<GitHub>))
            .service(web::resource("/gitlab/{user}/{repo}/view").to(overview::<Gitlab>))
            .service(web::resource("/bitbucket/{user}/{repo}/view").to(overview::<Bitbucket>))
            .default_service(web::resource("").route(web::get().to(async_p404)))
    })
    .workers(workers)
    .listen(listener)?
    .run())
}

pub async fn run(listener: TcpListener, settings: Settings) -> std::io::Result<Server> {
    let span = info_span!("hoc", version = env!("CARGO_PKG_VERSION"));
    let _ = span.enter();
    start_server(listener, settings).instrument(span).await
}

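// Note on the calling convention: `run` is async and resolves to a
// `std::io::Result<Server>`, so callers await it once to obtain the configured
// `Server` and then await (or `tokio::spawn`) that server to actually drive it --
// see the new `src/main.rs` and `tests/util/mod.rs` below.
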
394 src/main.rs
@@ -1,384 +1,26 @@
-#![type_length_limit = "2257138"]
-
-#[macro_use]
-extern crate actix_web;
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate log;
-#[macro_use]
-extern crate serde_derive;
-
-mod cache;
-mod config;
-mod count;
-mod error;
-mod service;
-mod statics;
-mod template;
-
-use crate::{
-    cache::CacheState,
-    error::{Error, Result},
-    service::{Bitbucket, FormService, GitHub, Gitlab, Service},
-    statics::{CLIENT, CSS, FAVICON, OPT, REPO_COUNT, VERSION_INFO},
-    template::RepoInfo,
-};
-use actix_web::{
-    http::header::{CacheControl, CacheDirective, Expires},
-    middleware, web, App, HttpResponse, HttpServer,
-};
-use badge::{Badge, BadgeOptions};
-use futures::future::Future;
-use git2::Repository;
-use number_prefix::{NumberPrefix, Prefixed, Standalone};
-use std::{
-    borrow::Cow,
-    fs::create_dir_all,
-    path::Path,
-    process::Command,
-    sync::atomic::Ordering,
-    sync::Arc,
-    time::{Duration, SystemTime},
-};
-
-include!(concat!(env!("OUT_DIR"), "/templates.rs"));
-
-#[derive(Deserialize, Serialize)]
-struct GeneratorForm<'a> {
-    service: FormService,
-    user: Cow<'a, str>,
-    repo: Cow<'a, str>,
-}
-
-struct State {
-    repos: String,
-    cache: String,
-}
-
-#[derive(Serialize)]
-struct JsonResponse<'a> {
-    head: &'a str,
-    count: u64,
-    commits: u64,
-}
-
-fn pull(path: impl AsRef<Path>) -> Result<()> {
-    let repo = Repository::open_bare(path)?;
-    let mut origin = repo.find_remote("origin")?;
-    origin.fetch(&["refs/heads/*:refs/heads/*"], None, None)?;
-    Ok(())
-}
-
-fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String, u64)> {
-    let repo_dir = format!("{}/{}", repo_dir, repo);
-    let cache_dir = format!("{}/{}.json", cache_dir, repo);
-    let cache_dir = Path::new(&cache_dir);
-    let repo = Repository::open_bare(&repo_dir)?;
-    // TODO: do better...
-    let head = match repo.head() {
-        Ok(v) => v,
-        Err(_) => return Err(Error::GitNoMaster),
-    };
-    let head = format!("{}", head.target().ok_or(Error::Internal)?);
-    let mut arg_commit_count = vec!["rev-list".to_string(), "--count".to_string()];
-    let mut arg = vec![
-        "log".to_string(),
-        "--pretty=tformat:".to_string(),
-        "--numstat".to_string(),
-        "--ignore-space-change".to_string(),
-        "--ignore-all-space".to_string(),
-        "--ignore-submodules".to_string(),
-        "--no-color".to_string(),
-        "--find-copies-harder".to_string(),
-        "-M".to_string(),
-        "--diff-filter=ACDM".to_string(),
-    ];
-    let cache = CacheState::read_from_file(&cache_dir, &head)?;
-    match &cache {
-        CacheState::Current { count, commits } => {
-            info!("Using cache for {}", repo_dir);
-            return Ok((*count, head, *commits));
-        }
-        CacheState::Old(cache) => {
-            info!("Updating cache for {}", repo_dir);
-            arg.push(format!("{}..HEAD", cache.head));
-            arg_commit_count.push(format!("{}..HEAD", cache.head));
-        }
-        CacheState::No => {
-            info!("Creating cache for {}", repo_dir);
-            arg_commit_count.push("HEAD".to_string());
-        }
-    };
-    arg.push("--".to_string());
-    arg.push(".".to_string());
-    let output = Command::new("git")
-        .args(&arg)
-        .current_dir(&repo_dir)
-        .output()?
-        .stdout;
-    let output = String::from_utf8_lossy(&output);
-    let output_commits = Command::new("git")
-        .args(&arg_commit_count)
-        .current_dir(&repo_dir)
-        .output()?
-        .stdout;
-    let output_commits = String::from_utf8_lossy(&output_commits);
-    let commits: u64 = output_commits.trim().parse()?;
-    let count: u64 = output
-        .lines()
-        .map(|s| {
-            s.split_whitespace()
-                .take(2)
-                .map(str::parse::<u64>)
-                .filter_map(std::result::Result::ok)
-                .sum::<u64>()
-        })
-        .sum();
-
-    let cache = cache.calculate_new_cache(count, commits, (&head).into());
-    cache.write_to_file(cache_dir)?;
-
-    Ok((cache.count, head, commits))
-}
-
-async fn remote_exists(url: &str) -> Result<bool> {
-    let resp = CLIENT.head(url).send().await?;
-    Ok(resp.status() == reqwest::StatusCode::OK)
-
-    // .map(|resp| resp.status() == reqwest::StatusCode::OK)
-    // .from_err()
-}
-
-enum HocResult {
-    Hoc {
-        hoc: u64,
-        commits: u64,
-        hoc_pretty: String,
-        head: String,
-        url: String,
-        repo: String,
-        service_path: String,
-    },
-    NotFound,
-}
-
-async fn handle_hoc_request<T, F>(
-    state: web::Data<Arc<State>>,
-    data: web::Path<(String, String)>,
-    mapper: F,
-) -> Result<HttpResponse>
-where
-    T: Service,
-    F: Fn(HocResult) -> Result<HttpResponse>,
-{
-    let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
-    let service_path = format!("{}/{}", T::domain(), repo);
-    let path = format!("{}/{}", state.repos, service_path);
-    let url = format!("https://{}", service_path);
-    let remote_exists = remote_exists(&url).await?;
-    let file = Path::new(&path);
-    if !file.exists() {
-        if !remote_exists {
-            warn!("Repository does not exist: {}", url);
-            return mapper(HocResult::NotFound);
-        }
-        info!("Cloning {} for the first time", url);
-        create_dir_all(file)?;
-        let repo = Repository::init_bare(file)?;
-        repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
-        repo.remote_set_url("origin", &url)?;
-        REPO_COUNT.fetch_add(1, Ordering::Relaxed);
-    }
-    pull(&path)?;
-    let (hoc, head, commits) = hoc(&service_path, &state.repos, &state.cache)?;
-    let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
-        Standalone(hoc) => hoc.to_string(),
-        Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
-    };
-    let res = HocResult::Hoc {
-        hoc,
-        commits,
-        hoc_pretty,
-        head,
-        url,
-        repo,
-        service_path,
-    };
-    mapper(res)
-}
-
-fn json_hoc<T: Service>(
-    state: web::Data<Arc<State>>,
-    data: web::Path<(String, String)>,
-) -> impl Future<Output = Result<HttpResponse>> {
-    let mapper = |r| match r {
-        HocResult::NotFound => p404(),
-        HocResult::Hoc {
-            hoc, head, commits, ..
-        } => Ok(HttpResponse::Ok().json(JsonResponse {
-            head: &head,
-            count: hoc,
-            commits,
-        })),
-    };
-    handle_hoc_request::<T, _>(state, data, mapper)
-}
-
-fn calculate_hoc<T: Service>(
-    state: web::Data<Arc<State>>,
-    data: web::Path<(String, String)>,
-) -> impl Future<Output = Result<HttpResponse>> {
-    let mapper = move |r| match r {
-        HocResult::NotFound => p404(),
-        HocResult::Hoc { hoc_pretty, .. } => {
-            let badge_opt = BadgeOptions {
-                subject: "Hits-of-Code".to_string(),
-                color: "#007ec6".to_string(),
-                status: hoc_pretty,
-            };
-            let badge = Badge::new(badge_opt)?;
-            // TODO: remove clone
-            let body = badge.to_svg().as_bytes().to_vec();
-
-            let expiration = SystemTime::now() + Duration::from_secs(30);
-            Ok(HttpResponse::Ok()
-                .content_type("image/svg+xml")
-                .set(Expires(expiration.into()))
-                .set(CacheControl(vec![
-                    CacheDirective::MaxAge(0u32),
-                    CacheDirective::MustRevalidate,
-                    CacheDirective::NoCache,
-                    CacheDirective::NoStore,
-                ]))
-                .body(body))
-        }
-    };
-    handle_hoc_request::<T, _>(state, data, mapper)
-}
-
-fn overview<T: Service>(
-    state: web::Data<Arc<State>>,
-    data: web::Path<(String, String)>,
-) -> impl Future<Output = Result<HttpResponse>> {
-    let mapper = |r| match r {
-        HocResult::NotFound => p404(),
-        HocResult::Hoc {
-            hoc,
-            commits,
-            hoc_pretty,
-            url,
-            head,
-            repo,
-            service_path,
-        } => {
-            let mut buf = Vec::new();
-            let repo_info = RepoInfo {
-                commit_url: &T::commit_url(&repo, &head),
-                commits,
-                domain: &OPT.domain,
-                head: &head,
-                hoc,
-                hoc_pretty: &hoc_pretty,
-                path: &service_path,
-                url: &url,
-            };
-            templates::overview(
-                &mut buf,
-                VERSION_INFO,
-                REPO_COUNT.load(Ordering::Relaxed),
-                repo_info,
-            )?;
-
-            Ok(HttpResponse::Ok().content_type("text/html").body(buf))
-        }
-    };
-    handle_hoc_request::<T, _>(state, data, mapper)
-}
-
-#[get("/")]
-async fn index() -> Result<HttpResponse> {
-    let mut buf = Vec::new();
-    templates::index(
-        &mut buf,
-        VERSION_INFO,
-        REPO_COUNT.load(Ordering::Relaxed),
-        &OPT.domain,
-    )?;
-    Ok(HttpResponse::Ok().content_type("text/html").body(buf))
-}
-
-#[post("/generate")]
-async fn generate(params: web::Form<GeneratorForm<'_>>) -> Result<HttpResponse> {
-    let repo = format!("{}/{}", params.user, params.repo);
-    let mut buf = Vec::new();
-    templates::generate(
-        &mut buf,
-        VERSION_INFO,
-        REPO_COUNT.load(Ordering::Relaxed),
-        &OPT.domain,
-        params.service.url(),
-        params.service.service(),
-        &repo,
-    )?;
-
-    Ok(HttpResponse::Ok().content_type("text/html").body(buf))
-}
-
-fn p404() -> Result<HttpResponse> {
-    let mut buf = Vec::new();
-    templates::p404(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed))?;
-    Ok(HttpResponse::NotFound().content_type("text/html").body(buf))
-}
-
-async fn async_p404() -> Result<HttpResponse> {
-    p404()
-}
-
-#[get("/tacit-css.min.css")]
-fn css() -> HttpResponse {
-    HttpResponse::Ok().content_type("text/css").body(CSS)
-}
-
-#[get("/favicon.ico")]
-fn favicon32() -> HttpResponse {
-    HttpResponse::Ok().content_type("image/png").body(FAVICON)
-}
-
-async fn start_server() -> std::io::Result<()> {
-    let interface = format!("{}:{}", OPT.host, OPT.port);
-    let state = Arc::new(State {
-        repos: OPT.outdir.display().to_string(),
-        cache: OPT.cachedir.display().to_string(),
-    });
-    HttpServer::new(move || {
-        App::new()
-            .data(state.clone())
-            .wrap(middleware::Logger::default())
-            .wrap(middleware::NormalizePath)
-            .service(index)
-            .service(css)
-            .service(favicon32)
-            .service(generate)
-            .service(web::resource("/github/{user}/{repo}").to(calculate_hoc::<GitHub>))
-            .service(web::resource("/gitlab/{user}/{repo}").to(calculate_hoc::<Gitlab>))
-            .service(web::resource("/bitbucket/{user}/{repo}").to(calculate_hoc::<Bitbucket>))
-            .service(web::resource("/github/{user}/{repo}/json").to(json_hoc::<GitHub>))
-            .service(web::resource("/gitlab/{user}/{repo}/json").to(json_hoc::<Gitlab>))
-            .service(web::resource("/bitbucket/{user}/{repo}/json").to(json_hoc::<Bitbucket>))
-            .service(web::resource("/view/github/{user}/{repo}").to(overview::<GitHub>))
-            .service(web::resource("/view/gitlab/{user}/{repo}").to(overview::<Gitlab>))
-            .service(web::resource("/view/bitbucket/{user}/{repo}").to(overview::<Bitbucket>))
-            .default_service(web::resource("").route(web::get().to(async_p404)))
-    })
-    .workers(OPT.workers)
-    .bind(interface)?
-    .run()
-    .await
-}
-
-#[actix_rt::main]
-async fn main() -> std::io::Result<()> {
-    config::init().await.unwrap();
-    start_server().await
-}
+use hoc::{config::Settings, telemetry};
+
+use std::net::TcpListener;
+
+fn init() {
+    dotenv::dotenv().ok();
+    openssl_probe::init_ssl_cert_env_vars();
+
+    telemetry::init_subscriber(telemetry::get_subscriber("hoc", "info"))
+}
+
+#[actix_rt::main]
+async fn main() -> std::io::Result<()> {
+    init();
+
+    // TODO: error handling
+    let settings = Settings::load().expect("Cannot load config");
+
+    let address = format!("{}:{}", settings.host, settings.port);
+    // TODO: error handling
+    let listener = TcpListener::bind(address)?;
+    hoc::run(listener, settings)
+        .await
+        .expect("Server error")
+        .await
+}

@@ -1,14 +1,10 @@
-use crate::{config::Opt, count::count_repositories};
-use std::sync::atomic::AtomicUsize;
-use structopt::StructOpt;
-
 pub struct VersionInfo<'a> {
     pub commit: &'a str,
     pub version: &'a str,
 }
 
 pub(crate) const VERSION_INFO: VersionInfo = VersionInfo {
-    commit: env!("VERGEN_SHA_SHORT"),
+    commit: env!("VERGEN_GIT_SHA_SHORT"),
     version: env!("CARGO_PKG_VERSION"),
 };
 pub(crate) const CSS: &str = include_str!("../static/tacit-css.min.css");
@@ -16,7 +12,4 @@ pub(crate) const FAVICON: &[u8] = include_bytes!("../static/favicon32.png");
 
 lazy_static! {
     pub(crate) static ref CLIENT: reqwest::Client = reqwest::Client::new();
-    pub(crate) static ref OPT: Opt = Opt::from_args();
-    pub(crate) static ref REPO_COUNT: AtomicUsize =
-        AtomicUsize::new(count_repositories(&OPT.outdir).unwrap());
 }

21 src/telemetry.rs (new file)
@@ -0,0 +1,21 @@
use tracing::{subscriber::set_global_default, Subscriber};
use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
use tracing_log::LogTracer;
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};

pub fn get_subscriber(name: &str, env_filter: &str) -> impl Subscriber + Send + Sync {
    let env_filter =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));

    let formatting_layer = BunyanFormattingLayer::new(name.to_string(), std::io::stdout);

    Registry::default()
        .with(env_filter)
        .with(JsonStorageLayer)
        .with(formatting_layer)
}

pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
    LogTracer::init().expect("Failed to set logger");
    set_global_default(subscriber).expect("Failed to set tracing subscriber");
}

@@ -1,10 +1,11 @@
 pub struct RepoInfo<'a> {
     pub commit_url: &'a str,
     pub commits: u64,
-    pub domain: &'a str,
+    pub base_url: &'a str,
     pub head: &'a str,
     pub hoc: u64,
     pub hoc_pretty: &'a str,
     pub path: &'a str,
     pub url: &'a str,
+    pub branch: &'a str,
 }

@@ -1,7 +1,7 @@
 @use super::base;
 @use crate::statics::VersionInfo;
 
-@(version_info: VersionInfo, repo_count: usize, domain: &str, url: &str, service: &str, path: &str)
+@(version_info: VersionInfo, repo_count: usize, base_url: &str, url: &str, service: &str, path: &str)
 
 @:base("Hits-of-Code Badges", "Badge Generator", {
 
@@ -10,7 +10,7 @@ Here is the markdown for the badge for <a href="https://@url/@path">@url/@path</
 </p>
 
 <pre>
-[](https://@domain/view/@service/@path)
+[](@base_url/@service/@path/view)
 </pre>
 
 <p>
@@ -18,6 +18,6 @@ It will be rendered like this
 </p>
 
 <pre>
-<a href="https://@domain/view/@service/@path"><img src="https://@domain/@service/@path" alt="example badge" /></a>
+<a href="@base_url/@service/@path/view"><img src="@base_url/@service/@path" alt="example badge" /></a>
 </pre>
 }, version_info, repo_count)

@@ -1,7 +1,7 @@
 @use super::base;
 @use crate::statics::VersionInfo;
 
-@(version_info: VersionInfo, repo_count: usize, domain: &str)
+@(version_info: VersionInfo, repo_count: usize, base_url: &str)
 
 @:base("Hits-of-Code Badges", "Hits-of-Code Badges", {
 
@@ -24,7 +24,7 @@ used for GitHub, GitLab and Bitbucket repositories. Just put the following code
 </p>
 
 <pre>
-[](https://@domain/view/<service>/<user>/<repo>)
+[](@base_url/<service>/<user>/<repo>/view)
 </pre>
 
 <p>
@@ -33,7 +33,7 @@ following Markdown
 </p>
 
 <pre>
-[](https://@domain/view/github/vbrandl/hoc)
+[](@base_url/github/vbrandl/hoc/view)
 </pre>
 
 <p>
@@ -41,15 +41,21 @@ would render this badge:
 </p>
 
 <pre>
-<a href="https://@domain/view/github/vbrandl/hoc"><img src="https://@domain/github/vbrandl/hoc"
+<a href="@base_url/github/vbrandl/hoc/view"><img src="@base_url/github/vbrandl/hoc"
 alt="example badge" /></a>
 </pre>
 
+<p>
+By default, this service assumes the existence of a branch named <code>master</code>. If no branch with that name exists
+in your repository or you want a badge for another branch of your repository, just append
+<code>?branch=<branch-name></code> to the URL.
+</p>
+
 <p>
 You can also request the HoC as JSON by appending <code>/json</code> to the request path. This will return a JSON object
 with three fields: <code>count</code> (the HoC value), <code>commits</code> (the number of commits) and
 <code>head</code> (the commit ref of HEAD). Requesting <a
-href="https://@domain/github/vbrandl/hoc/json">https://@domain/github/vbrandl/hoc/json</a> might return something along
+href="@base_url/github/vbrandl/hoc/json">@base_url/github/vbrandl/hoc/json</a> might return something along
 the lines of
 </p>
 
@@ -88,7 +94,7 @@ welcome.
 
 <p>
 You can reach me via mail: <a href="mailto:mail+hoc@@vbrandl.net">mail+hoc@@vbrandl.net</a> preferably using
-my <a href="https://mirror.oldsql.cc/key.asc">GPG key</a>
+my <a href="https://www.vbrandl.net/static/keys/0x1FFE431282F4B8CC0A7579167FB009175885FC76.asc">GPG key</a>
 (<a href="http://pool.sks-keyservers.net/pks/lookup?op=get&search=0x1FFE431282F4B8CC0A7579167FB009175885FC76">from a
 keyserver</a>), or by using any other UID from my key.
 </p>

@@ -9,7 +9,8 @@
 <p>
 The project <a href="@repo_info.url">@repo_info.url</a> has
 <strong>@repo_info.hoc_pretty</strong> (exactly @repo_info.hoc) hits of code at
-<a href="@repo_info.commit_url">@repo_info.head</a>. The repository contains
+<a href="@repo_info.commit_url">@repo_info.head</a> on the
+<code>@repo_info.branch</code> branch. The repository contains
 <strong>@repo_info.commits</strong> commits.
 </p>
 
@@ -18,6 +19,11 @@ To include the badge in your readme, use the following markdown:
 </p>
 
 <pre>
-[](https://@repo_info.domain/view/@repo_info.path)
+[](@repo_info.base_url/@repo_info.path/view?branch=@repo_info.branch)
 </pre>
+
+
+<form method="post" action="/@repo_info.path/delete">
+    <button type="submit">Rebuild Cache</button>
+</form>
 }, version_info, repo_count)

@@ -3,11 +3,11 @@
 
 @(version_info: VersionInfo, repo_count: usize)
 
-@:base("Master Branch not Found - Hits-of-Code Badges", "404 - Master Branch not Found", {
+@:base("Branch not Found - Hits-of-Code Badges", "404 - Branch not Found", {
 <p>
-<big>Sorry</big>. I couldn't find the master branch of your repository.
-Currently this service depends on the existence of a master branch. Please go
-<a href="/">back to the homepage</a>.
+<big>Sorry</big>. I couldn't find the requested branch of your repository. Currently this service assumes the
+existence of a branch named <code>master</code>. If you'd like to request a badge for another branch, you can do so by
+attaching <code>?branch=<branch-name></code> to the request.
 </p>
 
 <p>

18 tests/badge.rs (new file)
@@ -0,0 +1,18 @@
mod util;

use actix_web::client;

#[actix_rt::test]
async fn badge_succeeds() {
    let test_app = util::spawn_app().await;

    let client = client::Client::default();

    let response = client
        .get(&format!("{}/github/vbrandl/hoc", test_app.address))
        .send()
        .await
        .expect("Failed to execute request");

    assert!(response.status().is_success());
}

18 tests/health_check.rs (new file)
@@ -0,0 +1,18 @@
mod util;

use actix_web::client;

#[actix_rt::test]
async fn health_check_works() {
    let test_app = util::spawn_app().await;

    let client = client::Client::default();

    let response = client
        .get(&format!("{}/health_check", test_app.address))
        .send()
        .await
        .expect("Failed to execute request");

    assert!(response.status().is_success());
}

18 tests/index.rs (new file)
@@ -0,0 +1,18 @@
mod util;

use actix_web::client;

#[actix_rt::test]
async fn index_returns_success() {
    let test_app = util::spawn_app().await;

    let client = client::Client::default();

    let response = client
        .get(&format!("{}/", test_app.address))
        .send()
        .await
        .expect("Failed to execute request");

    assert!(response.status().is_success());
}

18 tests/json.rs (new file)
@@ -0,0 +1,18 @@
mod util;

use actix_web::client;

#[actix_rt::test]
async fn json_returns_success() {
    let test_app = util::spawn_app().await;

    let client = client::Client::default();

    let response = client
        .get(&format!("{}/github/vbrandl/hoc/json", test_app.address))
        .send()
        .await
        .expect("Failed to execute request");

    assert!(response.status().is_success());
}

50 tests/util/mod.rs (new file)
@@ -0,0 +1,50 @@
use hoc::{config::Settings, telemetry};

use std::net::TcpListener;

use tempfile::{tempdir, TempDir};

lazy_static::lazy_static! {
    static ref TRACING: () = {
        let filter = if std::env::var("TEST_LOG").is_ok() { "debug" } else { "" };
        let subscriber = telemetry::get_subscriber("test", filter);
        telemetry::init_subscriber(subscriber);
    };
}

pub struct TestApp {
    pub address: String,
    repo_dir: TempDir,
    cache_dir: TempDir,
}

pub async fn spawn_app() -> TestApp {
    lazy_static::initialize(&TRACING);

    let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind random port");

    let port = listener.local_addr().unwrap().port();
    let address = format!("http://127.0.0.1:{}", port);

    let repo_dir = tempdir().expect("Cannot create repo_dir");
    let cache_dir = tempdir().expect("Cannot create cache_dir");

    let mut settings = Settings::load().expect("Failed to read configuration.");
    settings.repodir = repo_dir.path().to_path_buf();
    settings.cachedir = cache_dir.path().to_path_buf();
    // configuration.database.database_name = Uuid::new_v4().to_string();

    // let connection_pool = configure_database(&configuration.database).await;

    let server = hoc::run(listener, settings)
        .await
        .expect("Failed to bind address");

    let _ = tokio::spawn(server);

    TestApp {
        address,
        repo_dir,
        cache_dir,
    }
}

80 vm.nix (new file)
@@ -0,0 +1,80 @@
# Nix configuration for a VM to run a custom configured Vim
#
# It is intended as an example of building a VM that builds Vim for testing
# and evaluation purposes. It does not represent a production or secure
# deployment.

{ sources ? import ./nix/sources.nix
, pkgs ? import sources.nixpkgs { }
, callPackage ? pkgs.callPackage
, config
, lib
, ...
}:
# config, pkgs, lib, ... }:

let
  hoc = pkgs.callPackage ./default.nix { };

  # hoc = cargoNix.rootCrate.build;
in
{
  environment = {
    systemPackages = with pkgs; [
      (
        hoc
        # import ./default.nix
      )
    ];
  };


  networking.hostName = "hoc"; # Define your hostname.

  system.stateVersion = "19.09"; # The version of NixOS originally installed

  # Set security options:
  security = {
    sudo = {
      enable = true; # Enable sudo
      wheelNeedsPassword = false; # Allow wheel members to run sudo without a password
    };
  };

  networking.firewall.allowedTCPPorts = [ 80 ];

  # List services that you want to enable:
  services.openssh = {
    enable = true; # Enable the OpenSSH daemon.
    #permitRootLogin = "yes"; # Probably want to change this in production
    #challengeResponseAuthentication = true; # Probably want to change this in production
    #passwordAuthentication = true; # Probably want to change this in production
    openFirewall = true;
    hostKeys = [
      {
        path = "/etc/ssh/ssh_host_ed25519_key"; # Generate a key for the vm
        type = "ed25519"; # Use the current best key type
      }
    ];
  };

  # Users of the Vim VM:
  users.mutableUsers = false; # Remove any users not defined in here

  users.users.root = {
    password = "123456"; # Probably want to change this in production
  };

  # Misc groups:
  users.groups.nixos.gid = 1000;

  # NixOS users
  users.users.nixos = {
    isNormalUser = true;
    uid = 1000;
    group = "nixos";
    extraGroups = [ "wheel" ];
    password = "123456"; # Probably want to change this in production
  };

}