Compare commits
282 Commits
Author | SHA1 | Date | |
---|---|---|---|
d6409c21ec | |||
990b5acbda | |||
f9e14e2ffd | |||
a73afe6851 | |||
20544b27d9 | |||
288573b1a4 | |||
a12755d7be | |||
a248531ce2 | |||
de7919a031 | |||
3913039010 | |||
078d3cdcf9 | |||
c552a84870 | |||
7c1a14b6ad | |||
c69b8207b8 | |||
a319f400e9 | |||
ddcb041f3f | |||
2a73370c9f | |||
689a2109fa | |||
e82146c912 | |||
568398f1c6 | |||
1cc6363cba | |||
dcfca96051 | |||
981c347acf | |||
22aad6ed77 | |||
b06bbbba8f | |||
e06e6736c0 | |||
0abfba8b4f | |||
ca87bf9282 | |||
4f37b101ae | |||
e16f02d729 | |||
1256996d19 | |||
c1d1551283 | |||
26bdb3dbb5 | |||
c9d959d4c5 | |||
15ca2f10b8 | |||
2a81fbe9f2 | |||
fe276458db | |||
73d034ff00 | |||
63e6a3d2db | |||
fbb5bbd20e | |||
7ff238fea6 | |||
06e9926cb5 | |||
c9a54eda49 | |||
382cd73bae | |||
8e5270a1d8 | |||
fdf3fda24a | |||
219099779b | |||
7393ba54f1 | |||
c14cf28a3e | |||
68a0604365 | |||
8dc07554f6 | |||
64bb6c2922 | |||
4be0cdeb5f | |||
0c9da804bb | |||
f95a2a7923 | |||
8eff7db8ae | |||
57df8ab209 | |||
ffea4f21ef | |||
0b0e88c604 | |||
c48e27c649 | |||
dfa353c356 | |||
2f1209d1e0 | |||
195815fc8a | |||
4e393fdd57 | |||
75f14f7878 | |||
349640acc5 | |||
1d0eca90a7 | |||
81bb65db4e | |||
6295477ccf | |||
b7324b3b38 | |||
e90fb7f54c | |||
253de8cea2 | |||
bd014301fd | |||
45c0d26e33 | |||
0edceb6a7d | |||
056d798d61 | |||
96f2e9422c | |||
df19951729 | |||
bfc97dad32 | |||
d28ac95aa2 | |||
8a0df3e52c | |||
e2c42a5287 | |||
f638124930 | |||
9cabea63c3 | |||
20074ac4e1 | |||
0ec7bd93d8 | |||
c1a1af0109 | |||
2c0094670b | |||
58fdc32627 | |||
cbeca19467 | |||
4cc8bd4385 | |||
fac1efeb7d | |||
1abc2d6333 | |||
a69c523e3c | |||
766c67f723 | |||
2f70e42f9a | |||
763910b515 | |||
dca32e4317 | |||
c301978f0b | |||
97fcb5a420 | |||
724c49d056 | |||
7b5d225701 | |||
90fc0ee584 | |||
978321e6ef | |||
7e5adbee24 | |||
803f95cde8 | |||
31fe058879 | |||
421c1a4164 | |||
06fa568225 | |||
520ac2442f | |||
61c4b18bf7 | |||
a3ccfdc4a3 | |||
70ce0f71e2 | |||
ce6150c48b | |||
95b60f3753 | |||
ec9f7a3635 | |||
9fef178a27 | |||
7af58de9fa | |||
6c414bf07d | |||
d39a81720a | |||
9ed5a5b257 | |||
f3c138b7d4 | |||
51b8519fc2 | |||
b1bce024de | |||
89a5e52857 | |||
da9f9263d1 | |||
62accd6e43 | |||
210a1f4592 | |||
3708768f34 | |||
94265ba39f | |||
7e3ca9c620 | |||
a3c978e0fc | |||
67db10460f | |||
615f71bfd7 | |||
1010d5cf0d | |||
6896a22409 | |||
3c2f06ebae | |||
3c945f5762 | |||
903abdefdc | |||
01689fc89e | |||
ede937d651 | |||
1805d86048 | |||
a81de6f89c | |||
dfc49c6b64 | |||
c7cde6222a | |||
fb889aa0ec | |||
36f29a410b | |||
e65efb5ea1 | |||
2127dbe879 | |||
9fe0c080c7 | |||
5f132ed0c8 | |||
5fe25b83e1 | |||
72655edcfc | |||
2ef4412f0a | |||
3863a870c5 | |||
7e29eab063 | |||
303445f3ad | |||
6a03ae9fff | |||
9ac60b8a26 | |||
d2c23ebf4f | |||
d0ddc7f62d | |||
164c97f453 | |||
c702bec8e2 | |||
2c002a1865 | |||
95efa4d875 | |||
65f1575045 | |||
9f95ae74a4 | |||
37159f8909 | |||
0142425f1e | |||
b72641221b | |||
50d985ffd4 | |||
8b0cc49724 | |||
6d0d53af1b | |||
b666a3e3b3 | |||
f5b37ba700 | |||
beb3c8330c | |||
34ee7e3853 | |||
84cf8b9a6f | |||
9ef0337605 | |||
bbf5bba490 | |||
9991f6c545 | |||
37e3c5ee72 | |||
edab6fd3dd | |||
97197190f5 | |||
4959fabd29 | |||
fecd5d0b6a | |||
1060edaebe | |||
bc9f411297 | |||
7fc920cddd | |||
6a4c1edf9e | |||
c1ea45d517 | |||
0b729da42f | |||
c0d0aa2176 | |||
41159c4ba4 | |||
b40b6bcad2 | |||
6e86f49585 | |||
c8c73b6b83 | |||
c6cc956f4d | |||
e37489d2cb | |||
8246be996e | |||
1f32f3eda4 | |||
ab6c317241 | |||
1223d429db | |||
9613aa7848 | |||
334fa8ced5 | |||
062343dd32 | |||
bdb2a151fa | |||
7e66393966 | |||
32ac5ca0d8 | |||
b3b4cd0e42 | |||
be08b87bbb | |||
cf4d427b96 | |||
b5ebc54372 | |||
231741629e | |||
135c5756b1 | |||
79cfee5fa5 | |||
1c9b5db4e1 | |||
6e0e452395 | |||
9e44731650 | |||
f673d71387 | |||
eda1822dce | |||
fb64af3456 | |||
0837284798 | |||
a7039f15b5 | |||
b1be281982 | |||
2795995f58 | |||
913cc9dc75 | |||
645d6e4a2b | |||
e455a89de4 | |||
f2cae46624 | |||
f47cbbe81a | |||
85f90c0bdb | |||
9143e5dbf8 | |||
865e3c7bbd | |||
5ebdd4067c | |||
55a1405e71 | |||
1436336b6a | |||
5a74306e49 | |||
614e5a6da5 | |||
0128e267cc | |||
c3dffac5da | |||
aaaff22907 | |||
ddfcbf11f1 | |||
3bee454548 | |||
9cff6f1f13 | |||
c946c5d0b4 | |||
88ae90af95 | |||
9f783dffa8 | |||
2cdf64ed80 | |||
9e024b530e | |||
0db7b26541 | |||
009d2c4813 | |||
f0ed6c2e23 | |||
d16a21e74e | |||
7e721ac212 | |||
4eb5d06cd1 | |||
b71207ca51 | |||
f3902f800f | |||
b8e454762e | |||
268734efdd | |||
c71925f61e | |||
c5bbd14a05 | |||
a239a3f80b | |||
00b4e60341 | |||
ed1cafafd0 | |||
bdb10fd54a | |||
1f01c3b964 | |||
3f94789b8b | |||
e4f8604861 | |||
25a179dad6 | |||
6ee44f7ba3 | |||
e9ebbee957 | |||
5211481226 | |||
fd230db0cb | |||
92a95f33d4 | |||
7e13f93ee3 | |||
36afc8732e | |||
abeed1f971 | |||
03a0743517 | |||
cbfd56b33a | |||
9c5208d6e4 | |||
92d7b6668a |
31
.github/workflows/audit.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: Security audit
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 1 * * *'
|
||||
push:
|
||||
paths:
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
security_audit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: /usr/share/rust/.cargo/registry
|
||||
key: ${{ runner.os }}-cargo-registry
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: /usr/share/rust/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-index
|
||||
|
||||
- uses: actions-rs/audit-check@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
20
.github/workflows/nix-build.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
name: "Nix Build"
|
||||
|
||||
on: [push]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: cachix/install-nix-action@v8
|
||||
- name: Cache nix store
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: /nix
|
||||
key: ${{ runner.os }}-nix-store
|
||||
- uses: cachix/cachix-action@v5
|
||||
with:
|
||||
name: hitsofcode
|
||||
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
|
||||
attributes: package
|
94
.github/workflows/rust.yml
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
name: Rust
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Linting and Formatting Checks
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
- name: Install rustfmt
|
||||
run: rustup component add rustfmt
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo build
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Check Formatting
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
|
||||
- name: Install clippy
|
||||
run: rustup component add clippy
|
||||
|
||||
- name: Clippy Linting
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: -- -D warnings
|
||||
|
||||
test:
|
||||
name: Run Tests
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
# add windows-latest when it is clear why tests are failing
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo build
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Run Tests
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
1
.gitignore
vendored
@@ -3,3 +3,4 @@
|
||||
repos
|
||||
cache
|
||||
hoc.log
|
||||
result
|
||||
|
@@ -1,6 +1,6 @@
|
||||
image: docker:stable
|
||||
image: docker:19.03
|
||||
services:
|
||||
- docker:dind
|
||||
- docker:19.03-dind
|
||||
|
||||
stages:
|
||||
- build
|
||||
@@ -9,6 +9,8 @@ stages:
|
||||
variables:
|
||||
DOCKER_HOST: tcp://docker:2375
|
||||
DOCKER_DRIVER: overlay2
|
||||
# DOCKER_TLS_CERTDIR: "/certs"
|
||||
DOCKER_TLS_CERTDIR: ""
|
||||
CONTAINER_BUILDER_IMAGE: $CI_REGISTRY_IMAGE:builder-latest
|
||||
CONTAINER_TEST_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
|
||||
CONTAINER_RELEASE_IMAGE: $CI_REGISTRY_IMAGE:latest
|
||||
|
3143
Cargo.lock
generated
File diff suppressed because it is too large
38
Cargo.toml
@@ -1,27 +1,31 @@
|
||||
[package]
|
||||
name = "hoc"
|
||||
version = "0.9.3"
|
||||
version = "0.14.3"
|
||||
authors = ["Valentin Brandl <vbrandl@riseup.net>"]
|
||||
edition = "2018"
|
||||
build = "build.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-web = "1.0.2"
|
||||
badge = "0.2.0"
|
||||
bytes = "0.4.12"
|
||||
futures = "0.1.27"
|
||||
git2 = "0.9.1"
|
||||
lazy_static = "1.3.0"
|
||||
log = "0.4.6"
|
||||
log4rs = "0.8.3"
|
||||
number_prefix = "0.3.0"
|
||||
actix-web = "3.1.0"
|
||||
badge = "0.3.0"
|
||||
bytes = "0.6.0"
|
||||
futures = "0.3.7"
|
||||
git2 = "0.13.12"
|
||||
lazy_static = "1.4.0"
|
||||
log = "0.4.11"
|
||||
log4rs = "0.13.0"
|
||||
number_prefix = "0.4.0"
|
||||
openssl-probe = "0.1.2"
|
||||
reqwest = "0.9.17"
|
||||
serde = "1.0.93"
|
||||
serde_derive = "1.0.93"
|
||||
serde_json = "1.0.39"
|
||||
structopt = "0.2.16"
|
||||
reqwest = "0.10.8"
|
||||
serde = "1.0.117"
|
||||
serde_derive = "1.0.103"
|
||||
serde_json = "1.0.59"
|
||||
structopt = "0.3.20"
|
||||
actix-rt = "1.1.1"
|
||||
|
||||
[build-dependencies]
|
||||
ructe = "0.6.4"
|
||||
vergen = "3.0.4"
|
||||
ructe = "0.12.0"
|
||||
vergen = "3.1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.1.0"
|
||||
|
@@ -29,6 +29,10 @@ https://<host>/<service>/<user>/<repo>/json
|
||||
|
||||
There is also an overview page available via `https://<host>/view/<service>/<user>/<repo>`
|
||||
|
||||
To delete a repository and the cache from the server, send a `POST` request to
|
||||
`https://<host>/<service>/<user>/<repo>/delete`. On the overview page, there is a button to perform this operation. It
|
||||
will respond with a redirect to the overview page so the cache is rebuilt directly.
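As a rough sketch (placeholders exactly as above; `curl` is only an assumed client, any HTTP client works):

$ curl -X POST 'https://<host>/<service>/<user>/<repo>/delete'   # substitute host, service, user and repo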
|
||||
|
||||
## Building
|
||||
|
||||
The code can be built as a standalone binary, using `cargo` or as a Docker container. Run either
|
||||
@@ -45,6 +49,10 @@ $ docker build .
|
||||
|
||||
inside the repository.
|
||||
|
||||
I'm currently working on migrating to [nix](https://nixos.org/nix). To get a
|
||||
development shell, run `nix-shell`; to build the package, run `nix-build --attr
|
||||
package` and to build the Docker image, run `nix-build --attr dockerImage`.
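Summarised as commands (a sketch of exactly the invocations named above):

$ nix-shell                      # enter the development shell
$ nix-build --attr package       # build the package
$ nix-build --attr dockerImage   # build the Docker image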
|
||||
|
||||
|
||||
## Running
|
||||
|
||||
|
1
crate-hashes.json
Normal file
@@ -0,0 +1 @@
|
||||
{}
|
56
default.nix
Normal file
@@ -0,0 +1,56 @@
|
||||
{ sources ? import ./nix/sources.nix
|
||||
, system ? builtins.currentSystem
|
||||
}:
|
||||
|
||||
let
|
||||
rustOverlay = import "${sources.nixpkgs-mozilla}/rust-overlay.nix";
|
||||
cargo2nixOverlay = import "${sources.cargo2nix}/overlay";
|
||||
|
||||
pkgs = import sources.nixpkgs {
|
||||
# pkgs = import <nixpkgs> {
|
||||
inherit system;
|
||||
overlays = [ cargo2nixOverlay rustOverlay ];
|
||||
};
|
||||
|
||||
rustPkgs = pkgs.rustBuilder.makePackageSet' {
|
||||
rustChannel = "stable";
|
||||
packageFun = import ./Cargo.nix;
|
||||
localPatterns =
|
||||
[
|
||||
''^(src|tests)(/.*)?''
|
||||
''[^/]*\.(rs|toml)$''
|
||||
# include other directories from the project repository
|
||||
''^templates(/.*)?''
|
||||
''^static(/.*)?''
|
||||
''^.git.*(/.*)?''
|
||||
];
|
||||
# packageOverrides
|
||||
};
|
||||
in
|
||||
rec {
|
||||
inherit rustPkgs;
|
||||
shell = pkgs.mkShell {
|
||||
inputsFrom = pkgs.lib.mapAttrsToList (_: pkg: pkg { }) rustPkgs.noBuild.workspace;
|
||||
nativeBuildInputs = with rustPkgs; [ cargo rustc ];
|
||||
};
|
||||
package = (rustPkgs.workspace.hoc {}).overrideAttrs (drv: {
|
||||
buildInputs = drv.buildInputs or [ ] ++ [ pkgs.git ];
|
||||
});
|
||||
dockerImage =
|
||||
pkgs.dockerTools.buildImage {
|
||||
name = "vbrandl/hits-of-code";
|
||||
tag = package.version;
|
||||
|
||||
contents =
|
||||
[
|
||||
package
|
||||
pkgs.cacert
|
||||
pkgs.gitMinimal
|
||||
];
|
||||
|
||||
config = {
|
||||
Cmd = [ "/bin/hoc" ];
|
||||
WorkingDir = "/home/hoc";
|
||||
};
|
||||
};
|
||||
}
|
50
nix/sources.json
Normal file
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"cargo2nix": {
|
||||
"branch": "master",
|
||||
"description": "Convert a Cargo.lock to mkRustCrate statements for import in Nix",
|
||||
"homepage": "",
|
||||
"owner": "tenx-tech",
|
||||
"repo": "cargo2nix",
|
||||
"rev": "7bc062ccffc41dc7d3759b8b797e8b4f8dd23a15",
|
||||
"sha256": "1z7xwk1hbp26aydsk3y07riy0ivwqss06n1470mvdl7allfcd1w5",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/tenx-tech/cargo2nix/archive/7bc062ccffc41dc7d3759b8b797e8b4f8dd23a15.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"niv": {
|
||||
"branch": "master",
|
||||
"description": "Easy dependency management for Nix projects",
|
||||
"homepage": "https://github.com/nmattia/niv",
|
||||
"owner": "nmattia",
|
||||
"repo": "niv",
|
||||
"rev": "98c74a80934123cb4c3bf3314567f67311eb711a",
|
||||
"sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"nixpkgs": {
|
||||
"branch": "nixpkgs-unstable",
|
||||
"description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to",
|
||||
"homepage": "https://github.com/NixOS/nixpkgs",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs-channels",
|
||||
"rev": "f6bfb371cba2b5a02f200c2747c1fe2c72bd782f",
|
||||
"sha256": "0y3hlbyvznrpr1d2vxj2511hkjg733wdnxfaib3fgy9i9jr8ivzn",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/NixOS/nixpkgs-channels/archive/f6bfb371cba2b5a02f200c2747c1fe2c72bd782f.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
},
|
||||
"nixpkgs-mozilla": {
|
||||
"branch": "master",
|
||||
"description": "mozilla related nixpkgs (extends nixos/nixpkgs repo)",
|
||||
"homepage": null,
|
||||
"owner": "mozilla",
|
||||
"repo": "nixpkgs-mozilla",
|
||||
"rev": "e912ed483e980dfb4666ae0ed17845c4220e5e7c",
|
||||
"sha256": "08fvzb8w80bkkabc1iyhzd15f4sm7ra10jn32kfch5klgl0gj3j3",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/mozilla/nixpkgs-mozilla/archive/e912ed483e980dfb4666ae0ed17845c4220e5e7c.tar.gz",
|
||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||
}
|
||||
}
|
134
nix/sources.nix
Normal file
@@ -0,0 +1,134 @@
|
||||
# This file has been generated by Niv.
|
||||
|
||||
let
|
||||
|
||||
#
|
||||
# The fetchers. fetch_<type> fetches specs of type <type>.
|
||||
#
|
||||
|
||||
fetch_file = pkgs: spec:
|
||||
if spec.builtin or true then
|
||||
builtins_fetchurl { inherit (spec) url sha256; }
|
||||
else
|
||||
pkgs.fetchurl { inherit (spec) url sha256; };
|
||||
|
||||
fetch_tarball = pkgs: spec:
|
||||
if spec.builtin or true then
|
||||
builtins_fetchTarball { inherit (spec) url sha256; }
|
||||
else
|
||||
pkgs.fetchzip { inherit (spec) url sha256; };
|
||||
|
||||
fetch_git = spec:
|
||||
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
|
||||
|
||||
fetch_builtin-tarball = spec:
|
||||
builtins.trace
|
||||
''
|
||||
WARNING:
|
||||
The niv type "builtin-tarball" will soon be deprecated. You should
|
||||
instead use `builtin = true`.
|
||||
|
||||
$ niv modify <package> -a type=tarball -a builtin=true
|
||||
''
|
||||
builtins_fetchTarball { inherit (spec) url sha256; };
|
||||
|
||||
fetch_builtin-url = spec:
|
||||
builtins.trace
|
||||
''
|
||||
WARNING:
|
||||
The niv type "builtin-url" will soon be deprecated. You should
|
||||
instead use `builtin = true`.
|
||||
|
||||
$ niv modify <package> -a type=file -a builtin=true
|
||||
''
|
||||
(builtins_fetchurl { inherit (spec) url sha256; });
|
||||
|
||||
#
|
||||
# Various helpers
|
||||
#
|
||||
|
||||
# The set of packages used when specs are fetched using non-builtins.
|
||||
mkPkgs = sources:
|
||||
let
|
||||
sourcesNixpkgs =
|
||||
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
|
||||
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
|
||||
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
|
||||
in
|
||||
if builtins.hasAttr "nixpkgs" sources
|
||||
then sourcesNixpkgs
|
||||
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
|
||||
import <nixpkgs> {}
|
||||
else
|
||||
abort
|
||||
''
|
||||
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
|
||||
add a package called "nixpkgs" to your sources.json.
|
||||
'';
|
||||
|
||||
# The actual fetching function.
|
||||
fetch = pkgs: name: spec:
|
||||
|
||||
if ! builtins.hasAttr "type" spec then
|
||||
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
|
||||
else if spec.type == "file" then fetch_file pkgs spec
|
||||
else if spec.type == "tarball" then fetch_tarball pkgs spec
|
||||
else if spec.type == "git" then fetch_git spec
|
||||
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
|
||||
else if spec.type == "builtin-url" then fetch_builtin-url spec
|
||||
else
|
||||
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
|
||||
|
||||
# Ports of functions for older nix versions
|
||||
|
||||
# a Nix version of mapAttrs if the built-in doesn't exist
|
||||
mapAttrs = builtins.mapAttrs or (
|
||||
f: set: with builtins;
|
||||
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
|
||||
);
|
||||
|
||||
# fetchTarball version that is compatible between all the versions of Nix
|
||||
builtins_fetchTarball = { url, sha256 }@attrs:
|
||||
let
|
||||
inherit (builtins) lessThan nixVersion fetchTarball;
|
||||
in
|
||||
if lessThan nixVersion "1.12" then
|
||||
fetchTarball { inherit url; }
|
||||
else
|
||||
fetchTarball attrs;
|
||||
|
||||
# fetchurl version that is compatible between all the versions of Nix
|
||||
builtins_fetchurl = { url, sha256 }@attrs:
|
||||
let
|
||||
inherit (builtins) lessThan nixVersion fetchurl;
|
||||
in
|
||||
if lessThan nixVersion "1.12" then
|
||||
fetchurl { inherit url; }
|
||||
else
|
||||
fetchurl attrs;
|
||||
|
||||
# Create the final "sources" from the config
|
||||
mkSources = config:
|
||||
mapAttrs (
|
||||
name: spec:
|
||||
if builtins.hasAttr "outPath" spec
|
||||
then abort
|
||||
"The values in sources.json should not have an 'outPath' attribute"
|
||||
else
|
||||
spec // { outPath = fetch config.pkgs name spec; }
|
||||
) config.sources;
|
||||
|
||||
# The "config" used by the fetchers
|
||||
mkConfig =
|
||||
{ sourcesFile ? ./sources.json
|
||||
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
|
||||
, pkgs ? mkPkgs sources
|
||||
}: rec {
|
||||
# The sources, i.e. the attribute set of spec name to spec
|
||||
inherit sources;
|
||||
|
||||
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
|
||||
inherit pkgs;
|
||||
};
|
||||
in
|
||||
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
|
10
scripts/find-active.sh
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
set -e
|
||||
|
||||
DIR=${1:-repos}
|
||||
|
||||
for url in $(./scripts/list.sh "${DIR}")
|
||||
do
|
||||
(curl "${url}" --silent | grep -q hitsofcode) && echo "${url}" &
|
||||
done
|
21
scripts/load-active.sh
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
set -e
|
||||
|
||||
ACTIVE=${1}
|
||||
|
||||
if [ -z "${ACTIVE}" ]
|
||||
then
|
||||
echo "Usage: $0 <list of active repos>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
while IFS= read -r url
|
||||
do
|
||||
imgs=$(curl "${url}" --silent | grep hitsofcode | grep -o -P 'https://camo.githubusercontent.com/[a-z0-9]+/[a-z0-9]+')
|
||||
[ -z "${imgs}" ] || echo "${url}"
|
||||
for img in ${imgs}
|
||||
do
|
||||
curl "$img" --silent > /dev/null &
|
||||
done
|
||||
done < "${ACTIVE}"
|
98
src/cache.rs
@@ -1,6 +1,7 @@
|
||||
use crate::error::{Error, Result};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
collections::HashMap,
|
||||
fs::{create_dir_all, File, OpenOptions},
|
||||
io::BufReader,
|
||||
path::Path,
|
||||
@@ -9,43 +10,112 @@ use std::{
|
||||
/// Enum to indicate the state of the cache
|
||||
pub(crate) enum CacheState<'a> {
|
||||
/// Current head and cached head are the same
|
||||
Current(u64),
|
||||
Current {
|
||||
count: u64,
|
||||
commits: u64,
|
||||
cache: Cache<'a>,
|
||||
},
|
||||
/// Cached head is older than current head
|
||||
Old(Cache<'a>),
|
||||
Old {
|
||||
head: String,
|
||||
cache: Cache<'a>,
|
||||
},
|
||||
NoneForBranch(Cache<'a>),
|
||||
/// No cache was found
|
||||
No,
|
||||
}
|
||||
|
||||
impl<'a> CacheState<'a> {
|
||||
pub(crate) fn read_from_file(path: impl AsRef<Path>, head: &str) -> Result<CacheState> {
|
||||
pub(crate) fn read_from_file(
|
||||
path: impl AsRef<Path>,
|
||||
branch: &str,
|
||||
head: &str,
|
||||
) -> Result<CacheState<'a>> {
|
||||
if path.as_ref().exists() {
|
||||
let cache: Cache = serde_json::from_reader(BufReader::new(File::open(path)?))?;
|
||||
if cache.head == head {
|
||||
Ok(CacheState::Current(cache.count))
|
||||
} else {
|
||||
Ok(CacheState::Old(cache))
|
||||
}
|
||||
Ok(cache
|
||||
.entries
|
||||
.get(branch)
|
||||
.map(|c| {
|
||||
if c.head == head {
|
||||
CacheState::Current {
|
||||
count: c.count,
|
||||
commits: c.commits,
|
||||
// TODO: get rid of clone
|
||||
cache: cache.clone(),
|
||||
}
|
||||
} else {
|
||||
CacheState::Old {
|
||||
head: c.head.to_string(),
|
||||
// TODO: get rid of clone
|
||||
cache: cache.clone(),
|
||||
}
|
||||
}
|
||||
})
|
||||
// TODO: get rid of clone
|
||||
.unwrap_or_else(|| CacheState::NoneForBranch(cache.clone())))
|
||||
} else {
|
||||
Ok(CacheState::No)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn calculate_new_cache(self, count: u64, head: Cow<'a, str>) -> Cache {
|
||||
pub(crate) fn calculate_new_cache(
|
||||
self,
|
||||
count: u64,
|
||||
commits: u64,
|
||||
head: Cow<'a, str>,
|
||||
branch: &'a str,
|
||||
) -> Cache<'a> {
|
||||
match self {
|
||||
CacheState::Old(mut cache) => {
|
||||
cache.head = head;
|
||||
cache.count += count;
|
||||
CacheState::Old { mut cache, .. } => {
|
||||
if let Some(mut cache) = cache.entries.get_mut(branch) {
|
||||
cache.head = head;
|
||||
cache.count += count;
|
||||
cache.commits += commits;
|
||||
}
|
||||
cache
|
||||
}
|
||||
CacheState::No | CacheState::Current(_) => Cache { head, count },
|
||||
CacheState::Current { cache, .. } => cache,
|
||||
CacheState::NoneForBranch(mut cache) => {
|
||||
cache.entries.insert(
|
||||
branch.into(),
|
||||
CacheEntry {
|
||||
head,
|
||||
count,
|
||||
commits,
|
||||
},
|
||||
);
|
||||
cache
|
||||
}
|
||||
CacheState::No => {
|
||||
let mut entries = HashMap::with_capacity(1);
|
||||
entries.insert(
|
||||
branch.into(),
|
||||
CacheEntry {
|
||||
commits,
|
||||
head,
|
||||
count,
|
||||
},
|
||||
);
|
||||
Cache { entries }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub(crate) struct Cache<'a> {
|
||||
pub entries: HashMap<Cow<'a, str>, CacheEntry<'a>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub(crate) struct CacheEntry<'a> {
|
||||
/// HEAD commit ref
|
||||
pub head: Cow<'a, str>,
|
||||
/// HoC value
|
||||
pub count: u64,
|
||||
/// Number of commits
|
||||
pub commits: u64,
|
||||
}
|
||||
|
||||
impl<'a> Cache<'a> {
|
||||
|
@@ -48,9 +48,8 @@ pub(crate) struct Opt {
|
||||
pub(crate) logfile: PathBuf,
|
||||
}
|
||||
|
||||
pub(crate) fn init() -> Result<()> {
|
||||
pub(crate) async fn init() -> Result<()> {
|
||||
std::env::set_var("RUST_LOG", "actix_web=info,hoc=info");
|
||||
// pretty_env_logger::init();
|
||||
openssl_probe::init_ssl_cert_env_vars();
|
||||
let stdout = ConsoleAppender::builder().build();
|
||||
let file = FileAppender::builder()
|
||||
|
@@ -5,6 +5,7 @@ pub(crate) fn count_repositories<P>(repo_path: P) -> Result<usize>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
std::fs::create_dir_all(&repo_path)?;
|
||||
Ok(read_dir(repo_path)?
|
||||
.filter_map(StdResult::ok)
|
||||
.filter(|entry| entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
|
||||
|
36
src/error.rs
@@ -16,7 +16,9 @@ pub(crate) enum Error {
|
||||
Io(std::io::Error),
|
||||
Log(log::SetLoggerError),
|
||||
LogBuilder(log4rs::config::Errors),
|
||||
Parse(std::num::ParseIntError),
|
||||
Serial(serde_json::Error),
|
||||
BranchNotFound,
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
@@ -29,7 +31,9 @@ impl fmt::Display for Error {
|
||||
Error::Io(e) => write!(fmt, "Io({})", e),
|
||||
Error::Log(e) => write!(fmt, "Log({})", e),
|
||||
Error::LogBuilder(e) => write!(fmt, "LogBuilder({})", e),
|
||||
Error::Parse(e) => write!(fmt, "Parse({})", e),
|
||||
Error::Serial(e) => write!(fmt, "Serial({})", e),
|
||||
Error::BranchNotFound => write!(fmt, "Repo doesn't have master branch"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -37,14 +41,24 @@ impl fmt::Display for Error {
|
||||
impl ResponseError for Error {
|
||||
fn error_response(&self) -> HttpResponse {
|
||||
let mut buf = Vec::new();
|
||||
templates::p500(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed)).unwrap();
|
||||
HttpResponse::InternalServerError()
|
||||
.content_type("text/html")
|
||||
.body(buf)
|
||||
}
|
||||
|
||||
fn render_response(&self) -> HttpResponse {
|
||||
self.error_response()
|
||||
match self {
|
||||
Error::BranchNotFound => {
|
||||
templates::p404_no_master(
|
||||
&mut buf,
|
||||
VERSION_INFO,
|
||||
REPO_COUNT.load(Ordering::Relaxed),
|
||||
)
|
||||
.unwrap();
|
||||
HttpResponse::NotFound().content_type("text/html").body(buf)
|
||||
}
|
||||
_ => {
|
||||
templates::p500(&mut buf, VERSION_INFO, REPO_COUNT.load(Ordering::Relaxed))
|
||||
.unwrap();
|
||||
HttpResponse::InternalServerError()
|
||||
.content_type("text/html")
|
||||
.body(buf)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,3 +105,9 @@ impl From<log4rs::config::Errors> for Error {
|
||||
Error::LogBuilder(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<std::num::ParseIntError> for Error {
|
||||
fn from(err: std::num::ParseIntError) -> Self {
|
||||
Error::Parse(err)
|
||||
}
|
||||
}
|
||||
|
327
src/main.rs
@@ -15,26 +15,30 @@ mod count;
|
||||
mod error;
|
||||
mod service;
|
||||
mod statics;
|
||||
mod template;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
cache::CacheState,
|
||||
error::{Error, Result},
|
||||
service::{Bitbucket, FormService, GitHub, Gitlab, Service},
|
||||
statics::{CLIENT, CSS, FAVICON, OPT, REPO_COUNT, VERSION_INFO},
|
||||
template::RepoInfo,
|
||||
};
|
||||
use actix_web::{
|
||||
error::ErrorBadRequest,
|
||||
http::header::{CacheControl, CacheDirective, Expires},
|
||||
middleware, web, App, HttpResponse, HttpServer,
|
||||
http::header::{CacheControl, CacheDirective, Expires, LOCATION},
|
||||
middleware::{self, normalize::TrailingSlash},
|
||||
web, App, HttpResponse, HttpServer, Responder,
|
||||
};
|
||||
use badge::{Badge, BadgeOptions};
|
||||
use bytes::Bytes;
|
||||
use futures::{unsync::mpsc, Future, Stream};
|
||||
use git2::Repository;
|
||||
use number_prefix::{NumberPrefix, Prefixed, Standalone};
|
||||
use git2::{BranchType, Repository};
|
||||
use number_prefix::NumberPrefix;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
fs::create_dir_all,
|
||||
io,
|
||||
path::Path,
|
||||
process::Command,
|
||||
sync::atomic::Ordering,
|
||||
@@ -51,7 +55,8 @@ struct GeneratorForm<'a> {
|
||||
repo: Cow<'a, str>,
|
||||
}
|
||||
|
||||
struct State {
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct State {
|
||||
repos: String,
|
||||
cache: String,
|
||||
}
|
||||
@@ -59,7 +64,14 @@ struct State {
|
||||
#[derive(Serialize)]
|
||||
struct JsonResponse<'a> {
|
||||
head: &'a str,
|
||||
branch: &'a str,
|
||||
count: u64,
|
||||
commits: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
struct BranchQuery {
|
||||
branch: Option<String>,
|
||||
}
|
||||
|
||||
fn pull(path: impl AsRef<Path>) -> Result<()> {
|
||||
@@ -69,17 +81,18 @@ fn pull(path: impl AsRef<Path>) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
||||
fn hoc(repo: &str, repo_dir: &str, cache_dir: &str, branch: &str) -> Result<(u64, String, u64)> {
|
||||
let repo_dir = format!("{}/{}", repo_dir, repo);
|
||||
let cache_dir = format!("{}/{}.json", cache_dir, repo);
|
||||
let cache_dir = Path::new(&cache_dir);
|
||||
let head = format!(
|
||||
"{}",
|
||||
Repository::open_bare(&repo_dir)?
|
||||
.head()?
|
||||
.target()
|
||||
.ok_or(Error::Internal)?
|
||||
);
|
||||
let repo = Repository::open_bare(&repo_dir)?;
|
||||
// TODO: do better...
|
||||
let head = repo
|
||||
.find_branch(branch, BranchType::Local)
|
||||
.map_err(|_| Error::BranchNotFound)?
|
||||
.into_reference();
|
||||
let head = format!("{}", head.target().ok_or(Error::BranchNotFound)?);
|
||||
let mut arg_commit_count = vec!["rev-list".to_string(), "--count".to_string()];
|
||||
let mut arg = vec![
|
||||
"log".to_string(),
|
||||
"--pretty=tformat:".to_string(),
|
||||
@@ -92,18 +105,21 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
||||
"-M".to_string(),
|
||||
"--diff-filter=ACDM".to_string(),
|
||||
];
|
||||
let cache = CacheState::read_from_file(&cache_dir, &head)?;
|
||||
let cache = CacheState::read_from_file(&cache_dir, branch, &head)?;
|
||||
match &cache {
|
||||
CacheState::Current(res) => {
|
||||
CacheState::Current { count, commits, .. } => {
|
||||
info!("Using cache for {}", repo_dir);
|
||||
return Ok((*res, head));
|
||||
return Ok((*count, head, *commits));
|
||||
}
|
||||
CacheState::Old(cache) => {
|
||||
CacheState::Old { head, .. } => {
|
||||
info!("Updating cache for {}", repo_dir);
|
||||
arg.push(format!("{}..HEAD", cache.head));
|
||||
arg.push(format!("{}..{}", head, branch));
|
||||
arg_commit_count.push(format!("{}..{}", head, branch));
|
||||
}
|
||||
CacheState::No => {
|
||||
CacheState::No | CacheState::NoneForBranch(..) => {
|
||||
info!("Creating cache for {}", repo_dir);
|
||||
arg.push(branch.to_string());
|
||||
arg_commit_count.push(branch.to_string());
|
||||
}
|
||||
};
|
||||
arg.push("--".to_string());
|
||||
@@ -114,6 +130,13 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
||||
.output()?
|
||||
.stdout;
|
||||
let output = String::from_utf8_lossy(&output);
|
||||
let output_commits = Command::new("git")
|
||||
.args(&arg_commit_count)
|
||||
.current_dir(&repo_dir)
|
||||
.output()?
|
||||
.stdout;
|
||||
let output_commits = String::from_utf8_lossy(&output_commits);
|
||||
let commits: u64 = output_commits.trim().parse()?;
|
||||
let count: u64 = output
|
||||
.lines()
|
||||
.map(|s| {
|
||||
@@ -125,19 +148,21 @@ fn hoc(repo: &str, repo_dir: &str, cache_dir: &str) -> Result<(u64, String)> {
|
||||
})
|
||||
.sum();
|
||||
|
||||
let cache = cache.calculate_new_cache(count, (&head).into());
|
||||
let cache = cache.calculate_new_cache(count, commits, (&head).into(), branch);
|
||||
cache.write_to_file(cache_dir)?;
|
||||
|
||||
Ok((cache.count, head))
|
||||
Ok((count, head, commits))
|
||||
}
|
||||
|
||||
fn remote_exists(url: &str) -> Result<bool> {
|
||||
Ok(CLIENT.head(url).send()?.status() == reqwest::StatusCode::OK)
|
||||
async fn remote_exists(url: &str) -> Result<bool> {
|
||||
let resp = CLIENT.head(url).send().await?;
|
||||
Ok(resp.status() == reqwest::StatusCode::OK)
|
||||
}
|
||||
|
||||
enum HocResult {
|
||||
Hoc {
|
||||
hoc: u64,
|
||||
commits: u64,
|
||||
hoc_pretty: String,
|
||||
head: String,
|
||||
url: String,
|
||||
@@ -147,71 +172,120 @@ enum HocResult {
|
||||
NotFound,
|
||||
}
|
||||
|
||||
fn handle_hoc_request<T, F>(
|
||||
async fn delete_repo_and_cache<T>(
|
||||
state: web::Data<Arc<State>>,
|
||||
data: web::Path<(String, String)>,
|
||||
) -> Result<impl Responder>
|
||||
where
|
||||
T: Service,
|
||||
{
|
||||
let data = data.into_inner();
|
||||
let repo = format!(
|
||||
"{}/{}/{}",
|
||||
T::domain(),
|
||||
data.0.to_lowercase(),
|
||||
data.1.to_lowercase()
|
||||
);
|
||||
info!("Deleting cache and repository for {}", repo);
|
||||
let cache_dir = format!("{}/{}.json", &state.cache, repo);
|
||||
let repo_dir = format!("{}/{}", &state.repos, repo);
|
||||
std::fs::remove_file(&cache_dir).or_else(|e| {
|
||||
if e.kind() == io::ErrorKind::NotFound {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
})?;
|
||||
std::fs::remove_dir_all(&repo_dir).or_else(|e| {
|
||||
if e.kind() == io::ErrorKind::NotFound {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
})?;
|
||||
REPO_COUNT.fetch_sub(1, Ordering::Relaxed);
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.header(
|
||||
LOCATION,
|
||||
format!("/view/{}/{}/{}", T::url_path(), data.0, data.1),
|
||||
)
|
||||
.finish())
|
||||
}
|
||||
|
||||
async fn handle_hoc_request<T, F>(
|
||||
state: web::Data<Arc<State>>,
|
||||
data: web::Path<(String, String)>,
|
||||
branch: &str,
|
||||
mapper: F,
|
||||
) -> impl Future<Item = HttpResponse, Error = Error>
|
||||
) -> Result<HttpResponse>
|
||||
where
|
||||
T: Service,
|
||||
F: Fn(HocResult) -> Result<HttpResponse>,
|
||||
{
|
||||
futures::future::result(Ok(()))
|
||||
.and_then(move |_| {
|
||||
let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
|
||||
let service_path = format!("{}/{}", T::domain(), repo);
|
||||
let path = format!("{}/{}", state.repos, service_path);
|
||||
let file = Path::new(&path);
|
||||
let url = format!("https://{}", service_path);
|
||||
if !file.exists() {
|
||||
if !remote_exists(&url)? {
|
||||
warn!("Repository does not exist: {}", url);
|
||||
return Ok(HocResult::NotFound);
|
||||
}
|
||||
info!("Cloning {} for the first time", url);
|
||||
create_dir_all(file)?;
|
||||
let repo = Repository::init_bare(file)?;
|
||||
repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
|
||||
repo.remote_set_url("origin", &url)?;
|
||||
REPO_COUNT.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
pull(&path)?;
|
||||
let (hoc, head) = hoc(&service_path, &state.repos, &state.cache)?;
|
||||
let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
|
||||
Standalone(hoc) => hoc.to_string(),
|
||||
Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
|
||||
};
|
||||
Ok(HocResult::Hoc {
|
||||
hoc,
|
||||
hoc_pretty,
|
||||
head,
|
||||
url,
|
||||
repo,
|
||||
service_path,
|
||||
})
|
||||
})
|
||||
.and_then(mapper)
|
||||
let data = data.into_inner();
|
||||
let repo = format!("{}/{}", data.0.to_lowercase(), data.1.to_lowercase());
|
||||
let service_path = format!("{}/{}", T::url_path(), repo);
|
||||
let service_url = format!("{}/{}", T::domain(), repo);
|
||||
let path = format!("{}/{}", state.repos, service_url);
|
||||
let url = format!("https://{}", service_url);
|
||||
let remote_exists = remote_exists(&url).await?;
|
||||
let file = Path::new(&path);
|
||||
if !file.exists() {
|
||||
if !remote_exists {
|
||||
warn!("Repository does not exist: {}", url);
|
||||
return mapper(HocResult::NotFound);
|
||||
}
|
||||
info!("Cloning {} for the first time", url);
|
||||
create_dir_all(file)?;
|
||||
let repo = Repository::init_bare(file)?;
|
||||
repo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*")?;
|
||||
repo.remote_set_url("origin", &url)?;
|
||||
REPO_COUNT.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
pull(&path)?;
|
||||
let (hoc, head, commits) = hoc(&service_url, &state.repos, &state.cache, branch)?;
|
||||
let hoc_pretty = match NumberPrefix::decimal(hoc as f64) {
|
||||
NumberPrefix::Standalone(hoc) => hoc.to_string(),
|
||||
NumberPrefix::Prefixed(prefix, hoc) => format!("{:.1}{}", hoc, prefix),
|
||||
};
|
||||
let res = HocResult::Hoc {
|
||||
hoc,
|
||||
commits,
|
||||
hoc_pretty,
|
||||
head,
|
||||
url,
|
||||
repo,
|
||||
service_path,
|
||||
};
|
||||
mapper(res)
|
||||
}
|
||||
|
||||
fn json_hoc<T: Service>(
|
||||
pub(crate) async fn json_hoc<T: Service>(
|
||||
state: web::Data<Arc<State>>,
|
||||
data: web::Path<(String, String)>,
|
||||
) -> impl Future<Item = HttpResponse, Error = Error> {
|
||||
branch: web::Query<BranchQuery>,
|
||||
) -> Result<HttpResponse> {
|
||||
let branch = branch.branch.as_deref().unwrap_or("master");
|
||||
let mapper = |r| match r {
|
||||
HocResult::NotFound => p404(),
|
||||
HocResult::Hoc { hoc, head, .. } => Ok(HttpResponse::Ok().json(JsonResponse {
|
||||
HocResult::Hoc {
|
||||
hoc, head, commits, ..
|
||||
} => Ok(HttpResponse::Ok().json(JsonResponse {
|
||||
branch,
|
||||
head: &head,
|
||||
count: hoc,
|
||||
commits,
|
||||
})),
|
||||
};
|
||||
handle_hoc_request::<T, _>(state, data, mapper)
|
||||
handle_hoc_request::<T, _>(state, data, branch, mapper).await
|
||||
}
|
||||
|
||||
fn calculate_hoc<T: Service>(
|
||||
pub(crate) async fn calculate_hoc<T: Service>(
|
||||
state: web::Data<Arc<State>>,
|
||||
data: web::Path<(String, String)>,
|
||||
) -> impl Future<Item = HttpResponse, Error = Error> {
|
||||
let mapper = |r| match r {
|
||||
branch: web::Query<BranchQuery>,
|
||||
) -> Result<HttpResponse> {
|
||||
let mapper = move |r| match r {
|
||||
HocResult::NotFound => p404(),
|
||||
HocResult::Hoc { hoc_pretty, .. } => {
|
||||
let badge_opt = BadgeOptions {
|
||||
@@ -220,9 +294,8 @@ fn calculate_hoc<T: Service>(
|
||||
status: hoc_pretty,
|
||||
};
|
||||
let badge = Badge::new(badge_opt)?;
|
||||
|
||||
let (tx, rx_body) = mpsc::unbounded();
|
||||
let _ = tx.unbounded_send(Bytes::from(badge.to_svg().as_bytes()));
|
||||
// TODO: remove clone
|
||||
let body = badge.to_svg().as_bytes().to_vec();
|
||||
|
||||
let expiration = SystemTime::now() + Duration::from_secs(30);
|
||||
Ok(HttpResponse::Ok()
|
||||
@@ -234,20 +307,24 @@ fn calculate_hoc<T: Service>(
|
||||
CacheDirective::NoCache,
|
||||
CacheDirective::NoStore,
|
||||
]))
|
||||
.streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
|
||||
.body(body))
|
||||
}
|
||||
};
|
||||
handle_hoc_request::<T, _>(state, data, mapper)
|
||||
let branch = branch.branch.as_deref().unwrap_or("master");
|
||||
handle_hoc_request::<T, _>(state, data, branch, mapper).await
|
||||
}
|
||||
|
||||
fn overview<T: Service>(
|
||||
async fn overview<T: Service>(
|
||||
state: web::Data<Arc<State>>,
|
||||
data: web::Path<(String, String)>,
|
||||
) -> impl Future<Item = HttpResponse, Error = Error> {
|
||||
branch: web::Query<BranchQuery>,
|
||||
) -> Result<HttpResponse> {
|
||||
let branch = branch.branch.as_deref().unwrap_or("master");
|
||||
let mapper = |r| match r {
|
||||
HocResult::NotFound => p404(),
|
||||
HocResult::Hoc {
|
||||
hoc,
|
||||
commits,
|
||||
hoc_pretty,
|
||||
url,
|
||||
head,
|
||||
@@ -255,32 +332,32 @@ fn overview<T: Service>(
|
||||
service_path,
|
||||
} => {
|
||||
let mut buf = Vec::new();
|
||||
let repo_info = RepoInfo {
|
||||
commit_url: &T::commit_url(&repo, &head),
|
||||
commits,
|
||||
domain: &OPT.domain,
|
||||
head: &head,
|
||||
hoc,
|
||||
hoc_pretty: &hoc_pretty,
|
||||
path: &service_path,
|
||||
url: &url,
|
||||
branch,
|
||||
};
|
||||
templates::overview(
|
||||
&mut buf,
|
||||
VERSION_INFO,
|
||||
REPO_COUNT.load(Ordering::Relaxed),
|
||||
&OPT.domain,
|
||||
&service_path,
|
||||
&url,
|
||||
hoc,
|
||||
&hoc_pretty,
|
||||
&head,
|
||||
&T::commit_url(&repo, &head),
|
||||
repo_info,
|
||||
)?;
|
||||
|
||||
let (tx, rx_body) = mpsc::unbounded();
|
||||
let _ = tx.unbounded_send(Bytes::from(buf));
|
||||
|
||||
Ok(HttpResponse::Ok()
|
||||
.content_type("text/html")
|
||||
.streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
|
||||
Ok(HttpResponse::Ok().content_type("text/html").body(buf))
|
||||
}
|
||||
};
|
||||
handle_hoc_request::<T, _>(state, data, mapper)
|
||||
handle_hoc_request::<T, _>(state, data, branch, mapper).await
|
||||
}
|
||||
|
||||
#[get("/")]
|
||||
fn index() -> Result<HttpResponse> {
|
||||
async fn index() -> Result<HttpResponse> {
|
||||
let mut buf = Vec::new();
|
||||
templates::index(
|
||||
&mut buf,
|
||||
@@ -292,7 +369,7 @@ fn index() -> Result<HttpResponse> {
|
||||
}
|
||||
|
||||
#[post("/generate")]
|
||||
fn generate(params: web::Form<GeneratorForm>) -> Result<HttpResponse> {
|
||||
async fn generate(params: web::Form<GeneratorForm<'_>>) -> Result<HttpResponse> {
|
||||
let repo = format!("{}/{}", params.user, params.repo);
|
||||
let mut buf = Vec::new();
|
||||
templates::generate(
|
||||
@@ -304,12 +381,8 @@ fn generate(params: web::Form<GeneratorForm>) -> Result<HttpResponse> {
|
||||
params.service.service(),
|
||||
&repo,
|
||||
)?;
|
||||
let (tx, rx_body) = mpsc::unbounded();
|
||||
let _ = tx.unbounded_send(Bytes::from(buf));
|
||||
|
||||
Ok(HttpResponse::Ok()
|
||||
.content_type("text/html")
|
||||
.streaming(rx_body.map_err(|_| ErrorBadRequest("bad request"))))
|
||||
Ok(HttpResponse::Ok().content_type("text/html").body(buf))
|
||||
}
|
||||
|
||||
fn p404() -> Result<HttpResponse> {
|
||||
@@ -318,43 +391,65 @@ fn p404() -> Result<HttpResponse> {
|
||||
Ok(HttpResponse::NotFound().content_type("text/html").body(buf))
|
||||
}
|
||||
|
||||
#[get("/tacit-css.min.css")]
|
||||
async fn async_p404() -> Result<HttpResponse> {
|
||||
p404()
|
||||
}
|
||||
|
||||
fn css() -> HttpResponse {
|
||||
HttpResponse::Ok().content_type("text/css").body(CSS)
|
||||
}
|
||||
|
||||
#[get("/favicon.ico")]
|
||||
fn favicon32() -> HttpResponse {
|
||||
HttpResponse::Ok().content_type("image/png").body(FAVICON)
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
config::init()?;
|
||||
async fn start_server() -> std::io::Result<()> {
|
||||
let interface = format!("{}:{}", OPT.host, OPT.port);
|
||||
let state = Arc::new(State {
|
||||
repos: OPT.outdir.display().to_string(),
|
||||
cache: OPT.cachedir.display().to_string(),
|
||||
});
|
||||
Ok(HttpServer::new(move || {
|
||||
HttpServer::new(move || {
|
||||
App::new()
|
||||
.data(state.clone())
|
||||
.wrap(middleware::Logger::default())
|
||||
.wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
|
||||
.service(index)
|
||||
.service(css)
|
||||
.service(favicon32)
|
||||
.service(web::resource("/tacit-css.min.css").route(web::get().to(css)))
|
||||
// TODO
|
||||
.service(web::resource("/favicon.ico").route(web::get().to(favicon32)))
|
||||
.service(generate)
|
||||
.service(web::resource("/github/{user}/{repo}").to_async(calculate_hoc::<GitHub>))
|
||||
.service(web::resource("/gitlab/{user}/{repo}").to_async(calculate_hoc::<Gitlab>))
|
||||
.service(web::resource("/bitbucket/{user}/{repo}").to_async(calculate_hoc::<Bitbucket>))
|
||||
.service(web::resource("/github/{user}/{repo}/json").to_async(json_hoc::<GitHub>))
|
||||
.service(web::resource("/gitlab/{user}/{repo}/json").to_async(json_hoc::<Gitlab>))
|
||||
.service(web::resource("/bitbucket/{user}/{repo}/json").to_async(json_hoc::<Bitbucket>))
|
||||
.service(web::resource("/view/github/{user}/{repo}").to_async(overview::<GitHub>))
|
||||
.service(web::resource("/view/gitlab/{user}/{repo}").to_async(overview::<Gitlab>))
|
||||
.service(web::resource("/view/bitbucket/{user}/{repo}").to_async(overview::<Bitbucket>))
|
||||
.default_service(web::resource("").route(web::get().to_async(p404)))
|
||||
.service(web::resource("/github/{user}/{repo}").to(calculate_hoc::<GitHub>))
|
||||
.service(web::resource("/gitlab/{user}/{repo}").to(calculate_hoc::<Gitlab>))
|
||||
.service(web::resource("/bitbucket/{user}/{repo}").to(calculate_hoc::<Bitbucket>))
|
||||
.service(
|
||||
web::resource("/github/{user}/{repo}/delete")
|
||||
.route(web::post().to(delete_repo_and_cache::<GitHub>)),
|
||||
)
|
||||
.service(
|
||||
web::resource("/gitlab/{user}/{repo}/delete")
|
||||
.route(web::post().to(delete_repo_and_cache::<Gitlab>)),
|
||||
)
|
||||
.service(
|
||||
web::resource("/bitbucket/{user}/{repo}/delete")
|
||||
.route(web::post().to(delete_repo_and_cache::<Bitbucket>)),
|
||||
)
|
||||
.service(web::resource("/github/{user}/{repo}/json").to(json_hoc::<GitHub>))
|
||||
.service(web::resource("/gitlab/{user}/{repo}/json").to(json_hoc::<Gitlab>))
|
||||
.service(web::resource("/bitbucket/{user}/{repo}/json").to(json_hoc::<Bitbucket>))
|
||||
.service(web::resource("/view/github/{user}/{repo}").to(overview::<GitHub>))
|
||||
.service(web::resource("/view/gitlab/{user}/{repo}").to(overview::<Gitlab>))
|
||||
.service(web::resource("/view/bitbucket/{user}/{repo}").to(overview::<Bitbucket>))
|
||||
.default_service(web::resource("").route(web::get().to(async_p404)))
|
||||
})
|
||||
.workers(OPT.workers)
|
||||
.bind(interface)?
|
||||
.run()?)
|
||||
.run()
|
||||
.await
|
||||
}
|
||||
|
||||
#[actix_rt::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
config::init().await.unwrap();
|
||||
start_server().await
|
||||
}
|
||||
|
11
src/template.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
pub struct RepoInfo<'a> {
|
||||
pub commit_url: &'a str,
|
||||
pub commits: u64,
|
||||
pub domain: &'a str,
|
||||
pub head: &'a str,
|
||||
pub hoc: u64,
|
||||
pub hoc_pretty: &'a str,
|
||||
pub path: &'a str,
|
||||
pub url: &'a str,
|
||||
pub branch: &'a str,
|
||||
}
|
66
src/tests.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use crate::{
|
||||
calculate_hoc, index, json_hoc,
|
||||
service::{Bitbucket, GitHub, Gitlab, Service},
|
||||
State,
|
||||
};
|
||||
|
||||
use actix_web::{http, test, web, App};
|
||||
use tempfile::tempdir;
|
||||
|
||||
macro_rules! test_app {
|
||||
($path: expr) => {
|
||||
test::init_service(App::new().service($path)).await
|
||||
};
|
||||
($state: expr, $path: expr) => {
|
||||
test::init_service(App::new().data($state).service($path)).await
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! test_service {
|
||||
($name: ident, $path: tt, $what: ident) => {
|
||||
async fn $name<T: 'static + Service>(req_path: &str) {
|
||||
let repo_dir = dbg!(tempdir().unwrap());
|
||||
let cache_dir = dbg!(tempdir().unwrap());
|
||||
let repos = format!("{}/", repo_dir.path().display());
|
||||
let cache = format!("{}/", cache_dir.path().display());
|
||||
let state = dbg!(State { repos, cache });
|
||||
|
||||
let mut app = test_app!(state, web::resource($path).to($what::<T>));
|
||||
|
||||
let req = dbg!(test::TestRequest::with_uri(req_path).to_request());
|
||||
let resp = dbg!(test::call_service(&mut app, req).await);
|
||||
|
||||
assert_eq!(resp.status(), http::StatusCode::OK);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_index() {
|
||||
let mut app = test::init_service(App::new().service(index)).await;
|
||||
|
||||
let req = dbg!(test::TestRequest::with_uri("/").to_request());
|
||||
let resp = dbg!(test::call_service(&mut app, req).await);
|
||||
|
||||
assert_eq!(resp.status(), http::StatusCode::OK);
|
||||
}
|
||||
|
||||
// TODO: fix this test
|
||||
// #[actix_rt::test]
|
||||
async fn test_json() {
|
||||
test_service!(test_json_service, "/service/{user}/{repo}/json", json_hoc);
|
||||
|
||||
test_json_service::<Gitlab>("/service/vbrandl/hoc/json").await;
|
||||
test_json_service::<GitHub>("/service/vbrandl/hoc/json").await;
|
||||
test_json_service::<Bitbucket>("/service/vbrandl/hoc/json").await;
|
||||
}
|
||||
|
||||
// TODO: fix this test
|
||||
// #[actix_rt::test]
|
||||
async fn test_badge() {
|
||||
test_service!(test_badge_service, "/service/{user}/{repo}", calculate_hoc);
|
||||
|
||||
test_badge_service::<Gitlab>("/service/vbrandl/hoc").await;
|
||||
test_badge_service::<GitHub>("/service/vbrandl/hoc").await;
|
||||
test_badge_service::<Bitbucket>("/service/vbrandl/hoc").await;
|
||||
}
|
@@ -46,17 +46,24 @@ alt="example badge" /></a>
|
||||
</pre>
|
||||
|
||||
<p>
|
||||
You can also request the HoC as JSON by appending <code>/json</code> to the request path. This will return a JSON
|
||||
object with two fields: <code>count</code> and <code>head</code> with count being the HoC value and head being the
|
||||
commit ref of <code>HEAD</code>. Requesting
|
||||
<a href="https://@domain/github/vbrandl/hoc/json">https://@domain/github/vbrandl/hoc/json</a> might return something
|
||||
along the lines of
|
||||
By default, this service assumes the existence of a branch named <code>master</code>. If no branch with that name exists
|
||||
in your repository or you want a badge for another branch of your repository, just append
|
||||
<code>?branch=<branch-name></code> to the URL.
|
||||
</p>
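For illustration (a hypothetical request; `develop` stands in for any branch name, and the JSON endpoint described below accepts the same parameter):

$ curl 'https://<host>/github/<user>/<repo>/json?branch=develop'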
|
||||
|
||||
<p>
|
||||
You can also request the HoC as JSON by appending <code>/json</code> to the request path. This will return a JSON object
|
||||
with three fields: <code>count</code> (the HoC value), <code>commits</code> (the number of commits) and
|
||||
<code>head</code> (the commit ref of HEAD). Requesting <a
|
||||
href="https://@domain/github/vbrandl/hoc/json">https://@domain/github/vbrandl/hoc/json</a> might return something along
|
||||
the lines of
|
||||
</p>
|
||||
|
||||
<pre>
|
||||
{
|
||||
"head" : "05736ee3ba256ec9a7227c436aef2bf43db109ab",
|
||||
"count": 7582
|
||||
"head": "1f01c3b964b018fb0c0c2c5b572bf4ace2968546",
|
||||
"count": 8324,
|
||||
"commits": 223
|
||||
}
|
||||
</pre>
|
||||
|
||||
|
@@ -1,12 +1,17 @@
|
||||
@use super::base;
|
||||
@use crate::statics::VersionInfo;
|
||||
@use crate::template::RepoInfo;
|
||||
|
||||
@(version_info: VersionInfo, repo_count: usize, domain: &str, path: &str, url: &str, hoc: u64, hoc_pretty: &str, head: &str, commit_url: &str)
|
||||
@(version_info: VersionInfo, repo_count: usize, repo_info: RepoInfo)
|
||||
|
||||
@:base("Hits-of-Code Badges", "Overview", {
|
||||
|
||||
<p>
|
||||
The project <a href="@url">@url</a> has <strong>@hoc_pretty</strong> (exactly @hoc) hits of code at <a href="@commit_url">@head</a>.
|
||||
The project <a href="@repo_info.url">@repo_info.url</a> has
|
||||
<strong>@repo_info.hoc_pretty</strong> (exactly @repo_info.hoc) hits of code at
|
||||
<a href="@repo_info.commit_url">@repo_info.head</a> on the
|
||||
<code>@repo_info.branch</code> branch. The repository contains
|
||||
<strong>@repo_info.commits</strong> commits.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
@@ -14,6 +19,11 @@ To include the badge in your readme, use the following markdown:
|
||||
</p>
|
||||
|
||||
<pre>
|
||||
[](https://@domain/view/@path)
|
||||
[](https://@repo_info.domain/view/@repo_info.path?branch=@repo_info.branch)
|
||||
</pre>
|
||||
|
||||
|
||||
<form method="post" action="/@repo_info.path/delete">
|
||||
<button type="submit">Rebuild Cache</button>
|
||||
</form>
|
||||
}, version_info, repo_count)
|
||||
|
16
templates/p404_no_master.rs.html
Normal file
@@ -0,0 +1,16 @@
|
||||
@use super::base;
|
||||
@use crate::statics::VersionInfo;
|
||||
|
||||
@(version_info: VersionInfo, repo_count: usize)
|
||||
|
||||
@:base("Branch not Found - Hits-of-Code Badges", "404 - Branch not Found", {
|
||||
<p>
|
||||
<big>Sorry</big>. I couldn't find the requested branch of your repository. Currently this service assumes the
|
||||
existence of a branch named <code>master</code>. If you'd like to request a badge for another branch, you can do so by
|
||||
attaching <code>?branch=<branch-name></code> to the request.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
If you think this is a mistake on my side, please <a href="mailto:mail+hoc@@vbrandl.net">drop me a mail</a>.
|
||||
</p>
|
||||
}, version_info, repo_count)
|
80
vm.nix
Normal file
@@ -0,0 +1,80 @@
|
||||
# Nix configuration for a VM to run a custom configured Vim
|
||||
#
|
||||
# It is intended as an example of building a VM that builds Vim for testing
|
||||
# and evaluation purposes. It does not represent a production or secure
|
||||
# deployment.
|
||||
|
||||
{ sources ? import ./nix/sources.nix
|
||||
, pkgs ? import sources.nixpkgs { }
|
||||
, callPackage ? pkgs.callPackage
|
||||
, config
|
||||
, lib
|
||||
, ...
|
||||
}:
|
||||
# config, pkgs, lib, ... }:
|
||||
|
||||
let
|
||||
hoc = pkgs.callPackage ./default.nix { };
|
||||
|
||||
# hoc = cargoNix.rootCrate.build;
|
||||
in
|
||||
{
|
||||
environment = {
|
||||
systemPackages = with pkgs; [
|
||||
(
|
||||
hoc
|
||||
# import ./default.nix
|
||||
)
|
||||
];
|
||||
};
|
||||
|
||||
|
||||
networking.hostName = "hoc"; # Define your hostname.
|
||||
|
||||
system.stateVersion = "19.09"; # The version of NixOS originally installed
|
||||
|
||||
# Set security options:
|
||||
security = {
|
||||
sudo = {
|
||||
enable = true; # Enable sudo
|
||||
wheelNeedsPassword = false; # Allow wheel members to run sudo without a password
|
||||
};
|
||||
};
|
||||
|
||||
networking.firewall.allowedTCPPorts = [ 80 ];
|
||||
|
||||
# List services that you want to enable:
|
||||
services.openssh = {
|
||||
enable = true; # Enable the OpenSSH daemon.
|
||||
#permitRootLogin = "yes"; # Probably want to change this in production
|
||||
#challengeResponseAuthentication = true; # Probably want to change this in production
|
||||
#passwordAuthentication = true; # Probably want to change this in production
|
||||
openFirewall = true;
|
||||
hostKeys = [
|
||||
{
|
||||
path = "/etc/ssh/ssh_host_ed25519_key"; # Generate a key for the vm
|
||||
type = "ed25519"; # Use the current best key type
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
# Users of the Vim VM:
|
||||
users.mutableUsers = false; # Remove any users not defined in here
|
||||
|
||||
users.users.root = {
|
||||
password = "123456"; # Probably want to change this in production
|
||||
};
|
||||
|
||||
# Misc groups:
|
||||
users.groups.nixos.gid = 1000;
|
||||
|
||||
# NixOS users
|
||||
users.users.nixos = {
|
||||
isNormalUser = true;
|
||||
uid = 1000;
|
||||
group = "nixos";
|
||||
extraGroups = [ "wheel" ];
|
||||
password = "123456"; # Probably want to change this in production
|
||||
};
|
||||
|
||||
}
|