Compare commits
184 commits
SHA1 | Author | Date | |
---|---|---|---|
c3cb6f6298 | |||
b600518091 | |||
55d7d3eb5a | |||
c7fdb1acc5 | |||
5efb1f4344 | |||
ca8b9f00a0 | |||
c270a24de8 | |||
0f4ff679d2 | |||
e28cf9be3f | |||
1dac706aeb | |||
29916c0841 | |||
3c9587bd4c | |||
7dcd2c6a4c | |||
5a98e8205f | |||
2183c81d70 | |||
4ab2f709a9 | |||
269a37c920 | |||
6ac1133486 | |||
c5c51645ba | |||
fe58c295ff | |||
f356463079 | |||
caa7c1165f | |||
7dca0efdf2 | |||
5f2ec1089f | |||
ba2babb0ae | |||
f9da4aecd4 | |||
2d00dd3818 | |||
2daf620a4a | |||
316f2bf576 | |||
3d660314cf | |||
f207cbe859 | |||
c589fb40c3 | |||
08ea3271c1 | |||
8787adae30 | |||
130c949723 | |||
e236aa424b | |||
baeb98a2e6 | |||
9b41d7d2a5 | |||
33f8b1570d | |||
06995416d2 | |||
f8848e669e | |||
72843b8f52 | |||
4cdffc20bc | |||
cf66c77136 | |||
5fc53886f2 | |||
881983dd6c | |||
632250d544 | |||
c71a8b418c | |||
873dd980ff | |||
3e1140ffe5 | |||
6c78e8d78e | |||
3ed76c094c | |||
f16c3b9138 | |||
42482574ac | |||
855963bc85 | |||
7cc182b23e | |||
bc62fd7c1d | |||
cdef8078cc | |||
a6f0785dc3 | |||
5d4c0fdb70 | |||
2a2f9ef06c | |||
ef6e955b90 | |||
b581e2adf1 | |||
48034ec6e5 | |||
593bee9024 | |||
09345f2193 | |||
71d1e43ef2 | |||
528aad1d8e | |||
cb7e4d554b | |||
01f53b2bf0 | |||
60662ff1f0 | |||
eaed7b2302 | |||
8a04c2726f | |||
1a88dbaeb2 | |||
c41008f800 | |||
3c18f07a2a | |||
0a5348097d | |||
bdb27d7cb1 | |||
41c30372fb | |||
e99bc52de2 | |||
338e296683 | |||
29f7315f67 | |||
738fa8accf | |||
172076eaad | |||
b775f8e811 | |||
48c2e5be4d | |||
d56af9ecfe | |||
72aa4f365e | |||
d8630aa8cb | |||
5655d7de67 | |||
d2f6c3cd66 | |||
13c9c544a7 | |||
70ed898f1d | |||
2f97008475 | |||
09bd450a46 | |||
a6bb8bf817 | |||
a39fa354e4 | |||
26566fd612 | |||
09be5627c3 | |||
dff29cab77 | |||
c8407a8eb4 | |||
77cb03576d | |||
a47dd0bfac | |||
34ce8a69f6 | |||
ea428d1aef | |||
98aa3c2a97 | |||
0fc17d9150 | |||
e72e3777b8 | |||
2473e6edbc | |||
989ed2a080 | |||
2e8d20f89d | |||
ec27e31336 | |||
e5f7d94f77 | |||
a2735b46b5 | |||
542336867a | |||
70162c83f6 | |||
0fd9a87907 | |||
880d462e80 | |||
40a9d600c9 | |||
8bf62f3ce2 | |||
883f6e6ae7 | |||
4863ebc6df | |||
36b4d55ea8 | |||
faf1f637ab | |||
9b2880c141 | |||
0685c2601a | |||
5e16700652 | |||
ccc1ccba97 | |||
a515c5d8df | |||
7fdf322c73 | |||
5d6fd054ee | |||
2b953c4b75 | |||
c038b68ecb | |||
dcba690961 | |||
99e887513d | |||
4490dfac05 | |||
22ab037b6f | |||
e4451beebf | |||
6a1feb2612 | |||
2ba6277778 | |||
69fde96f67 | |||
f6084449fa | |||
e9d5b87ecc | |||
63a7505724 | |||
502f7cbcdf | |||
7bc103e861 | |||
2a52e66bc0 | |||
bd8badac23 | |||
6b4c0c4865 | ||
9f40444846 | |||
faf09c7de1 | |||
657a877168 | |||
ce2692f66d | |||
bea38bc445 | |||
846a0675d1 | |||
750cbbff93 | |||
60aed649b1 | |||
d98c4202f4 | |||
68cf43c539 | |||
82b1078a94 | ||
5a9b09bc7e | |||
618670512e | |||
e61913e5a8 | |||
b7fbb65438 | |||
d44d2aa1a7 | |||
8dd8b9e3aa | |||
c81d7af356 | |||
442601f25a | |||
51f557c482 | ||
5745e1aaf7 | |||
8242b83dd9 | |||
2a18f6bedb | |||
d26caed805 | ||
095bc3bc8c | |||
b1e184d186 | ||
26cffb230f | |||
d76722f602 | |||
144d5c45c6 | |||
9fc79a0c7b | |||
965b289484 | |||
ee65b5dcac | |||
a8fcc3bd4c | |||
bd5737e4f2 | |||
43f3c417da |
28 changed files with 2829 additions and 1033 deletions
(first changed file; name not captured in this export)
@@ -1,2 +1,2 @@
 [build]
-rustflags = ["-Clink-arg=-fuse-ld=mold", "-Zthreads=16"]
+rustflags = ["-Clink-arg=-fuse-ld=mold"]
14  .editorconfig  (new file)
@@ -0,0 +1,14 @@
root = true

[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true

[*.{nix,toml,json}]
indent_style = space
indent_size = 2

[*.rs]
indent_style = space
indent_size = 4
41  .forgejo/workflows/check.yml  (new file)
@@ -0,0 +1,41 @@
on: [push]
jobs:
  check-renovaterc:
    runs-on: nixos
    steps:
      - uses: https://git.salame.cl/actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - name: Validate renovaterc
        run: |
          nix --version
          nix shell nixpkgs#renovate --command renovate-config-validator
  build:
    runs-on: nixos
    steps:
      - uses: https://git.salame.cl/actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - name: Build Package
        run: |
          nix --version
          nix build --print-build-logs .#
  test:
    needs: build # we use the built binaries in the checks
    runs-on: nixos
    steps:
      - uses: https://git.salame.cl/actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - name: Run tests
        run: |
          nix --version
          nix-fast-build --max-jobs 2 --no-nom --skip-cached --no-link \
            --flake ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)"
  report-size:
    runs-on: nixos
    needs: build
    steps:
      - uses: https://git.salame.cl/actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - run: nix --version
      - name: Generate size report
        uses: "https://git.salame.cl/jalil/nix-flake-outputs-size@5c40a31e3e2ed0ea28f8ba68deca41d05fdf2e71" # main
        with:
          comment-on-pr: ${{ github.ref_name != 'main' }}
          generate-artifact: ${{ github.ref_name == 'main' }}
          do-comparison: true
          job-name: report-size
44  .renovaterc.json  (new file)
@@ -0,0 +1,44 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "assignees": [
    "jalil"
  ],
  "automerge": true,
  "automergeStrategy": "auto",
  "automergeType": "pr",
  "commitBodyTable": true,
  "dependencyDashboard": true,
  "extends": [
    "config:best-practices"
  ],
  "prCreation": "immediate",
  "cargo": {
    "commitMessageTopic": "Rust crate {{depName}}",
    "fileMatch": [
      "(^|/)Cargo\\.toml$"
    ],
    "versioning": "cargo",
    "enabled": true
  },
  "nix": {
    "fileMatch": [
      "(^|/)flake\\.nix$"
    ],
    "commitMessageTopic": "nixpkgs",
    "commitMessageExtra": "to {{newValue}}",
    "enabled": true
  },
  "lockFileMaintenance": {
    "enabled": true,
    "recreateWhen": "always",
    "rebaseWhen": "behind-base-branch",
    "branchTopic": "lock-file-maintenance",
    "commitMessageAction": "Lock file maintenance",
    "schedule": [
      "* 22 * * *"
    ]
  },
  "automergeSchedule": [
    "* 23 * * *"
  ]
}
139  CHANGELOG.md  (new file)
@@ -0,0 +1,139 @@
|
||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
## [0.3.6] - 2025-01-26
|
||||||
|
|
||||||
|
### 🚀 Features
|
||||||
|
|
||||||
|
- *(webnsupdate)* Allow running in IPv4/6 only mode
|
||||||
|
- *(module)* Add option for setting --ip-type
|
||||||
|
- *(flake)* Add tests for new allowedIPVersion option
|
||||||
|
|
||||||
|
## [0.3.5] - 2025-01-23
|
||||||
|
|
||||||
|
### 🚀 Features
|
||||||
|
|
||||||
|
- *(renovate)* Enable lockFileMaintenance
|
||||||
|
- *(webnsupdate)* Add handling for multiple IPs
|
||||||
|
- Tune compilation for size
|
||||||
|
- *(tests)* Add nginx integration test
|
||||||
|
|
||||||
|
### 🐛 Bug Fixes
|
||||||
|
|
||||||
|
- *(flake)* Switch to github ref
|
||||||
|
- *(renovate)* Switch automergeStrategy to auto
|
||||||
|
- *(ci)* Remove update workflow
|
||||||
|
- *(typos)* Typos caught more typos :3
|
||||||
|
- *(renovate)* Branch creation before automerge
|
||||||
|
- *(renovaterc)* Invalid cron syntax
|
||||||
|
- *(deps)* Update rust crate clap to v4.5.24
|
||||||
|
- *(deps)* Update rust crate tokio to v1.43.0
|
||||||
|
- *(deps)* Update rust crate clap to v4.5.25
|
||||||
|
- *(deps)* Update rust crate clap to v4.5.26
|
||||||
|
- *(flake)* Switch overlay to callPackage
|
||||||
|
- *(deps)* Update rust crate clap to v4.5.27
|
||||||
|
- *(deps)* Update rust crate axum to v0.8.2
|
||||||
|
- *(module)* Test both IPv4 and IPv6
|
||||||
|
|
||||||
|
### 🚜 Refactor
|
||||||
|
|
||||||
|
- Setup renovate to manage dependencies
|
||||||
|
|
||||||
|
### ⚙️ Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Update to axum 0.8
|
||||||
|
- Parallelize checks
|
||||||
|
|
||||||
|
## [0.3.4] - 2024-12-26
|
||||||
|
|
||||||
|
### 🐛 Bug Fixes
|
||||||
|
|
||||||
|
- *(main)* Add more logging and default to info
|
||||||
|
|
||||||
|
## [0.3.3] - 2024-12-22
|
||||||
|
|
||||||
|
### 🚀 Features
|
||||||
|
|
||||||
|
- *(ci)* Generate package size report
|
||||||
|
- Add git-cliff to generate changelogs
|
||||||
|
|
||||||
|
### 🐛 Bug Fixes
|
||||||
|
|
||||||
|
- *(webnsupdate)* Reduce binary size
|
||||||
|
- *(ci)* Remove tea
|
||||||
|
|
||||||
|
### ⚙️ Miscellaneous Tasks
|
||||||
|
|
||||||
|
- *(flake.lock)* Update inputs
|
||||||
|
- Cargo update
|
||||||
|
- Generate base changelog
|
||||||
|
|
||||||
|
## [0.3.2] - 2024-11-23
|
||||||
|
|
||||||
|
### 🚀 Features
|
||||||
|
|
||||||
|
- *(ci)* Check depends on build
|
||||||
|
- Upgrade clap_verbosity_flag
|
||||||
|
- Replace axum-auth with tower_http
|
||||||
|
- Release new version
|
||||||
|
|
||||||
|
### 🐛 Bug Fixes
|
||||||
|
|
||||||
|
- *(clippy)* Enable more lints and fix issues
|
||||||
|
|
||||||
|
### 🚜 Refactor
|
||||||
|
|
||||||
|
- Reorganize main.rs
|
||||||
|
|
||||||
|
### ⚙️ Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Cargo update
|
||||||
|
- Update flake inputs
|
||||||
|
|
||||||
|
## [0.3.1] - 2024-10-28
|
||||||
|
|
||||||
|
### 🐛 Bug Fixes
|
||||||
|
|
||||||
|
- Overlay was broken T-T
|
||||||
|
|
||||||
|
### ⚙️ Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Next dev version
|
||||||
|
|
||||||
|
## [0.3.0] - 2024-10-28
|
||||||
|
|
||||||
|
### 🚀 Features
|
||||||
|
|
||||||
|
- *(ci)* Auto-update rust deps
|
||||||
|
- Refactor and add ip saving
|
||||||
|
- Add -v verbosity flag
|
||||||
|
- Use treefmt-nix and split up flake.nix
|
||||||
|
- Add NixOS VM tests
|
||||||
|
- Switch to crane
|
||||||
|
|
||||||
|
### 🐛 Bug Fixes
|
||||||
|
|
||||||
|
- *(fmt)* Use nixfmt-rfc-style
|
||||||
|
- *(default.nix)* Small issues here and there
|
||||||
|
- *(ci)* Do not use a name when logging in
|
||||||
|
|
||||||
|
### 🚜 Refactor
|
||||||
|
|
||||||
|
- *(flake)* Use flake-parts
|
||||||
|
|
||||||
|
### ⚙️ Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Updarte deps
|
||||||
|
- *(flake.lock)* Update inputs
|
||||||
|
- Cargo update
|
||||||
|
- Cargo update
|
||||||
|
- Cargo update
|
||||||
|
|
||||||
|
## [0.2.0] - 2024-06-02
|
||||||
|
|
||||||
|
### 💼 Other
|
||||||
|
|
||||||
|
- Init at version 0.1.0
|
||||||
|
|
||||||
|
<!-- generated by git-cliff -->
|
675  Cargo.lock  (generated)
File diff suppressed because it is too large
58  Cargo.toml
@@ -1,31 +1,49 @@
-cargo-features = ["codegen-backend"]
 [package]
 description = "An HTTP server using HTTP basic auth to make secure calls to nsupdate"
 name = "webnsupdate"
-version = "0.2.0"
+version = "0.3.6"
 edition = "2021"
+license = "MIT"
+license-file = "LICENSE"
+readme = "README.md"
+keywords = ["dns", "dyndns", "dynamic-ip"]
+categories = ["networking", "dns", "dyndns"]
+repository = "https://github.com/jalil-salame/webnsupdate"
+
+[lints.clippy]
+cargo = { level = "warn", priority = -2 }
+multiple_crate_versions = "allow"
+pedantic = { level = "warn", priority = -1 }
 
 [dependencies]
-axum = "0.7.5"
-axum-client-ip = "0.6.0"
-base64 = "0.22.1"
-clap = { version = "4.5.4", features = ["derive", "env"] }
-http = "1.1.0"
-insta = "1.38.0"
-miette = { version = "7.2.0", features = ["fancy"] }
-ring = { version = "0.17.8", features = ["std"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+axum = "0.8"
+axum-client-ip = "1.0"
+base64 = "0.22"
+clap = { version = "4", features = ["derive", "env"] }
+clap-verbosity-flag = { version = "3", default-features = false, features = [
+  "tracing",
+] }
+http = "1"
+humantime = "2.2.0"
+miette = { version = "7", features = ["fancy"] }
+ring = { version = "0.17", features = ["std"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+thiserror = "2"
+tokio = { version = "1", features = ["macros", "rt", "process", "io-util"] }
+tower-http = { version = "0.6", features = ["validate-request"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 
-[dependencies.axum-auth]
-version = "0.7.0"
-default-features = false
-features = ["auth-basic"]
+[dev-dependencies]
+insta = { version = "=1.42.2", features = ["json"] }
 
-[dependencies.tokio]
-version = "1.37.0"
-features = ["macros", "rt", "process", "io-util"]
+[profile.release]
+opt-level = "s"
+panic = "abort"
+lto = true
+strip = true
+codegen-units = 1
 
 [profile.dev]
 debug = 0
-codegen-backend = "cranelift"
85  cliff.toml  (new file)
@@ -0,0 +1,85 @@
|
||||||
|
# git-cliff ~ default configuration file
|
||||||
|
# https://git-cliff.org/docs/configuration
|
||||||
|
#
|
||||||
|
# Lines starting with "#" are comments.
|
||||||
|
# Configuration options are organized into tables and keys.
|
||||||
|
# See documentation for more information on available options.
|
||||||
|
|
||||||
|
[changelog]
|
||||||
|
# template for the changelog header
|
||||||
|
header = """
|
||||||
|
# Changelog\n
|
||||||
|
All notable changes to this project will be documented in this file.\n
|
||||||
|
"""
|
||||||
|
# template for the changelog body
|
||||||
|
# https://keats.github.io/tera/docs/#introduction
|
||||||
|
body = """
|
||||||
|
{% if version %}\
|
||||||
|
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||||
|
{% else %}\
|
||||||
|
## [unreleased]
|
||||||
|
{% endif %}\
|
||||||
|
{% for group, commits in commits | group_by(attribute="group") %}
|
||||||
|
### {{ group | striptags | trim | upper_first }}
|
||||||
|
{% for commit in commits %}
|
||||||
|
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
|
||||||
|
{% if commit.breaking %}[**breaking**] {% endif %}\
|
||||||
|
{{ commit.message | upper_first }}\
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}\n
|
||||||
|
"""
|
||||||
|
# template for the changelog footer
|
||||||
|
footer = """
|
||||||
|
<!-- generated by git-cliff -->
|
||||||
|
"""
|
||||||
|
# remove the leading and trailing s
|
||||||
|
trim = true
|
||||||
|
# postprocessors
|
||||||
|
postprocessors = [
|
||||||
|
# { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
|
||||||
|
]
|
||||||
|
# render body even when there are no releases to process
|
||||||
|
# render_always = true
|
||||||
|
# output file path
|
||||||
|
# output = "test.md"
|
||||||
|
|
||||||
|
[git]
|
||||||
|
# parse the commits based on https://www.conventionalcommits.org
|
||||||
|
conventional_commits = true
|
||||||
|
# filter out the commits that are not conventional
|
||||||
|
filter_unconventional = true
|
||||||
|
# process each line of a commit as an individual commit
|
||||||
|
split_commits = false
|
||||||
|
# regex for preprocessing the commit messages
|
||||||
|
commit_preprocessors = [
|
||||||
|
# Replace issue numbers
|
||||||
|
#{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
|
||||||
|
# Check spelling of the commit with https://github.com/crate-ci/typos
|
||||||
|
# If the spelling is incorrect, it will be automatically fixed.
|
||||||
|
#{ pattern = '.*', replace_command = 'typos --write-changes -' },
|
||||||
|
]
|
||||||
|
# regex for parsing and grouping commits
|
||||||
|
commit_parsers = [
|
||||||
|
{ message = "^feat", group = "<!-- 0 -->🚀 Features" },
|
||||||
|
{ message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
|
||||||
|
{ message = "^doc", group = "<!-- 3 -->📚 Documentation" },
|
||||||
|
{ message = "^perf", group = "<!-- 4 -->⚡ Performance" },
|
||||||
|
{ message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
|
||||||
|
{ message = "^style", group = "<!-- 5 -->🎨 Styling" },
|
||||||
|
{ message = "^test", group = "<!-- 6 -->🧪 Testing" },
|
||||||
|
{ message = "^chore\\(release\\): prepare for", skip = true },
|
||||||
|
{ message = "^chore\\(deps.*\\)", skip = true },
|
||||||
|
{ message = "^chore\\(pr\\)", skip = true },
|
||||||
|
{ message = "^chore\\(pull\\)", skip = true },
|
||||||
|
{ message = "^chore: bump version", skip = true },
|
||||||
|
{ message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
|
||||||
|
{ body = ".*security", group = "<!-- 8 -->🛡️ Security" },
|
||||||
|
{ message = "^revert", group = "<!-- 9 -->◀️ Revert" },
|
||||||
|
{ message = ".*", group = "<!-- 10 -->💼 Other" },
|
||||||
|
]
|
||||||
|
# filter out the commits that are not matched by commit parsers
|
||||||
|
filter_commits = false
|
||||||
|
# sort the tags topologically
|
||||||
|
topo_order = false
|
||||||
|
# sort the commits inside sections by oldest/newest order
|
||||||
|
sort_commits = "oldest"
|
48  default.nix
@@ -1,25 +1,37 @@
|
||||||
{
|
{
|
||||||
lib,
|
pkgs ?
|
||||||
rustPlatform,
|
(builtins.getFlake (builtins.toString ./.)).inputs.nixpkgs.legacyPackages.${builtins.currentSystem},
|
||||||
}: let
|
lib ? pkgs.lib,
|
||||||
readToml = path: builtins.fromTOML (builtins.readFile path);
|
crane ? (builtins.getFlake (builtins.toString ./.)).inputs.crane,
|
||||||
cargoToml = readToml ./Cargo.toml;
|
pkgSrc ? ./.,
|
||||||
pname = cargoToml.package.name;
|
mold ? pkgs.mold,
|
||||||
inherit (cargoToml.package) version description;
|
}:
|
||||||
in
|
let
|
||||||
rustPlatform.buildRustPackage {
|
craneLib = crane.mkLib pkgs;
|
||||||
inherit pname version;
|
src = craneLib.cleanCargoSource pkgSrc;
|
||||||
src = builtins.path {
|
|
||||||
path = ./.;
|
commonArgs = {
|
||||||
name = "${pname}-source";
|
inherit src;
|
||||||
};
|
strictDeps = true;
|
||||||
cargoLock.lockFile = ./Cargo.lock;
|
|
||||||
useNextest = true;
|
doCheck = false; # tests will be run in the `checks` derivation
|
||||||
|
NEXTEST_HIDE_PROGRESS_BAR = 1;
|
||||||
|
NEXTEST_FAILURE_OUTPUT = "immediate-final";
|
||||||
|
|
||||||
|
nativeBuildInputs = [ mold ];
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
inherit description;
|
|
||||||
license = lib.licenses.mit;
|
license = lib.licenses.mit;
|
||||||
homepage = "https://github.com/jalil-salame/webnsupdate";
|
homepage = "https://github.com/jalil-salame/webnsupdate";
|
||||||
mainProgram = "webnsupdate";
|
mainProgram = "webnsupdate";
|
||||||
};
|
};
|
||||||
}
|
};
|
||||||
|
|
||||||
|
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
|
||||||
|
in
|
||||||
|
craneLib.buildPackage (
|
||||||
|
lib.mergeAttrsList [
|
||||||
|
commonArgs
|
||||||
|
{ inherit cargoArtifacts; }
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
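
The updated default.nix now builds with crane instead of rustPlatform and takes its inputs as overridable arguments. A minimal sketch (not part of the diff) of calling it explicitly, the way flake-modules/package.nix does further down; `inputs` here stands for this flake's resolved inputs and is an assumption of the sketch:

```nix
# Sketch only: package default.nix with an explicit nixpkgs and crane,
# mirroring the call made in flake-modules/package.nix.
{ pkgs, inputs }:
pkgs.callPackage ./default.nix {
  inherit (inputs) crane; # crane replaces rustPlatform in this revision
  pkgSrc = inputs.self;   # defaults to ./. when omitted
}
```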
41  flake-modules/default.nix  (new file)
@@ -0,0 +1,41 @@
|
||||||
|
{ inputs, ... }:
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
inputs.treefmt-nix.flakeModule
|
||||||
|
./package.nix
|
||||||
|
./tests.nix
|
||||||
|
];
|
||||||
|
|
||||||
|
flake.nixosModules =
|
||||||
|
let
|
||||||
|
webnsupdate = ../module.nix;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
default = webnsupdate;
|
||||||
|
inherit webnsupdate;
|
||||||
|
};
|
||||||
|
|
||||||
|
perSystem =
|
||||||
|
{ pkgs, ... }:
|
||||||
|
{
|
||||||
|
# Setup formatters
|
||||||
|
treefmt = {
|
||||||
|
projectRootFile = "flake.nix";
|
||||||
|
programs = {
|
||||||
|
nixfmt.enable = true;
|
||||||
|
rustfmt.enable = true;
|
||||||
|
statix.enable = true;
|
||||||
|
typos.enable = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
devShells.default = pkgs.mkShellNoCC {
|
||||||
|
packages = with pkgs; [
|
||||||
|
cargo-insta
|
||||||
|
cargo-udeps
|
||||||
|
mold
|
||||||
|
git-cliff
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
60  flake-modules/package.nix  (new file)
@@ -0,0 +1,60 @@
|
||||||
|
{ inputs, ... }:
|
||||||
|
{
|
||||||
|
flake.overlays.default = final: prev: {
|
||||||
|
webnsupdate = prev.callPackage ../default.nix {
|
||||||
|
inherit (inputs) crane;
|
||||||
|
pkgSrc = inputs.self;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
perSystem =
|
||||||
|
{ pkgs, lib, ... }:
|
||||||
|
let
|
||||||
|
craneLib = inputs.crane.mkLib pkgs;
|
||||||
|
src = craneLib.cleanCargoSource inputs.self;
|
||||||
|
|
||||||
|
commonArgs = {
|
||||||
|
inherit src;
|
||||||
|
strictDeps = true;
|
||||||
|
|
||||||
|
doCheck = false; # tests will be run in the `checks` derivation
|
||||||
|
NEXTEST_HIDE_PROGRESS_BAR = 1;
|
||||||
|
NEXTEST_FAILURE_OUTPUT = "immediate-final";
|
||||||
|
|
||||||
|
nativeBuildInputs = [ pkgs.mold ];
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
license = lib.licenses.mit;
|
||||||
|
homepage = "https://github.com/jalil-salame/webnsupdate";
|
||||||
|
mainProgram = "webnsupdate";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
|
||||||
|
withArtifacts = lib.mergeAttrsList [
|
||||||
|
commonArgs
|
||||||
|
{ inherit cargoArtifacts; }
|
||||||
|
];
|
||||||
|
webnsupdate = pkgs.callPackage ../default.nix {
|
||||||
|
inherit (inputs) crane;
|
||||||
|
pkgSrc = inputs.self;
|
||||||
|
};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
checks = {
|
||||||
|
nextest = craneLib.cargoNextest withArtifacts;
|
||||||
|
clippy = craneLib.cargoClippy (
|
||||||
|
lib.mergeAttrsList [
|
||||||
|
withArtifacts
|
||||||
|
{ cargoClippyExtraArgs = "--all-targets -- --deny warnings"; }
|
||||||
|
]
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
packages = {
|
||||||
|
inherit webnsupdate;
|
||||||
|
inherit (pkgs) git-cliff;
|
||||||
|
default = webnsupdate;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
343  flake-modules/tests.nix  (new file)
@@ -0,0 +1,343 @@
|
||||||
|
{ self, ... }:
|
||||||
|
{
|
||||||
|
perSystem =
|
||||||
|
{ pkgs, self', ... }:
|
||||||
|
{
|
||||||
|
checks =
|
||||||
|
let
|
||||||
|
testDomain = "webnstest.example";
|
||||||
|
lastIPPath = "/var/lib/webnsupdate/last-ip.json";
|
||||||
|
|
||||||
|
zoneFile = pkgs.writeText "${testDomain}.zoneinfo" ''
|
||||||
|
$TTL 600 ; 10 minutes
|
||||||
|
$ORIGIN ${testDomain}.
|
||||||
|
@ IN SOA ns1.${testDomain}. admin.${testDomain}. (
|
||||||
|
1 ; serial
|
||||||
|
6h ; refresh
|
||||||
|
1h ; retry
|
||||||
|
1w ; expire
|
||||||
|
1d) ; negative caching TTL
|
||||||
|
|
||||||
|
IN NS ns1.${testDomain}.
|
||||||
|
@ IN A 127.0.0.1
|
||||||
|
ns1 IN A 127.0.0.1
|
||||||
|
nsupdate IN A 127.0.0.1
|
||||||
|
@ IN AAAA ::1
|
||||||
|
ns1 IN AAAA ::1
|
||||||
|
nsupdate IN AAAA ::1
|
||||||
|
'';
|
||||||
|
|
||||||
|
bindDynamicZone =
|
||||||
|
{ config, ... }:
|
||||||
|
let
|
||||||
|
bindCfg = config.services.bind;
|
||||||
|
bindData = bindCfg.directory;
|
||||||
|
dynamicZonesDir = "${bindData}/zones";
|
||||||
|
in
|
||||||
|
{
|
||||||
|
services.bind.zones.${testDomain} = {
|
||||||
|
master = true;
|
||||||
|
file = "${dynamicZonesDir}/${testDomain}";
|
||||||
|
extraConfig = ''
|
||||||
|
allow-update { key rndc-key; };
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
systemd.services.bind.preStart = ''
|
||||||
|
# shellcheck disable=SC2211,SC1127
|
||||||
|
rm -f ${dynamicZonesDir}/* # reset dynamic zones
|
||||||
|
|
||||||
|
# create a dynamic zones dir
|
||||||
|
mkdir -m 0755 -p ${dynamicZonesDir}
|
||||||
|
# copy dynamic zone's file to the dynamic zones dir
|
||||||
|
cp ${zoneFile} ${dynamicZonesDir}/${testDomain}
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
webnsupdate-ipv4-machine =
|
||||||
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
bindDynamicZone
|
||||||
|
self.nixosModules.webnsupdate
|
||||||
|
];
|
||||||
|
|
||||||
|
config = {
|
||||||
|
environment.systemPackages = [
|
||||||
|
pkgs.dig
|
||||||
|
pkgs.curl
|
||||||
|
];
|
||||||
|
|
||||||
|
services = {
|
||||||
|
bind.enable = true;
|
||||||
|
|
||||||
|
webnsupdate = {
|
||||||
|
enable = true;
|
||||||
|
package = self'.packages.webnsupdate;
|
||||||
|
extraArgs = [ "-vvv" ]; # debug messages
|
||||||
|
settings = {
|
||||||
|
address = lib.mkDefault "127.0.0.1:5353";
|
||||||
|
key_file = "/etc/bind/rndc.key";
|
||||||
|
password_file = pkgs.writeText "webnsupdate.pass" "FQoNmuU1BKfg8qsU96F6bK5ykp2b0SLe3ZpB3nbtfZA"; # test:test
|
||||||
|
ip_source = lib.mkDefault "ConnectInfo";
|
||||||
|
records = [
|
||||||
|
"test1.${testDomain}."
|
||||||
|
"test2.${testDomain}."
|
||||||
|
"test3.${testDomain}."
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
webnsupdate-ipv6-machine = {
|
||||||
|
imports = [
|
||||||
|
webnsupdate-ipv4-machine
|
||||||
|
];
|
||||||
|
|
||||||
|
config.services.webnsupdate.settings.address = "[::1]:5353";
|
||||||
|
};
|
||||||
|
|
||||||
|
webnsupdate-nginx-machine =
|
||||||
|
{ lib, config, ... }:
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
webnsupdate-ipv4-machine
|
||||||
|
];
|
||||||
|
|
||||||
|
config.services = {
|
||||||
|
# Use default IP Source
|
||||||
|
webnsupdate.settings.ip_source = "RightmostXForwardedFor";
|
||||||
|
|
||||||
|
nginx = {
|
||||||
|
enable = true;
|
||||||
|
recommendedProxySettings = true;
|
||||||
|
|
||||||
|
virtualHosts.webnsupdate.locations."/".proxyPass =
|
||||||
|
"http://${config.services.webnsupdate.settings.address}";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
webnsupdate-ipv4-only-machine = {
|
||||||
|
imports = [ webnsupdate-nginx-machine ];
|
||||||
|
config.services.webnsupdate.settings.ip_type = "Ipv4Only";
|
||||||
|
};
|
||||||
|
|
||||||
|
webnsupdate-ipv6-only-machine = {
|
||||||
|
imports = [ webnsupdate-nginx-machine ];
|
||||||
|
config.services.webnsupdate.settings.ip_type = "Ipv6Only";
|
||||||
|
};
|
||||||
|
|
||||||
|
# "A" for IPv4, "AAAA" for IPv6, "ANY" for any
|
||||||
|
testTemplate =
|
||||||
|
{
|
||||||
|
ipv4 ? false,
|
||||||
|
ipv6 ? false,
|
||||||
|
nginx ? false,
|
||||||
|
exclusive ? false,
|
||||||
|
}:
|
||||||
|
if exclusive && (ipv4 == ipv6) then
|
||||||
|
builtins.throw "exclusive means one of ipv4 or ipv6 must be set, but not both"
|
||||||
|
else
|
||||||
|
''
|
||||||
|
IPV4: bool = ${if ipv4 then "True" else "False"}
|
||||||
|
IPV6: bool = ${if ipv6 then "True" else "False"}
|
||||||
|
NGINX: bool = ${if nginx then "True" else "False"}
|
||||||
|
EXCLUSIVE: bool = ${if exclusive then "True" else "False"}
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
|
||||||
|
CURL: str = "curl --fail --no-progress-meter --show-error"
|
||||||
|
|
||||||
|
machine.start(allow_reboot=True)
|
||||||
|
machine.wait_for_unit("bind.service")
|
||||||
|
machine.wait_for_unit("webnsupdate.service")
|
||||||
|
|
||||||
|
STATIC_DOMAINS: list[str] = ["${testDomain}", "ns1.${testDomain}", "nsupdate.${testDomain}"]
|
||||||
|
DYNAMIC_DOMAINS: list[str] = ["test1.${testDomain}", "test2.${testDomain}", "test3.${testDomain}"]
|
||||||
|
|
||||||
|
def dig_cmd(domain: str, record: str, ip: str | None) -> tuple[str, str]:
|
||||||
|
match_ip = "" if ip is None else f"\\s\\+600\\s\\+IN\\s\\+{record}\\s\\+{ip}$"
|
||||||
|
return f"dig @localhost {record} {domain} +noall +answer", f"grep '^{domain}.{match_ip}'"
|
||||||
|
|
||||||
|
def curl_cmd(domain: str, identity: str, path: str, query: dict[str, str]) -> str:
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
q= f"?{urlencode(query)}" if query else ""
|
||||||
|
return f"{CURL} -u {identity} -X GET 'http://{domain}{"" if NGINX else ":5353"}/{path}{q}'"
|
||||||
|
|
||||||
|
def domain_available(domain: str, record: str, ip: str | None=None):
|
||||||
|
dig, grep = dig_cmd(domain, record, ip)
|
||||||
|
rc, output = machine.execute(dig)
|
||||||
|
print(f"{dig}[{rc}]: {output}")
|
||||||
|
machine.succeed(f"{dig} | {grep}")
|
||||||
|
|
||||||
|
def domain_missing(domain: str, record: str, ip: str | None=None):
|
||||||
|
dig, grep = dig_cmd(domain, record, ip)
|
||||||
|
rc, output = machine.execute(dig)
|
||||||
|
print(f"{dig}[{rc}]: {output}")
|
||||||
|
machine.fail(f"{dig} | {grep}")
|
||||||
|
|
||||||
|
def update_records(domain: str="localhost", /, *, path: str="update", **kwargs):
|
||||||
|
machine.succeed(curl_cmd(domain, "test:test", path, kwargs))
|
||||||
|
machine.succeed("cat ${lastIPPath}")
|
||||||
|
|
||||||
|
def update_records_fail(domain: str="localhost", /, *, identity: str="test:test", path: str="update", **kwargs):
|
||||||
|
machine.fail(curl_cmd(domain, identity, path, kwargs))
|
||||||
|
machine.fail("cat ${lastIPPath}")
|
||||||
|
|
||||||
|
def invalid_update(domain: str="localhost"):
|
||||||
|
update_records_fail(domain, identity="bad_user:test")
|
||||||
|
update_records_fail(domain, identity="test:bad_pass")
|
||||||
|
|
||||||
|
# Tests
|
||||||
|
|
||||||
|
with subtest("static DNS records are available"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
for domain in STATIC_DOMAINS:
|
||||||
|
domain_available(domain, "A", "127.0.0.1") # IPv4
|
||||||
|
domain_available(domain, "AAAA", "::1") # IPv6
|
||||||
|
|
||||||
|
with subtest("dynamic DNS records are missing"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
for domain in DYNAMIC_DOMAINS:
|
||||||
|
domain_missing(domain, "A") # IPv4
|
||||||
|
domain_missing(domain, "AAAA") # IPv6
|
||||||
|
|
||||||
|
with subtest("invalid auth fails to update records"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
invalid_update()
|
||||||
|
for domain in DYNAMIC_DOMAINS:
|
||||||
|
domain_missing(domain, "A") # IPv4
|
||||||
|
domain_missing(domain, "AAAA") # IPv6
|
||||||
|
|
||||||
|
if EXCLUSIVE:
|
||||||
|
with subtest("exclusive IP version fails to update with invalid version"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
if IPV6:
|
||||||
|
update_records_fail("127.0.0.1")
|
||||||
|
if IPV4:
|
||||||
|
update_records_fail("[::1]")
|
||||||
|
|
||||||
|
with subtest("valid auth updates records"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
if IPV4:
|
||||||
|
update_records("127.0.0.1")
|
||||||
|
if IPV6:
|
||||||
|
update_records("[::1]")
|
||||||
|
|
||||||
|
for domain in DYNAMIC_DOMAINS:
|
||||||
|
if IPV4:
|
||||||
|
domain_available(domain, "A", "127.0.0.1")
|
||||||
|
elif IPV6 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "A")
|
||||||
|
|
||||||
|
if IPV6:
|
||||||
|
domain_available(domain, "AAAA", "::1")
|
||||||
|
elif IPV4 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "AAAA")
|
||||||
|
|
||||||
|
with subtest("valid auth fritzbox compatible updates records"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
if IPV4 and IPV6:
|
||||||
|
update_records("127.0.0.1", domain="test", ipv4="1.2.3.4", ipv6="::1234")
|
||||||
|
elif IPV4:
|
||||||
|
update_records("127.0.0.1", ipv4="1.2.3.4", ipv6="")
|
||||||
|
elif IPV6:
|
||||||
|
update_records("[::1]", ipv4="", ipv6="::1234")
|
||||||
|
|
||||||
|
for domain in DYNAMIC_DOMAINS:
|
||||||
|
if IPV4:
|
||||||
|
domain_available(domain, "A", "1.2.3.4")
|
||||||
|
elif IPV6 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "A")
|
||||||
|
|
||||||
|
if IPV6:
|
||||||
|
domain_available(domain, "AAAA", "::1234")
|
||||||
|
elif IPV4 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "AAAA")
|
||||||
|
|
||||||
|
with subtest("valid auth replaces records"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
if IPV4:
|
||||||
|
update_records("127.0.0.1")
|
||||||
|
if IPV6:
|
||||||
|
update_records("[::1]")
|
||||||
|
|
||||||
|
for domain in DYNAMIC_DOMAINS:
|
||||||
|
if IPV4:
|
||||||
|
domain_available(domain, "A", "127.0.0.1")
|
||||||
|
elif IPV6 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "A")
|
||||||
|
|
||||||
|
if IPV6:
|
||||||
|
domain_available(domain, "AAAA", "::1")
|
||||||
|
elif IPV4 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "AAAA")
|
||||||
|
|
||||||
|
machine.reboot()
|
||||||
|
machine.succeed("cat ${lastIPPath}")
|
||||||
|
machine.wait_for_unit("webnsupdate.service")
|
||||||
|
machine.succeed("cat ${lastIPPath}")
|
||||||
|
|
||||||
|
with subtest("static DNS records are available after reboot"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
for domain in STATIC_DOMAINS:
|
||||||
|
domain_available(domain, "A", "127.0.0.1") # IPv4
|
||||||
|
domain_available(domain, "AAAA", "::1") # IPv6
|
||||||
|
|
||||||
|
with subtest("dynamic DNS records are available after reboot"):
|
||||||
|
print(f"{IPV4=} {IPV6=} {EXCLUSIVE=}")
|
||||||
|
for domain in DYNAMIC_DOMAINS:
|
||||||
|
if IPV4:
|
||||||
|
domain_available(domain, "A", "127.0.0.1")
|
||||||
|
elif IPV6 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "A")
|
||||||
|
|
||||||
|
if IPV6:
|
||||||
|
domain_available(domain, "AAAA", "::1")
|
||||||
|
elif IPV4 and EXCLUSIVE:
|
||||||
|
domain_missing(domain, "AAAA")
|
||||||
|
'';
|
||||||
|
in
|
||||||
|
{
|
||||||
|
module-ipv4-test = pkgs.testers.nixosTest {
|
||||||
|
name = "webnsupdate-ipv4-module";
|
||||||
|
nodes.machine = webnsupdate-ipv4-machine;
|
||||||
|
testScript = testTemplate { ipv4 = true; };
|
||||||
|
};
|
||||||
|
module-ipv6-test = pkgs.testers.nixosTest {
|
||||||
|
name = "webnsupdate-ipv6-module";
|
||||||
|
nodes.machine = webnsupdate-ipv6-machine;
|
||||||
|
testScript = testTemplate { ipv6 = true; };
|
||||||
|
};
|
||||||
|
module-nginx-test = pkgs.testers.nixosTest {
|
||||||
|
name = "webnsupdate-nginx-module";
|
||||||
|
nodes.machine = webnsupdate-nginx-machine;
|
||||||
|
testScript = testTemplate {
|
||||||
|
ipv4 = true;
|
||||||
|
ipv6 = true;
|
||||||
|
nginx = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
module-ipv4-only-test = pkgs.testers.nixosTest {
|
||||||
|
name = "webnsupdate-ipv4-only-module";
|
||||||
|
nodes.machine = webnsupdate-ipv4-only-machine;
|
||||||
|
testScript = testTemplate {
|
||||||
|
ipv4 = true;
|
||||||
|
nginx = true;
|
||||||
|
exclusive = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
module-ipv6-only-test = pkgs.testers.nixosTest {
|
||||||
|
name = "webnsupdate-ipv6-only-module";
|
||||||
|
nodes.machine = webnsupdate-ipv6-only-machine;
|
||||||
|
testScript = testTemplate {
|
||||||
|
ipv6 = true;
|
||||||
|
nginx = true;
|
||||||
|
exclusive = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
77  flake.lock  (generated)
@@ -1,24 +1,63 @@
|
||||||
{
|
{
|
||||||
"nodes": {
|
"nodes": {
|
||||||
"nixpkgs": {
|
"crane": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1716948383,
|
"lastModified": 1743908961,
|
||||||
"narHash": "sha256-SzDKxseEcHR5KzPXLwsemyTR/kaM9whxeiJohbL04rs=",
|
"narHash": "sha256-e1idZdpnnHWuosI3KsBgAgrhMR05T2oqskXCmNzGPq0=",
|
||||||
"owner": "NixOS",
|
"owner": "ipetkov",
|
||||||
"repo": "nixpkgs",
|
"repo": "crane",
|
||||||
"rev": "ad57eef4ef0659193044870c731987a6df5cf56b",
|
"rev": "80ceeec0dc94ef967c371dcdc56adb280328f591",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"id": "nixpkgs",
|
"owner": "ipetkov",
|
||||||
|
"repo": "crane",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flake-parts": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs-lib": [
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1743550720,
|
||||||
|
"narHash": "sha256-hIshGgKZCgWh6AYJpJmRgFdR3WUbkY04o82X05xqQiY=",
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "flake-parts",
|
||||||
|
"rev": "c621e8422220273271f52058f618c94e405bb0f5",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "flake-parts",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1744098102,
|
||||||
|
"narHash": "sha256-tzCdyIJj9AjysC3OuKA+tMD/kDEDAF9mICPDU7ix0JA=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "c8cd81426f45942bb2906d5ed2fe21d2f19d95b7",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
"ref": "nixos-unstable",
|
"ref": "nixos-unstable",
|
||||||
"type": "indirect"
|
"repo": "nixpkgs",
|
||||||
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
|
"crane": "crane",
|
||||||
|
"flake-parts": "flake-parts",
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"systems": "systems"
|
"systems": "systems",
|
||||||
|
"treefmt-nix": "treefmt-nix"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"systems": {
|
"systems": {
|
||||||
|
@@ -35,6 +74,26 @@
|
||||||
"repo": "default",
|
"repo": "default",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"treefmt-nix": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": [
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1743748085,
|
||||||
|
"narHash": "sha256-uhjnlaVTWo5iD3LXics1rp9gaKgDRQj6660+gbUU3cE=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "treefmt-nix",
|
||||||
|
"rev": "815e4121d6a5d504c0f96e5be2dd7f871e4fd99d",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "treefmt-nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"root": "root",
|
"root": "root",
|
||||||
|
|
50  flake.nix
@@ -1,39 +1,23 @@
|
||||||
{
|
{
|
||||||
description = "An http server that calls nsupdate internally";
|
description = "An http server that calls nsupdate internally";
|
||||||
inputs = {
|
inputs = {
|
||||||
nixpkgs.url = "nixpkgs/nixos-unstable";
|
crane.url = "github:ipetkov/crane";
|
||||||
systems.url = "github:nix-systems/default";
|
flake-parts = {
|
||||||
};
|
url = "github:hercules-ci/flake-parts";
|
||||||
|
inputs.nixpkgs-lib.follows = "nixpkgs";
|
||||||
outputs = {
|
};
|
||||||
self,
|
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||||
nixpkgs,
|
systems.url = "github:nix-systems/default";
|
||||||
systems,
|
treefmt-nix = {
|
||||||
}: let
|
url = "github:numtide/treefmt-nix";
|
||||||
forEachSupportedSystem = nixpkgs.lib.genAttrs (import systems);
|
inputs.nixpkgs.follows = "nixpkgs";
|
||||||
in {
|
|
||||||
formatter = forEachSupportedSystem (system: nixpkgs.legacyPackages.${system}.alejandra);
|
|
||||||
|
|
||||||
packages = forEachSupportedSystem (system: {
|
|
||||||
default = nixpkgs.legacyPackages.${system}.callPackage ./default.nix {};
|
|
||||||
});
|
|
||||||
|
|
||||||
overlays.default = final: prev: {
|
|
||||||
webnsupdate = final.callPackage ./default.nix {};
|
|
||||||
};
|
};
|
||||||
|
|
||||||
nixosModules.default = ./module.nix;
|
|
||||||
|
|
||||||
devShells = forEachSupportedSystem (system: let
|
|
||||||
pkgs = nixpkgs.legacyPackages.${system};
|
|
||||||
in {
|
|
||||||
default = pkgs.mkShell {
|
|
||||||
packages = [
|
|
||||||
pkgs.cargo-insta
|
|
||||||
pkgs.cargo-udeps
|
|
||||||
pkgs.mold
|
|
||||||
];
|
|
||||||
};
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
outputs =
|
||||||
|
inputs:
|
||||||
|
inputs.flake-parts.lib.mkFlake { inherit inputs; } {
|
||||||
|
imports = [ ./flake-modules ];
|
||||||
|
systems = import inputs.systems;
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
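
With the switch to flake-parts, the flake still exposes `nixosModules.default` and `overlays.default` (wired up in flake-modules/). A hedged sketch of consuming both from a downstream flake; the flake URL is inferred from the `repository` field in Cargo.toml and may differ for the Forgejo mirror. The overlay makes `pkgs.webnsupdate` available, which the module's `package` option defaults to:

```nix
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  inputs.webnsupdate.url = "github:jalil-salame/webnsupdate"; # assumed URL

  outputs = { nixpkgs, webnsupdate, ... }: {
    nixosConfigurations.dyndns-host = nixpkgs.lib.nixosSystem {
      system = "x86_64-linux";
      modules = [
        webnsupdate.nixosModules.default
        { nixpkgs.overlays = [ webnsupdate.overlays.default ]; }
      ];
    };
  };
}
```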
2  justfile  (new file)
@@ -0,0 +1,2 @@
changelog version:
    git cliff --unreleased --prepend=CHANGELOG.md --tag='{{ version }}'
206  module.nix
@@ -1,15 +1,18 @@
|
||||||
|
{ lib, pkgs, ... }@args:
|
||||||
|
let
|
||||||
|
cfg = args.config.services.webnsupdate;
|
||||||
|
inherit (lib)
|
||||||
|
mkOption
|
||||||
|
mkEnableOption
|
||||||
|
mkPackageOption
|
||||||
|
types
|
||||||
|
;
|
||||||
|
format = pkgs.formats.json { };
|
||||||
|
in
|
||||||
{
|
{
|
||||||
lib,
|
|
||||||
pkgs,
|
|
||||||
config,
|
|
||||||
...
|
|
||||||
}: let
|
|
||||||
cfg = config.services.webnsupdate;
|
|
||||||
inherit (lib) mkOption mkEnableOption types;
|
|
||||||
in {
|
|
||||||
options.services.webnsupdate = mkOption {
|
options.services.webnsupdate = mkOption {
|
||||||
description = "An HTTP server for nsupdate.";
|
description = "An HTTP server for nsupdate.";
|
||||||
default = {};
|
default = { };
|
||||||
type = types.submodule {
|
type = types.submodule {
|
||||||
options = {
|
options = {
|
||||||
enable = mkEnableOption "webnsupdate";
|
enable = mkEnableOption "webnsupdate";
|
||||||
|
@@ -18,75 +21,79 @@ in {
|
||||||
Extra arguments to be passed to the webnsupdate server command.
|
Extra arguments to be passed to the webnsupdate server command.
|
||||||
'';
|
'';
|
||||||
type = types.listOf types.str;
|
type = types.listOf types.str;
|
||||||
default = [];
|
default = [ ];
|
||||||
example = ["--ip-source"];
|
example = [ "--ip-source" ];
|
||||||
};
|
};
|
||||||
bindIp = mkOption {
|
package = mkPackageOption pkgs "webnsupdate" { };
|
||||||
description = ''
|
settings = mkOption {
|
||||||
IP address to bind to.
|
description = "The webnsupdate JSON configuration";
|
||||||
|
default = { };
|
||||||
|
type = types.submodule {
|
||||||
|
freeformType = format.type;
|
||||||
|
options = {
|
||||||
|
address = mkOption {
|
||||||
|
description = ''
|
||||||
|
IP address and port to bind to.
|
||||||
|
|
||||||
Setting it to anything other than localhost is very insecure as
|
Setting it to anything other than localhost is very
|
||||||
`webnsupdate` only supports plain HTTP and should always be behind a
|
insecure as `webnsupdate` only supports plain HTTP and
|
||||||
reverse proxy.
|
should always be behind a reverse proxy.
|
||||||
'';
|
'';
|
||||||
type = types.str;
|
type = types.str;
|
||||||
default = "localhost";
|
default = "127.0.0.1:5353";
|
||||||
example = "0.0.0.0";
|
example = "[::1]:5353";
|
||||||
};
|
};
|
||||||
bindPort = mkOption {
|
ip_type = mkOption {
|
||||||
description = "Port to bind to.";
|
description = ''The allowed IP versions to accept updates from.'';
|
||||||
type = types.port;
|
type = types.enum [
|
||||||
default = 5353;
|
"Both"
|
||||||
};
|
"Ipv4Only"
|
||||||
passwordFile = mkOption {
|
"Ipv6Only"
|
||||||
description = ''
|
];
|
||||||
The file where the password is stored.
|
default = "Both";
|
||||||
|
example = "Ipv4Only";
|
||||||
|
};
|
||||||
|
password_file = mkOption {
|
||||||
|
description = ''
|
||||||
|
The file where the password is stored.
|
||||||
|
|
||||||
This file can be created by running `webnsupdate mkpasswd $USERNAME $PASSWORD`.
|
This file can be created by running `webnsupdate mkpasswd $USERNAME $PASSWORD`.
|
||||||
'';
|
'';
|
||||||
type = types.path;
|
type = types.path;
|
||||||
example = "/secrets/webnsupdate.pass";
|
example = "/secrets/webnsupdate.pass";
|
||||||
};
|
};
|
||||||
keyFile = mkOption {
|
key_file = mkOption {
|
||||||
description = ''
|
description = ''
|
||||||
The TSIG key that `nsupdate` should use.
|
The TSIG key that `nsupdate` should use.
|
||||||
|
|
||||||
This file will be passed to `nsupdate` through the `-k` option, so look
|
This file will be passed to `nsupdate` through the `-k` option, so look
|
||||||
at `man 8 nsupdate` for information on the key's format.
|
at `man 8 nsupdate` for information on the key's format.
|
||||||
'';
|
'';
|
||||||
type = types.path;
|
type = types.path;
|
||||||
example = "/secrets/webnsupdate.key";
|
example = "/secrets/webnsupdate.key";
|
||||||
};
|
};
|
||||||
ttl = mkOption {
|
ttl = mkOption {
|
||||||
description = "The TTL that should be set on the zone records created by `nsupdate`.";
|
description = "The TTL that should be set on the zone records created by `nsupdate`.";
|
||||||
type = types.ints.positive;
|
default = "10m";
|
||||||
default = 60;
|
example = "60s";
|
||||||
example = 3600;
|
type = types.str;
|
||||||
};
|
};
|
||||||
records = mkOption {
|
records = mkOption {
|
||||||
description = ''
|
description = ''
|
||||||
The fqdn of records that should be updated.
|
The fqdn of records that should be updated.
|
||||||
|
|
||||||
Empty lines will be ignored, but whitespace will not be.
|
Empty lines will be ignored, but whitespace will not be.
|
||||||
'';
|
'';
|
||||||
type = types.nullOr types.lines;
|
type = types.listOf types.str;
|
||||||
default = null;
|
default = [ ];
|
||||||
example = ''
|
example = [
|
||||||
example.com.
|
"example.com."
|
||||||
|
"example.org."
|
||||||
example.org.
|
"ci.example.org."
|
||||||
ci.example.org.
|
];
|
||||||
'';
|
};
|
||||||
};
|
};
|
||||||
recordsFile = mkOption {
|
};
|
||||||
description = ''
|
|
||||||
The fqdn of records that should be updated.
|
|
||||||
|
|
||||||
Empty lines will be ignored, but whitespace will not be.
|
|
||||||
'';
|
|
||||||
type = types.nullOr types.path;
|
|
||||||
default = null;
|
|
||||||
example = "/secrets/webnsupdate.records";
|
|
||||||
};
|
};
|
||||||
user = mkOption {
|
user = mkOption {
|
||||||
description = "The user to run as.";
|
description = "The user to run as.";
|
||||||
|
@@ -102,46 +109,30 @@
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
config = let
|
config =
|
||||||
recordsFile =
|
let
|
||||||
if cfg.recordsFile != null
|
configFile = format.generate "webnsupdate.json" cfg.settings;
|
||||||
then cfg.recordsFile
|
args = lib.strings.escapeShellArgs ([ "--config=${configFile}" ] ++ cfg.extraArgs);
|
||||||
else pkgs.writeText "webnsrecords" cfg.records;
|
cmd = "${lib.getExe cfg.package} ${args}";
|
||||||
args = lib.strings.escapeShellArgs [
|
in
|
||||||
"--records"
|
|
||||||
recordsFile
|
|
||||||
"--key-file"
|
|
||||||
cfg.keyFile
|
|
||||||
"--password-file"
|
|
||||||
cfg.passwordFile
|
|
||||||
"--address"
|
|
||||||
cfg.bindIp
|
|
||||||
"--port"
|
|
||||||
(builtins.toString cfg.bindPort)
|
|
||||||
"--ttl"
|
|
||||||
(builtins.toString cfg.ttl)
|
|
||||||
] ++ cfg.extraArgs;
|
|
||||||
cmd = "${lib.getExe pkgs.webnsupdate} ${args}";
|
|
||||||
in
|
|
||||||
lib.mkIf cfg.enable {
|
lib.mkIf cfg.enable {
|
||||||
|
# FIXME: re-enable once I stop using the patched version of bind
|
||||||
# warnings =
|
# warnings =
|
||||||
# lib.optional (!config.services.bind.enable) "`webnsupdate` is expected to be used alongside `bind`. This is an unsopported configuration.";
|
# lib.optional (!config.services.bind.enable) "`webnsupdate` is expected to be used alongside `bind`. This is an unsupported configuration.";
|
||||||
assertions = [
|
|
||||||
{
|
|
||||||
assertion = (cfg.records != null || cfg.recordsFile != null) && !(cfg.records != null && cfg.recordsFile != null);
|
|
||||||
message = "Exactly one of `services.webnsupdate.records` and `services.webnsupdate.recordsFile` must be set.";
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
systemd.services.webnsupdate = {
|
systemd.services.webnsupdate = {
|
||||||
description = "Web interface for nsupdate.";
|
description = "Web interface for nsupdate.";
|
||||||
wantedBy = ["multi-user.target"];
|
wantedBy = [ "multi-user.target" ];
|
||||||
after = ["network.target" "bind.service"];
|
after = [
|
||||||
preStart = "${cmd} verify";
|
"network.target"
|
||||||
path = [pkgs.dig];
|
"bind.service"
|
||||||
|
];
|
||||||
|
preStart = "${lib.getExe cfg.package} verify ${configFile}";
|
||||||
|
path = [ pkgs.dig ];
|
||||||
startLimitIntervalSec = 60;
|
startLimitIntervalSec = 60;
|
||||||
|
environment.DATA_DIR = "%S/webnsupdate";
|
||||||
serviceConfig = {
|
serviceConfig = {
|
||||||
ExecStart = [cmd];
|
ExecStart = [ cmd ];
|
||||||
Type = "exec";
|
Type = "exec";
|
||||||
Restart = "on-failure";
|
Restart = "on-failure";
|
||||||
RestartSec = "10s";
|
RestartSec = "10s";
|
||||||
|
@@ -157,6 +148,9 @@
|
||||||
# Logs directory and mode
|
# Logs directory and mode
|
||||||
LogsDirectory = "webnsupdate";
|
LogsDirectory = "webnsupdate";
|
||||||
LogsDirectoryMode = "0750";
|
LogsDirectoryMode = "0750";
|
||||||
|
# State directory and mode
|
||||||
|
StateDirectory = "webnsupdate";
|
||||||
|
StateDirectoryMode = "0750";
|
||||||
# New file permissions
|
# New file permissions
|
||||||
UMask = "0027";
|
UMask = "0027";
|
||||||
# Security
|
# Security
|
||||||
|
|
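
The module options moved from individual flags (bindIp, bindPort, keyFile, passwordFile, records, ...) to a freeform `settings` attribute set that is serialized to JSON via `pkgs.formats.json` and passed to the binary with `--config`. A minimal sketch of the new interface, assembled from the options above and the values used in flake-modules/tests.nix; the paths and record names are placeholders:

```nix
{
  services.webnsupdate = {
    enable = true;
    extraArgs = [ "-v" ]; # optional: raise log verbosity
    settings = {
      address = "127.0.0.1:5353";      # replaces bindIp + bindPort
      key_file = "/etc/bind/rndc.key"; # replaces keyFile
      password_file = "/secrets/webnsupdate.pass";
      ip_type = "Both";                # or "Ipv4Only" / "Ipv6Only"
      ttl = "10m";                     # now a duration string, not seconds
      records = [
        "example.com."
        "example.org."
      ];
    };
  };
}
```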
104  src/auth.rs  (new file)
@@ -0,0 +1,104 @@
|
||||||
|
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||||
|
use base64::Engine;
|
||||||
|
use tower_http::validate_request::ValidateRequestHeaderLayer;
|
||||||
|
use tracing::{trace, warn};
|
||||||
|
|
||||||
|
use crate::password;
|
||||||
|
|
||||||
|
pub fn layer<'a, ResBody>(
|
||||||
|
user_pass_hash: &'a [u8],
|
||||||
|
salt: &'a str,
|
||||||
|
) -> ValidateRequestHeaderLayer<Basic<'a, ResBody>> {
|
||||||
|
ValidateRequestHeaderLayer::custom(Basic::new(user_pass_hash, salt))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy)]
|
||||||
|
pub struct Basic<'a, ResBody> {
|
||||||
|
pass: &'a [u8],
|
||||||
|
salt: &'a str,
|
||||||
|
_ty: std::marker::PhantomData<fn() -> ResBody>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<ResBody> std::fmt::Debug for Basic<'_, ResBody> {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.debug_struct("BasicAuth")
|
||||||
|
.field("pass", &self.pass)
|
||||||
|
.field("salt", &self.salt)
|
||||||
|
.field("_ty", &self._ty)
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<ResBody> Clone for Basic<'_, ResBody> {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
Self {
|
||||||
|
pass: self.pass,
|
||||||
|
salt: self.salt,
|
||||||
|
_ty: std::marker::PhantomData,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, ResBody> Basic<'a, ResBody> {
|
||||||
|
pub fn new(pass: &'a [u8], salt: &'a str) -> Self {
|
||||||
|
Self {
|
||||||
|
pass,
|
||||||
|
salt,
|
||||||
|
_ty: std::marker::PhantomData,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn check_headers(&self, headers: &http::HeaderMap<http::HeaderValue>) -> bool {
|
||||||
|
let Some(auth) = headers.get(http::header::AUTHORIZATION) else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Poor man's split once: https://doc.rust-lang.org/std/primitive.slice.html#method.split_once
|
||||||
|
let Some(index) = auth.as_bytes().iter().position(|&c| c == b' ') else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
let user_pass = &auth.as_bytes()[index + 1..];
|
||||||
|
|
||||||
|
match base64::engine::general_purpose::URL_SAFE.decode(user_pass) {
|
||||||
|
Ok(user_pass) => {
|
||||||
|
let hashed = password::hash_basic_auth(&user_pass, self.salt);
|
||||||
|
if hashed.as_ref() == self.pass {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
warn!("rejected update");
|
||||||
|
trace!(
|
||||||
|
"mismatched hashes:\nprovided: {}\nstored: {}",
|
||||||
|
URL_SAFE_NO_PAD.encode(hashed.as_ref()),
|
||||||
|
URL_SAFE_NO_PAD.encode(self.pass),
|
||||||
|
);
|
||||||
|
false
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
warn!("received invalid base64 when decoding Basic header: {err}");
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<B, ResBody> tower_http::validate_request::ValidateRequest<B> for Basic<'_, ResBody>
|
||||||
|
where
|
||||||
|
ResBody: Default,
|
||||||
|
{
|
||||||
|
type ResponseBody = ResBody;
|
||||||
|
|
||||||
|
fn validate(
|
||||||
|
&mut self,
|
||||||
|
request: &mut http::Request<B>,
|
||||||
|
) -> std::result::Result<(), http::Response<Self::ResponseBody>> {
|
||||||
|
if self.check_headers(request.headers()) {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut res = http::Response::new(ResBody::default());
|
||||||
|
*res.status_mut() = http::status::StatusCode::UNAUTHORIZED;
|
||||||
|
res.headers_mut()
|
||||||
|
.insert(http::header::WWW_AUTHENTICATE, "Basic".parse().unwrap());
|
||||||
|
Err(res)
|
||||||
|
}
|
||||||
|
}
|
253  src/config.rs  (new file)
@@ -0,0 +1,253 @@
|
||||||
|
use std::{
|
||||||
|
fs::File,
|
||||||
|
    net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr},
    path::PathBuf,
};

use axum_client_ip::ClientIpSource;
use miette::{Context, IntoDiagnostic};

#[derive(Debug, Default, Clone, Copy, serde::Deserialize, serde::Serialize)]
pub enum IpType {
    #[default]
    Both,
    Ipv4Only,
    Ipv6Only,
}

impl IpType {
    pub fn valid_for_type(self, ip: IpAddr) -> bool {
        match self {
            IpType::Both => true,
            IpType::Ipv4Only => ip.is_ipv4(),
            IpType::Ipv6Only => ip.is_ipv6(),
        }
    }
}

impl std::fmt::Display for IpType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            IpType::Both => f.write_str("both"),
            IpType::Ipv4Only => f.write_str("ipv4-only"),
            IpType::Ipv6Only => f.write_str("ipv6-only"),
        }
    }
}

impl std::str::FromStr for IpType {
    type Err = miette::Error;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s {
            "both" => Ok(Self::Both),
            "ipv4-only" => Ok(Self::Ipv4Only),
            "ipv6-only" => Ok(Self::Ipv6Only),
            _ => miette::bail!("expected one of 'ipv4-only', 'ipv6-only' or 'both', got '{s}'"),
        }
    }
}

/// Webserver settings
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct Server {
    /// IP address and port of the server
    #[serde(default = "default_address")]
    pub address: SocketAddr,
}

/// Password settings
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct Password {
    /// File containing the password to match against
    ///
    /// Should be of the format `username:password` and contain a single password
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password_file: Option<PathBuf>,

    /// Salt to get more unique hashed passwords and prevent table-based attacks
    #[serde(default = "default_salt")]
    pub salt: Box<str>,
}

/// Records settings
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct Records {
    /// Time To Live (in seconds) to set on the DNS records
    #[serde(
        default = "default_ttl",
        serialize_with = "humantime_ser",
        deserialize_with = "humantime_de"
    )]
    pub ttl: humantime::Duration,

    /// List of domain names for which to update the IP when an update is requested
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    #[allow(clippy::struct_field_names)]
    pub records: Vec<Box<str>>,

    /// If set, when an IPv6 prefix is provided with an update, it will be used to derive
    /// the full IPv6 address of the client
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub client_id: Option<Ipv6Addr>,

    /// If a client id is provided, the IPv6 update will be ignored (only the prefix will be used).
    /// This domain will point to the IPv6 address instead of the address derived from the client
    /// id (usually this is the router).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub router_domain: Option<Box<str>>,

    /// Set client IP source
    ///
    /// see: <https://docs.rs/axum-client-ip/latest/axum_client_ip/enum.ClientIpSource.html>
    #[serde(default = "default_ip_source")]
    pub ip_source: ClientIpSource,

    /// Set which IPs to allow updating (ipv4, ipv6 or both)
    #[serde(default = "default_ip_type")]
    pub ip_type: IpType,

    /// Keyfile `nsupdate` should use
    ///
    /// If specified, then `webnsupdate` must have read access to the file
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key_file: Option<PathBuf>,
}

#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct Config {
    /// Server Configuration
    #[serde(flatten)]
    pub server: Server,

    /// Password Configuration
    #[serde(flatten)]
    pub password: Password,

    /// Records Configuration
    #[serde(flatten)]
    pub records: Records,

    /// The config schema (used for lsp completions)
    #[serde(default, rename = "$schema", skip_serializing)]
    pub _schema: serde::de::IgnoredAny,
}

impl Config {
    /// Load the configuration without verifying it
    pub fn load(path: &std::path::Path) -> miette::Result<Self> {
        serde_json::from_reader::<File, Self>(
            File::open(path)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to open {}", path.display()))?,
        )
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to load configuration from {}", path.display()))
    }

    /// Ensure only a verified configuration is returned
    pub fn verified(self) -> miette::Result<Self> {
        self.verify()?;
        Ok(self)
    }

    /// Verify the configuration
    pub fn verify(&self) -> Result<(), Invalid> {
        let mut invalid_records: Vec<miette::Error> = self
            .records
            .records
            .iter()
            .filter_map(|record| crate::records::validate_record_str(record).err())
            .collect();

        invalid_records.extend(
            self.records
                .router_domain
                .as_ref()
                .and_then(|domain| crate::records::validate_record_str(domain).err()),
        );

        let err = Invalid { invalid_records };

        if err.invalid_records.is_empty() {
            Ok(())
        } else {
            Err(err)
        }
    }
}

#[derive(Debug, miette::Diagnostic, thiserror::Error)]
#[error("the configuration was invalid")]
pub struct Invalid {
    #[related]
    pub invalid_records: Vec<miette::Error>,
}

// --- Default Values (sadly serde doesn't have a way to specify a constant as a default value) ---

fn default_ttl() -> humantime::Duration {
    super::DEFAULT_TTL.into()
}

fn default_salt() -> Box<str> {
    super::DEFAULT_SALT.into()
}

fn default_address() -> SocketAddr {
    SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 5353)
}

fn default_ip_source() -> ClientIpSource {
    ClientIpSource::RightmostXForwardedFor
}

fn default_ip_type() -> IpType {
    IpType::Both
}

fn humantime_de<'de, D>(de: D) -> Result<humantime::Duration, D::Error>
where
    D: serde::Deserializer<'de>,
{
    struct Visitor;
    impl serde::de::Visitor<'_> for Visitor {
        type Value = humantime::Duration;

        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
            write!(formatter, "a duration (e.g. 5s)")
        }

        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            v.parse().map_err(E::custom)
        }
    }
    de.deserialize_str(Visitor)
}

fn humantime_ser<S>(duration: &humantime::Duration, ser: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    ser.serialize_str(&duration.to_string())
}

#[test]
fn default_values_config_snapshot() {
    let config: Config = serde_json::from_str("{}").unwrap();
    insta::assert_json_snapshot!(config, @r#"
    {
      "address": "127.0.0.1:5353",
      "salt": "UpdateMyDNS",
      "ttl": {
        "secs": 60,
        "nanos": 0
      },
      "ip_source": "RightmostXForwardedFor",
      "ip_type": "Both"
    }
    "#);
}
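Putting the pieces above together, here is a minimal sketch of how this configuration is meant to be consumed (not part of the diff; the path, record name, and values are hypothetical, but the field names come from the structs above):

// Sketch only: a hypothetical /etc/webnsupdate.json that the flattened
// Server/Password/Records structs would accept.
//
// {
//   "address": "127.0.0.1:5353",
//   "password_file": "/secrets/webnsupdate-password",
//   "records": ["dyn.example.org."],
//   "ttl": "60s",
//   "ip_type": "Both"
// }
fn load_example() -> miette::Result<Config> {
    // Load, then run the record validation before handing the config out.
    Config::load(std::path::Path::new("/etc/webnsupdate.json"))?.verified()
}

Note that the `$schema` key is deserialized into `IgnoredAny` and skipped on serialization, so a config that carries a schema reference for editor completions deserializes unchanged.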
1113  src/main.rs (file diff suppressed because it is too large)

156  src/nsupdate.rs (new file)
@@ -0,0 +1,156 @@
use std::{
    ffi::OsStr,
    net::IpAddr,
    path::Path,
    process::{ExitStatus, Stdio},
    time::Duration,
};

use tokio::io::AsyncWriteExt;
use tracing::{debug, warn};

pub enum Action<'a> {
    // Reassign a domain to a different IP
    Reassign {
        domain: &'a str,
        to: IpAddr,
        ttl: Duration,
    },
}

impl<'a> Action<'a> {
    /// Create a set of [`Action`]s reassigning the domains in `records` to the specified
    /// [`IpAddr`]
    pub fn from_records(
        to: IpAddr,
        ttl: Duration,
        records: &'a [&'a str],
    ) -> impl IntoIterator<Item = Self> + std::iter::ExactSizeIterator + 'a {
        records
            .iter()
            .map(move |&domain| Action::Reassign { domain, to, ttl })
    }
}

impl std::fmt::Display for Action<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Action::Reassign { domain, to, ttl } => {
                let ttl = ttl.as_secs();
                let kind = match to {
                    IpAddr::V4(_) => "A",
                    IpAddr::V6(_) => "AAAA",
                };
                // Delete previous record of type `kind`
                writeln!(f, "update delete {domain} {ttl} IN {kind}")?;
                // Add record with new IP
                writeln!(f, "update add {domain} {ttl} IN {kind} {to}")
            }
        }
    }
}

#[tracing::instrument(level = "trace", skip(actions), ret(level = "warn"))]
pub async fn nsupdate(
    key_file: Option<&Path>,
    actions: impl IntoIterator<Item = Action<'_>>,
) -> std::io::Result<ExitStatus> {
    let mut cmd = tokio::process::Command::new("nsupdate");
    if let Some(key_file) = key_file {
        cmd.args([OsStr::new("-k"), key_file.as_os_str()]);
    }
    debug!("spawning new process");
    let mut child = cmd
        .stdin(Stdio::piped())
        .spawn()
        .inspect_err(|err| warn!("failed to spawn child: {err}"))?;
    let mut stdin = child.stdin.take().expect("stdin not present");
    debug!("sending update request");
    let mut buf = Vec::new();
    update_ns_records(&mut buf, actions).unwrap();
    stdin
        .write_all(&buf)
        .await
        .inspect_err(|err| warn!("failed to write to the stdin of nsupdate: {err}"))?;

    debug!("closing stdin");
    stdin
        .shutdown()
        .await
        .inspect_err(|err| warn!("failed to close stdin to nsupdate: {err}"))?;
    debug!("waiting for nsupdate to exit");
    child
        .wait()
        .await
        .inspect_err(|err| warn!("failed to wait for child: {err}"))
}

fn update_ns_records<'a>(
    mut buf: impl std::io::Write,
    actions: impl IntoIterator<Item = Action<'a>>,
) -> std::io::Result<()> {
    writeln!(buf, "server 127.0.0.1")?;
    for action in actions {
        write!(buf, "{action}")?;
    }
    writeln!(buf, "send")?;
    writeln!(buf, "quit")
}

#[cfg(test)]
mod test {
    use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};

    use insta::assert_snapshot;

    use super::{update_ns_records, Action};
    use crate::DEFAULT_TTL;

    #[test]
    #[allow(non_snake_case)]
    fn expected_update_string_A() {
        let mut buf = Vec::new();
        let actions = Action::from_records(
            IpAddr::V4(Ipv4Addr::LOCALHOST),
            DEFAULT_TTL,
            &["example.com.", "example.org.", "example.net."],
        );
        update_ns_records(&mut buf, actions).unwrap();

        assert_snapshot!(String::from_utf8(buf).unwrap(), @r###"
        server 127.0.0.1
        update delete example.com. 60 IN A
        update add example.com. 60 IN A 127.0.0.1
        update delete example.org. 60 IN A
        update add example.org. 60 IN A 127.0.0.1
        update delete example.net. 60 IN A
        update add example.net. 60 IN A 127.0.0.1
        send
        quit
        "###);
    }

    #[test]
    #[allow(non_snake_case)]
    fn expected_update_string_AAAA() {
        let mut buf = Vec::new();
        let actions = Action::from_records(
            IpAddr::V6(Ipv6Addr::LOCALHOST),
            DEFAULT_TTL,
            &["example.com.", "example.org.", "example.net."],
        );
        update_ns_records(&mut buf, actions).unwrap();

        assert_snapshot!(String::from_utf8(buf).unwrap(), @r###"
        server 127.0.0.1
        update delete example.com. 60 IN AAAA
        update add example.com. 60 IN AAAA ::1
        update delete example.org. 60 IN AAAA
        update add example.org. 60 IN AAAA ::1
        update delete example.net. 60 IN AAAA
        update add example.net. 60 IN AAAA ::1
        send
        quit
        "###);
    }
}
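For orientation, a minimal sketch of how the API above composes (not part of the diff; the record names and key file path are hypothetical): `Action::from_records` builds one `Reassign` per record, and `nsupdate` renders the actions into a script that is piped to the `nsupdate` binary over stdin.

// Sketch only: hypothetical records and key file path.
async fn reassign_example() -> std::io::Result<()> {
    let records = ["dyn.example.org.", "home.example.org."];
    let actions = Action::from_records(
        IpAddr::V4(std::net::Ipv4Addr::LOCALHOST),
        Duration::from_secs(60),
        &records,
    );
    // Runs `nsupdate -k /etc/webnsupdate/keyfile` and feeds it the update script.
    let status = nsupdate(Some(Path::new("/etc/webnsupdate/keyfile")), actions).await?;
    assert!(status.success());
    Ok(())
}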
80  src/password.rs (new file)
@@ -0,0 +1,80 @@
//! Make a password for use with webnsupdate
//!
//! You should call this command and give its output to the app/script that will update the DNS
//! records
use std::io::Write;
use std::os::unix::fs::OpenOptionsExt;
use std::path::PathBuf;

use base64::prelude::*;
use miette::{Context, IntoDiagnostic, Result};
use ring::digest::Digest;

/// Create a password file
///
/// If `--password-file` is provided, the password is written to that file
#[derive(Debug, clap::Args)]
pub struct Mkpasswd {
    /// The username
    username: String,

    /// The password
    password: String,

    /// An application-specific value
    #[arg(long, default_value = crate::DEFAULT_SALT)]
    salt: String,

    /// The file to write the password to
    password_file: Option<PathBuf>,
}

impl Mkpasswd {
    pub fn process(self, _args: &crate::Opts) -> Result<()> {
        mkpasswd(self)
    }
}

pub fn hash_basic_auth(user_pass: &[u8], salt: &str) -> Digest {
    let mut context = ring::digest::Context::new(&ring::digest::SHA256);
    context.update(user_pass);
    context.update(salt.as_bytes());
    context.finish()
}

pub fn hash_identity(username: &str, password: &str, salt: &str) -> Digest {
    let mut context = ring::digest::Context::new(&ring::digest::SHA256);
    context.update(username.as_bytes());
    context.update(b":");
    context.update(password.as_bytes());
    context.update(salt.as_bytes());
    context.finish()
}

pub fn mkpasswd(
    Mkpasswd {
        username,
        password,
        salt,
        password_file,
    }: Mkpasswd,
) -> miette::Result<()> {
    let hash = hash_identity(&username, &password, &salt);
    let encoded = BASE64_URL_SAFE_NO_PAD.encode(hash.as_ref());
    let Some(path) = password_file.as_deref() else {
        println!("{encoded}");
        return Ok(());
    };
    let err = || format!("trying to save password hash to {}", path.display());
    std::fs::File::options()
        .mode(0o600)
        // `create_new` only creates the file when write access is requested
        .write(true)
        .create_new(true)
        .open(path)
        .into_diagnostic()
        .wrap_err_with(err)?
        .write_all(encoded.as_bytes())
        .into_diagnostic()
        .wrap_err_with(err)?;

    Ok(())
}
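A sketch of a property the two helpers above share (not part of the diff; the credentials are made up): hashing `username`, `":"`, and `password` as separate updates produces the same digest as hashing the pre-joined `username:password` buffer, which is presumably what allows a decoded `Authorization: Basic` value to be checked against a hash produced by `mkpasswd`.

// Sketch only: hypothetical credentials; any user/password pair behaves the same.
#[cfg(test)]
mod equivalence_sketch {
    use super::{hash_basic_auth, hash_identity};

    #[test]
    fn split_and_joined_hashes_agree() {
        let joined = hash_basic_auth(b"alice:correct-horse", crate::DEFAULT_SALT);
        let split = hash_identity("alice", "correct-horse", crate::DEFAULT_SALT);
        assert_eq!(joined.as_ref(), split.as_ref());
    }
}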
187  src/records.rs (new file)
@@ -0,0 +1,187 @@
//! Deal with the DNS records

use miette::{ensure, miette, LabeledSpan, Result};

pub fn validate_record_str(record: &str) -> Result<()> {
    validate_line(0, record).map_err(|err| err.with_source_code(String::from(record)))
}

fn validate_line(offset: usize, line: &str) -> Result<()> {
    if line.is_empty() {
        return Ok(());
    }

    ensure!(
        line.len() <= 255,
        miette!(
            labels = [LabeledSpan::new(
                Some("this line".to_string()),
                offset,
                line.len(),
            )],
            help = "fully qualified domain names can be at most 255 characters long",
            url = "https://en.wikipedia.org/wiki/Fully_qualified_domain_name",
            "hostname too long ({} octets)",
            line.len(),
        )
    );
    ensure!(
        line.ends_with('.'),
        miette!(
            labels = [LabeledSpan::new(
                Some("last character".to_string()),
                offset + line.len() - 1,
                1,
            )],
            help = "hostname should be a fully qualified domain name (end with a '.')",
            url = "https://en.wikipedia.org/wiki/Fully_qualified_domain_name",
            "not a fully qualified domain name"
        )
    );

    let mut label_offset = 0usize;
    for label in line.strip_suffix('.').unwrap_or(line).split('.') {
        validate_label(offset + label_offset, label)?;
        label_offset += label.len() + 1;
    }

    Ok(())
}

fn validate_label(offset: usize, label: &str) -> Result<()> {
    ensure!(
        !label.is_empty(),
        miette!(
            labels = [LabeledSpan::new(
                Some("label".to_string()),
                offset,
                label.len(),
            )],
            help = "each label should have at least one character",
            url = "https://en.wikipedia.org/wiki/Fully_qualified_domain_name",
            "empty label",
        )
    );
    ensure!(
        label.len() <= 63,
        miette!(
            labels = [LabeledSpan::new(
                Some("label".to_string()),
                offset,
                label.len(),
            )],
            help = "labels should be at most 63 octets",
            url = "https://en.wikipedia.org/wiki/Fully_qualified_domain_name",
            "label too long ({} octets)",
            label.len(),
        )
    );

    for (octet_offset, octet) in label.bytes().enumerate() {
        validate_octet(offset + octet_offset, octet)?;
    }

    Ok(())
}

fn validate_octet(offset: usize, octet: u8) -> Result<()> {
    let spans = || [LabeledSpan::new(Some("octet".to_string()), offset, 1)];
    ensure!(
        octet.is_ascii(),
        miette!(
            labels = spans(),
            help = "we only accept ascii characters",
            url = "https://en.wikipedia.org/wiki/Hostname#Syntax",
            "invalid octet: '{}'",
            octet.escape_ascii(),
        )
    );

    ensure!(
        octet.is_ascii_alphanumeric() || octet == b'-' || octet == b'_',
        miette!(
            labels = spans(),
            help = "hostnames are only allowed to contain characters in [a-zA-Z0-9_-]",
            url = "https://en.wikipedia.org/wiki/Hostname#Syntax",
            "invalid octet: '{}'",
            octet.escape_ascii(),
        )
    );

    Ok(())
}

#[cfg(test)]
mod test {
    use crate::records::validate_record_str;

    macro_rules! assert_miette_snapshot {
        ($diag:expr) => {{
            use std::borrow::Borrow;

            use insta::{with_settings, assert_snapshot};
            use miette::{GraphicalReportHandler, GraphicalTheme};

            let mut out = String::new();
            GraphicalReportHandler::new_themed(GraphicalTheme::unicode_nocolor())
                .with_width(80)
                .render_report(&mut out, $diag.borrow())
                .unwrap();
            with_settings!({
                description => stringify!($diag)
            }, {
                assert_snapshot!(out);
            });
        }};
    }

    #[test]
    fn valid_records() -> miette::Result<()> {
        for record in [
            "example.com.",
            "example.org.",
            "example.net.",
            "subdomain.example.com.",
        ] {
            validate_record_str(record)?;
        }
        Ok(())
    }

    #[test]
    fn hostname_too_long() {
        let err = validate_record_str("example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.net.").unwrap_err();
        assert_miette_snapshot!(err);
    }

    #[test]
    fn not_fqd() {
        let err = validate_record_str("example.net").unwrap_err();
        assert_miette_snapshot!(err);
    }

    #[test]
    fn empty_label() {
        let err = validate_record_str("name..example.org.").unwrap_err();
        assert_miette_snapshot!(err);
    }

    #[test]
    fn label_too_long() {
        let err = validate_record_str("name.an-entremely-long-label-that-should-not-exist-because-it-goes-against-the-spec.example.org.").unwrap_err();
        assert_miette_snapshot!(err);
    }

    #[test]
    fn invalid_ascii() {
        let err = validate_record_str("name.this-is-not-ascii-ß.example.org.").unwrap_err();
        assert_miette_snapshot!(err);
    }

    #[test]
    fn invalid_octet() {
        let err =
            validate_record_str("name.this-character:-is-not-allowed.example.org.").unwrap_err();
        assert_miette_snapshot!(err);
    }
}
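A small sketch of the validator's contract (not part of the diff; the hostnames are illustrative): a record must be a fully qualified domain name, so the trailing dot is required and every label is checked against the `[a-zA-Z0-9_-]` alphabet and the 63-octet limit.

// Sketch only: illustrative hostnames.
fn validation_examples() {
    // Accepted: fully qualified, ASCII alphanumerics and hyphens, short labels.
    assert!(validate_record_str("home.example.org.").is_ok());
    // Rejected: missing the trailing dot, so it is not a fully qualified domain name.
    assert!(validate_record_str("home.example.org").is_err());
}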
14  src/snapshots/webnsupdate__records__test__empty_label.snap (new file)
@@ -0,0 +1,14 @@
---
source: src/records.rs
description: err
expression: out
---
]8;;https://en.wikipedia.org/wiki/Fully_qualified_domain_name\(link)]8;;\

  × empty label
   ╭────
 1 │ name..example.org.
   ·      ▲
   ·      ╰── label
   ╰────
  help: each label should have at least one character
@@ -0,0 +1,14 @@
---
source: src/records.rs
description: err
expression: out
---
]8;;https://en.wikipedia.org/wiki/Fully_qualified_domain_name\(link)]8;;\

  × hostname too long (260 octets)
   ╭────
 1 │ example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.example.net.
   · ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
   ·                                                                                                                                    ╰── this line
   ╰────
  help: fully qualified domain names can be at most 255 characters long
14  src/snapshots/webnsupdate__records__test__invalid_ascii.snap (new file)
@@ -0,0 +1,14 @@
---
source: src/records.rs
description: err
expression: out
---
]8;;https://en.wikipedia.org/wiki/Hostname#Syntax\(link)]8;;\

  × invalid octet: '\xc3'
   ╭────
 1 │ name.this-is-not-ascii-ß.example.org.
   ·                        ┬
   ·                        ╰── octet
   ╰────
  help: we only accept ascii characters
14  src/snapshots/webnsupdate__records__test__invalid_octet.snap (new file)
@@ -0,0 +1,14 @@
---
source: src/records.rs
description: err
expression: out
---
]8;;https://en.wikipedia.org/wiki/Hostname#Syntax\(link)]8;;\

  × invalid octet: ':'
   ╭────
 1 │ name.this-character:-is-not-allowed.example.org.
   ·                    ┬
   ·                    ╰── octet
   ╰────
  help: hostnames are only allowed to contain characters in [a-zA-Z0-9_-]
@@ -0,0 +1,14 @@
---
source: src/records.rs
description: err
expression: out
---
]8;;https://en.wikipedia.org/wiki/Fully_qualified_domain_name\(link)]8;;\

  × label too long (78 octets)
   ╭────
 1 │ name.an-entremely-long-label-that-should-not-exist-because-it-goes-against-the-spec.example.org.
   ·      ───────────────────────────────────────┬──────────────────────────────────────
   ·                                              ╰── label
   ╰────
  help: labels should be at most 63 octets
14  src/snapshots/webnsupdate__records__test__not_fqd.snap (new file)
@@ -0,0 +1,14 @@
---
source: src/records.rs
description: err
expression: out
---
]8;;https://en.wikipedia.org/wiki/Fully_qualified_domain_name\(link)]8;;\

  × not a fully qualified domain name
   ╭────
 1 │ example.net
   ·           ┬
   ·           ╰── last character
   ╰────
  help: hostname should be a fully qualified domain name (end with a '.')