Compare commits
405 Commits
v0.3
...
21e3a9b9bb
| Author | SHA1 | Date | |
|---|---|---|---|
| 21e3a9b9bb | |||
| ca0c9c28fd | |||
| 1edc61d6e6 | |||
| b20bba529a | |||
| fb0948787a | |||
| 625457e474 | |||
| d4b7cabcf2 | |||
| b2727c7a96 | |||
| ff3cbfbdba | |||
| 44602e7bc2 | |||
| 1706df7236 | |||
| 80fc28c44a | |||
| 7335c0fc62 | |||
| a536e688c9 | |||
| 0d22b43ed0 | |||
| 9d7f566209 | |||
| 1e6f965f7a | |||
| 6183a58204 | |||
| 2a4934b01a | |||
| fc4261b7ac | |||
| 7d248c5ea3 | |||
| 8d4af73364 | |||
|
|
4c738d027a | ||
|
|
f2fa3411d8 | ||
|
|
19443bc4ca | ||
| 60a777276f | |||
|
|
1262ec5a33 | ||
|
|
4c6b69e125 | ||
|
|
28881a23a9 | ||
|
|
e796362e6b | ||
|
|
37094a3295 | ||
|
|
100bac8f87 | ||
|
|
fdafa3aa81 | ||
|
|
d267564bca | ||
|
|
2cc477e551 | ||
|
|
8cbdd9f408 | ||
|
|
21be3e40dd | ||
|
|
a3824c2671 | ||
|
|
8eeb010c3a | ||
|
|
956b172426 | ||
| 9b4ed2837e | |||
| 701e64df6f | |||
| 23fc942db7 | |||
| 38bba1472e | |||
| 7d131bbacf | |||
| 6e79dd318a | |||
| 5fc1d2148f | |||
| de184de5a0 | |||
| 8a3b2ae1c5 | |||
| 93e38b0572 | |||
| 43bbb8e143 | |||
| 0fd9ce68b8 | |||
| 68f2b81e3f | |||
| d7a39fa4e4 | |||
| 1f646fd5f8 | |||
| 96cbf8c568 | |||
| bf199c1b17 | |||
| 0f7a70c895 | |||
| 3151b97bc0 | |||
| 8ce5cfecd4 | |||
| 6da27c6444 | |||
| 3026b3e6de | |||
| 725414cc71 | |||
| defb8fafca | |||
| f747c085c9 | |||
| 85dd794b53 | |||
| be8d85cb66 | |||
| 0b7527fc7d | |||
| 3a568a774a | |||
| a6ecb66547 | |||
| 8a04db8130 | |||
| d5bbbe6171 | |||
| c6a27525fd | |||
| 5880066531 | |||
| 918b63047b | |||
| 0fa2a65c81 | |||
| 87d5b7ad85 | |||
| 7db3596302 | |||
| e65c744f9c | |||
| bd79602d3a | |||
| 6e876aaefc | |||
| 04753e8d9c | |||
| 5811476e27 | |||
| 5ac814b857 | |||
| 3a87772606 | |||
| 001911bed9 | |||
| d1d729c33d | |||
| b4cabae4ec | |||
| 196bbeb2c2 | |||
| 4d60012f44 | |||
| b2f413b033 | |||
| c25bb8bf55 | |||
| 3b923e3e13 | |||
|
|
061265bbd0 | ||
| a08a8d2000 | |||
| fea0299c95 | |||
| 444930199c | |||
| 95704b9a40 | |||
| 8d300827d0 | |||
| 9ab79b120a | |||
| 8cc9470aca | |||
| 9e95701a6e | |||
| fe90401688 | |||
| aeaaee9915 | |||
| 52b024c1ba | |||
| 3a95613132 | |||
| 2ea2c994d8 | |||
| 04686b6dfa | |||
| 78201d4759 | |||
| 5fe6600dc3 | |||
| 2a65f78cd4 | |||
| 4852dad71e | |||
| 0746be904a | |||
| 53c2ee404c | |||
| bd694c3f7d | |||
| 95e9fcbffe | |||
| 98665a3231 | |||
| 7a51ad135f | |||
| e386935bc7 | |||
| c62562e6f0 | |||
| 00e37996b7 | |||
| a7e2c61984 | |||
| 58919b2d58 | |||
| dd36eb886f | |||
|
|
d2e01db0ae | ||
| bfd7b01ea4 | |||
| da7a499da0 | |||
| 64965c32dd | |||
| 3207bdfdfb | |||
| d8dd604174 | |||
| 7ca9459675 | |||
| 989b0cdcce | |||
| 64d8397092 | |||
| a1b054a672 | |||
| 193c96c5aa | |||
| ee973432be | |||
| 38d0252101 | |||
| 280048264e | |||
| 129111273d | |||
| d62a19d741 | |||
| e34a6243c0 | |||
| 4464bb607b | |||
| 48fa888f9b | |||
| b4eafd0b41 | |||
| fa83063c61 | |||
| 7d8fbb844e | |||
| 494c6ecb3e | |||
| 91a37cb12d | |||
| 4e21a3daad | |||
| 0e9c8d0c01 | |||
| 512de5e187 | |||
| f027191896 | |||
| ee44fa40fd | |||
| e78dcf471a | |||
| 056480f65a | |||
| 3eabc0e8f8 | |||
| d7ab3c4d6b | |||
| 09ce9f043e | |||
| eac22148c5 | |||
| 92ec2e1a2d | |||
| 88961e1c6b | |||
| 8c384741b3 | |||
| 2053512559 | |||
| ad7ef9277e | |||
| 95da48b5e6 | |||
| 664cfb8965 | |||
| ba4240720c | |||
| ec04618a73 | |||
| 6dc298146a | |||
| 09606cfc27 | |||
| 465f877d6a | |||
| 763e014b44 | |||
| 474e0b60f9 | |||
| 10af4d7448 | |||
| 94bfe971b3 | |||
| b77c442f56 | |||
| a3f9c9fda1 | |||
| 2a0a591194 | |||
| 23526ae62b | |||
| addff12c17 | |||
| c56765ce26 | |||
| d18c49982e | |||
| 58db521b5b | |||
| c21fb5813b | |||
| 33a5a1a262 | |||
| df8e69bce2 | |||
| 58fdcfba9f | |||
| 27ef86c1b4 | |||
| 9fc34e6989 | |||
| 4b79b6dd1d | |||
| d0cbc2f985 | |||
| d53e28668b | |||
| 0b8896d11d | |||
| 8c0c3ad169 | |||
| aebed5639d | |||
| 4514de9ff5 | |||
| 31b9757ef3 | |||
| defb3d1b7d | |||
| e6b654e990 | |||
| 29ddc647e3 | |||
| 67c3e40108 | |||
| 7363ed48b4 | |||
| 96943c1483 | |||
| 9f7195282f | |||
| 30480fb568 | |||
| c3aaea3332 | |||
| fad6f71876 | |||
| 73158e3d47 | |||
| 6f4ae88260 | |||
| a8f8803a92 | |||
| 581a513ebd | |||
| f1e212ead9 | |||
| bc3001a4e6 | |||
| c4fd1d0452 | |||
| 1a65a163a1 | |||
| 4f68a563c6 | |||
| e04e8ceeeb | |||
|
|
b2542b341e | ||
| d402c1f8ce | |||
| e75aead3a8 | |||
| dca2b3c9b4 | |||
| a71711978e | |||
| 90d188e01e | |||
| 2e6166e807 | |||
| 8aaaa55d45 | |||
| df39bb3076 | |||
| bc3d4e1c49 | |||
| 32eb4676ee | |||
| 5d7480f7a4 | |||
| 2d34ba1bd7 | |||
| 5b78c3ba9e | |||
| 95cffc5f0e | |||
| 4841920c64 | |||
| c439595d92 | |||
| 62c1e430b2 | |||
| 1212917fae | |||
| f41b9b1684 | |||
| b17f4d68ef | |||
| b8c552fb62 | |||
| f2d2482476 | |||
| 6ef759a14e | |||
| 10e02c20a1 | |||
| 433dc090e0 | |||
| 35e7c34d11 | |||
| 50a0f4d766 | |||
| 1db3eadd4c | |||
| af45b13612 | |||
| c994c90247 | |||
| 3e8aad2221 | |||
| 127dd0535e | |||
| 664d44eddc | |||
| ad206297d8 | |||
| f2f1d5bcaf | |||
| 881a33dc96 | |||
| 38c66cad62 | |||
| 7ad51ccb47 | |||
| dd65f2cd81 | |||
| f01568a695 | |||
| be085e9b0f | |||
| 3557dd2686 | |||
| 908094f48b | |||
| c3c1c98913 | |||
| e940ab69fb | |||
| 1cf4e85014 | |||
| 14c95f2704 | |||
| 5f878793fd | |||
| fd6400ed68 | |||
| faf68e2052 | |||
| 7296795aec | |||
| 88252fffc8 | |||
| e67f5a7db4 | |||
| 87e0247b48 | |||
| d490d3ab84 | |||
| f7870797ac | |||
| 17ffc793e0 | |||
| d3738f0887 | |||
| 7da879d483 | |||
| c0bb71f84f | |||
| 230f380a6a | |||
| 852f445b1f | |||
| 584f68ba42 | |||
| 92092ed4af | |||
| fadf687a3e | |||
| 3a18870537 | |||
| cf80678ccc | |||
| 08ce4b6add | |||
| 39075a6269 | |||
| 906ead80a4 | |||
| 7038661296 | |||
| 543bf94a51 | |||
| 453f73c2a0 | |||
| 7e673200c8 | |||
| 44a716248e | |||
| d20006a325 | |||
| f8adec1413 | |||
| 868269359c | |||
| 61d4a4a0d8 | |||
| 4e4de95a07 | |||
| 9b64de7991 | |||
| e45de3b498 | |||
| 6e4c388195 | |||
| 6436a8194e | |||
| f10ae25b2a | |||
| fd6b3b7438 | |||
| d68ff012f2 | |||
| 9aad65edac | |||
| c370ef5815 | |||
| 8f5b743ea4 | |||
| c0e981dbd4 | |||
| 4303621b30 | |||
| 63e04a9dcf | |||
| 08ee946f2e | |||
| 81de5a2d70 | |||
| 3bf118f99a | |||
| dbf93c8f0e | |||
| 1ae0ceff60 | |||
| 07fa3ca291 | |||
| ab1892cbeb | |||
| ed1edf5075 | |||
| e6e9940757 | |||
| ff48b2a017 | |||
| ec45678ce3 | |||
| 9acf5b10d5 | |||
| eaf8e2bfa2 | |||
| 31b90af066 | |||
| f9d9dc587a | |||
| a94bd19362 | |||
| ef8a57c60e | |||
| 7a2fa7ae3f | |||
| 599973e10a | |||
| 0485facf33 | |||
| 2f6405ea10 | |||
| a4e993b7fc | |||
| 0fb9a22d47 | |||
| 76130c5b48 | |||
| 9f6c84d78c | |||
| fc91ee7a01 | |||
| c0168c3650 | |||
| 717b0d3a74 | |||
| ef381c7421 | |||
| fcbad5a3eb | |||
| cbc9792755 | |||
| ae9a928d45 | |||
| 3ff7b61518 | |||
| 3ac88260b5 | |||
| 54fc48b37d | |||
| 61a8d63374 | |||
| 92e4856dd9 | |||
| 02e9de0cbd | |||
| 552b3a6aad | |||
| fcc22791e5 | |||
| b183590096 | |||
| 27586b5ff0 | |||
| 70eac10eaa | |||
| 1e941e02ed | |||
| 1ffc522d51 | |||
| 66814876a8 | |||
| 3501b785c9 | |||
| 24badb9b64 | |||
| 4c94ead06f | |||
| 7514d1fd3e | |||
| d85c98c3ef | |||
| e3563fcaa6 | |||
| 1a1231b672 | |||
| a3ccea9dcb | |||
| 4a9f1bc278 | |||
| 0b181b9b79 | |||
| 6d747d8e89 | |||
| 72dd861677 | |||
| 59c6164c1f | |||
| 4722d5a8ff | |||
| 4eb88260c8 | |||
| a51e5f8918 | |||
| c66620a0fc | |||
| f0c8805cf3 | |||
| da601c2d5f | |||
| ed06c52c8f | |||
| 14b200ee3d | |||
| d5eddc4476 | |||
| d26a76d064 | |||
| d39df526de | |||
| 6e6050c71b | |||
| 98580d32ad | |||
| 115eb7c74a | |||
|
|
8b7b91d005 | ||
| 7aa45c7768 | |||
| a065de5b2d | |||
| d976ccefc2 | |||
| de186901d0 | |||
| c9f4d41780 | |||
| b3906c646a | |||
| 43c47bdca6 | |||
| df0b5728fc | |||
| d0b78686e2 | |||
| f02a0fc17a | |||
| 4e83aba672 | |||
| e2e55b8e79 | |||
| 655379cd61 | |||
| 340085abf8 | |||
| f5f8dfa188 | |||
| e43d4bf3cd | |||
| 48f3bc0199 | |||
| 0973ae36b8 | |||
| 09c67d4908 | |||
| 47841dadfb |
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
github: hakoerber
|
||||
3
CONTRIBUTING.md
Normal file
3
CONTRIBUTING.md
Normal file
@@ -0,0 +1,3 @@
|
||||
Check out [the developer
|
||||
documentation](https://hakoerber.github.io/git-repo-manager/developing.html) it
|
||||
you want to contribute!
|
||||
3
CONTRIBUTORS
Normal file
3
CONTRIBUTORS
Normal file
@@ -0,0 +1,3 @@
|
||||
nonnominandus
|
||||
Maximilian Volk
|
||||
Baptiste (@BapRx)
|
||||
1144
Cargo.lock
generated
1144
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
52
Cargo.toml
52
Cargo.toml
@@ -1,7 +1,8 @@
|
||||
[package]
|
||||
name = "git-repo-manager"
|
||||
version = "0.3.0"
|
||||
version = "0.7.13"
|
||||
edition = "2021"
|
||||
|
||||
authors = [
|
||||
"Hannes Körber <hannes@hkoerber.de>",
|
||||
]
|
||||
@@ -26,40 +27,69 @@ rust-version = "1.57"
|
||||
|
||||
license = "GPL-3.0-only"
|
||||
|
||||
[profile.e2e-tests]
|
||||
inherits = "dev"
|
||||
|
||||
[lib]
|
||||
name = "grm"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "grm"
|
||||
path = "src/main.rs"
|
||||
path = "src/grm/main.rs"
|
||||
|
||||
[dependencies]
|
||||
|
||||
[dependencies.toml]
|
||||
version = "0.5.8"
|
||||
version = "=0.7.3"
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1.0.130"
|
||||
version = "=1.0.162"
|
||||
features = ["derive"]
|
||||
|
||||
[dependencies.git2]
|
||||
version = "0.13.24"
|
||||
version = "=0.17.1"
|
||||
|
||||
[dependencies.shellexpand]
|
||||
version = "2.1.0"
|
||||
version = "=3.1.0"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "3.0.0-beta.5"
|
||||
version = "=4.2.7"
|
||||
features = ["derive", "cargo"]
|
||||
|
||||
[dependencies.console]
|
||||
version = "0.15.0"
|
||||
version = "=0.15.5"
|
||||
|
||||
[dependencies.regex]
|
||||
version = "1.5.4"
|
||||
version = "=1.8.1"
|
||||
|
||||
[dependencies.comfy-table]
|
||||
version = "5.0.0"
|
||||
version = "=6.1.4"
|
||||
|
||||
[dependencies.serde_yaml]
|
||||
version = "=0.9.21"
|
||||
|
||||
[dependencies.serde_json]
|
||||
version = "=1.0.96"
|
||||
|
||||
[dependencies.isahc]
|
||||
version = "=1.7.2"
|
||||
default-features = false
|
||||
features = ["json", "http2", "text-decoding"]
|
||||
|
||||
[dependencies.parse_link_header]
|
||||
version = "=0.3.3"
|
||||
|
||||
[dependencies.url-escape]
|
||||
version = "=0.1.1"
|
||||
|
||||
[dev-dependencies.tempdir]
|
||||
version = "0.3.7"
|
||||
version = "=0.3.7"
|
||||
|
||||
[features]
|
||||
static-build = [
|
||||
"git2/vendored-openssl",
|
||||
"git2/vendored-libgit2",
|
||||
"isahc/static-curl",
|
||||
"isahc/static-ssl",
|
||||
]
|
||||
|
||||
97
Justfile
97
Justfile
@@ -1,23 +1,98 @@
|
||||
check:
|
||||
cargo check
|
||||
cargo fmt --check
|
||||
cargo clippy --no-deps
|
||||
set positional-arguments
|
||||
|
||||
set shell := ["/bin/bash", "-c"]
|
||||
|
||||
static_target := "x86_64-unknown-linux-musl"
|
||||
|
||||
cargo := "cargo +nightly"
|
||||
|
||||
check: fmt-check lint test
|
||||
{{cargo}} check
|
||||
|
||||
clean:
|
||||
{{cargo}} clean
|
||||
git clean -f -d -X
|
||||
|
||||
fmt:
|
||||
{{cargo}} fmt
|
||||
git ls-files | grep '\.py$' | xargs isort
|
||||
git ls-files | grep '\.py$' | xargs black
|
||||
git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --write
|
||||
|
||||
fmt-check:
|
||||
{{cargo}} fmt --check
|
||||
git ls-files | grep '\.py$' | xargs black --check
|
||||
git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --diff
|
||||
|
||||
lint:
|
||||
{{cargo}} clippy --no-deps -- -Dwarnings
|
||||
git ls-files | grep '\.py$' | xargs ruff --ignore E501
|
||||
git ls-files | grep '\.sh$' | xargs -L 1 shellcheck --norc
|
||||
|
||||
lint-fix:
|
||||
cargo clippy --no-deps --fix
|
||||
{{cargo}} clippy --no-deps --fix
|
||||
|
||||
release:
|
||||
cargo build --release
|
||||
build-release:
|
||||
{{cargo}} build --release
|
||||
|
||||
build-release-static:
|
||||
{{cargo}} build --release --target {{static_target}} --features=static-build
|
||||
|
||||
pushall:
|
||||
for r in $(git remote) ; do \
|
||||
for branch in develop master ; do \
|
||||
git push $r $branch ; \
|
||||
done ; \
|
||||
done
|
||||
|
||||
release-patch:
|
||||
./release.sh patch
|
||||
|
||||
test-binary:
|
||||
env \
|
||||
GITHUB_API_BASEURL=http://rest:5000/github \
|
||||
GITLAB_API_BASEURL=http://rest:5000/gitlab \
|
||||
{{cargo}} build --profile e2e-tests --target {{static_target}} --features=static-build
|
||||
|
||||
install:
|
||||
cargo install --path .
|
||||
{{cargo}} install --path .
|
||||
|
||||
test:
|
||||
cargo test --lib --bins
|
||||
install-static:
|
||||
{{cargo}} install --target {{static_target}} --features=static-build --path .
|
||||
|
||||
update-dependencies:
|
||||
build:
|
||||
{{cargo}} build
|
||||
|
||||
build-static:
|
||||
{{cargo}} build --target {{static_target}} --features=static-build
|
||||
|
||||
test: test-unit test-integration test-e2e
|
||||
|
||||
test-unit +tests="":
|
||||
{{cargo}} test --lib --bins -- --show-output {{tests}}
|
||||
|
||||
test-integration:
|
||||
{{cargo}} test --test "*"
|
||||
|
||||
test-e2e +tests=".": test-binary
|
||||
cd ./e2e_tests \
|
||||
&& docker-compose rm --stop -f \
|
||||
&& docker-compose build \
|
||||
&& docker-compose run \
|
||||
--rm \
|
||||
-v $PWD/../target/x86_64-unknown-linux-musl/e2e-tests/grm:/grm \
|
||||
pytest \
|
||||
"GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest --exitfirst -p no:cacheprovider --color=yes "$@"" \
|
||||
&& docker-compose rm --stop -f
|
||||
|
||||
update-dependencies: update-cargo-dependencies
|
||||
|
||||
update-cargo-dependencies:
|
||||
@cd ./depcheck \
|
||||
&& python3 -m venv ./venv \
|
||||
&& . ./venv/bin/activate \
|
||||
&& pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \
|
||||
&& ./update-cargo-dependencies.py
|
||||
|
||||
wait:
|
||||
read -p "[ENTER] to continue "
|
||||
|
||||
674
LICENSE
Normal file
674
LICENSE
Normal file
@@ -0,0 +1,674 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
36
README.md
36
README.md
@@ -1,7 +1,10 @@
|
||||
# GRM — Git Repository Manager
|
||||
|
||||
GRM helps you manage git repositories in a declarative way. Configure your
|
||||
repositories in a [TOML](https://toml.io/) file, GRM does the rest.
|
||||
repositories in a [TOML](https://toml.io/) or YAML file, GRM does the rest.
|
||||
|
||||
Also, GRM can be used to work with git worktrees in an opinionated,
|
||||
straightforward fashion.
|
||||
|
||||
**Take a look at the [official documentation](https://hakoerber.github.io/git-repo-manager/)
|
||||
for installation & quickstart.**
|
||||
@@ -34,23 +37,26 @@ like Terraform for your local git repositories. Write a config, run the tool, an
|
||||
your repos are ready. The only thing that is tracked by git it the list of
|
||||
repositories itself.
|
||||
|
||||
# Future & Ideas
|
||||
|
||||
* Operations over all repos (e.g. pull)
|
||||
* Show status of managed repositories (dirty, compare to remotes, ...)
|
||||
|
||||
# Optional Features
|
||||
|
||||
* Support multiple file formats (YAML, JSON).
|
||||
* Add systemd timer unit to run regular syncs
|
||||
|
||||
# Crates
|
||||
|
||||
* [`toml`](https://docs.rs/toml/) for the configuration file
|
||||
* [`serde`](https://docs.rs/serde/) because we're using Rust, after all
|
||||
* [`git2`](https://docs.rs/git2/), a safe wrapper around `libgit2`, for all git operations
|
||||
* [`clap`](https://docs.rs/clap/), [`console`](https://docs.rs/console/) and [`shellexpand`](https://docs.rs/shellexpand) for good UX
|
||||
* [`toml`](https://docs.rs/toml/) for the configuration file.
|
||||
* [`serde`](https://docs.rs/serde/), together with
|
||||
[`serde_yaml`](https://docs.rs/serde_yaml/) and
|
||||
[`serde_json`](https://docs.rs/serde_json/). Because we're using Rust, after
|
||||
all.
|
||||
* [`git2`](https://docs.rs/git2/), a safe wrapper around `libgit2`, for all git operations.
|
||||
* [`clap`](https://docs.rs/clap/), [`console`](https://docs.rs/console/), [`comfy_table`](https://docs.rs/comfy-table/) and [`shellexpand`](https://docs.rs/shellexpand) for good UX.
|
||||
* [`isahc`](https://docs.rs/isahc/) as the HTTP client for forge integrations.
|
||||
|
||||
# Links
|
||||
|
||||
* [crates.io](https://crates.io/crates/git-repo-manager)
|
||||
|
||||
# Mirrors
|
||||
|
||||
This repository can be found on multiple forges:
|
||||
|
||||
* https://github.com/hakoerber/git-repo-manager
|
||||
* https://code.hkoerber.de/hannes/git-repo-manager/
|
||||
* https://codeberg.org/hakoerber/git-repo-manager
|
||||
* https://git.sr.ht/~hkoerber/git-repo-manager
|
||||
|
||||
@@ -1,18 +1,25 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import subprocess
|
||||
import os
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import semver
|
||||
import tomlkit
|
||||
|
||||
INDEX_DIR = "crates.io-index"
|
||||
|
||||
AUTOUPDATE_DISABLED = []
|
||||
|
||||
if os.path.exists(INDEX_DIR):
|
||||
subprocess.run(
|
||||
["git", "pull", "--depth=1", "origin"],
|
||||
["git", "fetch", "--depth=1", "origin"],
|
||||
cwd=INDEX_DIR,
|
||||
check=True,
|
||||
capture_output=True,
|
||||
)
|
||||
subprocess.run(
|
||||
["git", "reset", "--hard", "origin/master"],
|
||||
cwd=INDEX_DIR,
|
||||
check=True,
|
||||
capture_output=True,
|
||||
@@ -21,7 +28,7 @@ else:
|
||||
subprocess.run(
|
||||
["git", "clone", "--depth=1", "https://github.com/rust-lang/crates.io-index"],
|
||||
check=True,
|
||||
capture_output=False, # to get some git output
|
||||
capture_output=False, # to get some git output
|
||||
)
|
||||
|
||||
with open("../Cargo.toml", "r") as cargo_config:
|
||||
@@ -29,9 +36,16 @@ with open("../Cargo.toml", "r") as cargo_config:
|
||||
|
||||
update_necessary = False
|
||||
|
||||
# This updates the crates.io index, see https://github.com/rust-lang/cargo/issues/3377
|
||||
subprocess.run(
|
||||
["cargo", "update", "--dry-run"],
|
||||
check=True,
|
||||
capture_output=False, # to get some git output
|
||||
)
|
||||
|
||||
for tier in ["dependencies", "dev-dependencies"]:
|
||||
for name, dependency in cargo[tier].items():
|
||||
version = dependency["version"]
|
||||
version = dependency["version"].lstrip("=")
|
||||
if len(name) >= 4:
|
||||
info_file = f"{INDEX_DIR}/{name[0:2]}/{name[2:4]}/{name}"
|
||||
elif len(name) == 3:
|
||||
@@ -46,13 +60,24 @@ for tier in ["dependencies", "dev-dependencies"]:
|
||||
latest_version = None
|
||||
for version_entry in open(info_file, "r").readlines():
|
||||
version = semver.VersionInfo.parse(json.loads(version_entry)["vers"])
|
||||
if current_version.prerelease == "" and version.prerelease != "":
|
||||
# skip prereleases, except when we are on a prerelease already
|
||||
continue
|
||||
if latest_version is None or version > latest_version:
|
||||
if (
|
||||
current_version.prerelease is None
|
||||
and version.prerelease is not None
|
||||
):
|
||||
# skip prereleases, except when we are on a prerelease already
|
||||
print(f"{name}: Skipping prerelease version {version}")
|
||||
continue
|
||||
latest_version = version
|
||||
|
||||
if latest_version != current_version:
|
||||
if name in AUTOUPDATE_DISABLED:
|
||||
print(
|
||||
f"{name} {current_version}: There is a new version available "
|
||||
f"({latest_version}, current {current_version}), but autoupdating "
|
||||
f"is explictly disabled for {name}"
|
||||
)
|
||||
continue
|
||||
update_necessary = True
|
||||
if latest_version < current_version:
|
||||
print(
|
||||
@@ -62,13 +87,83 @@ for tier in ["dependencies", "dev-dependencies"]:
|
||||
print(
|
||||
f"{name}: New version found: {latest_version} (current {current_version})"
|
||||
)
|
||||
cargo[tier][name]["version"] = str(latest_version)
|
||||
cargo[tier][name]["version"] = f"={str(latest_version)}"
|
||||
with open("../Cargo.toml", "w") as cargo_config:
|
||||
cargo_config.write(tomlkit.dumps(cargo))
|
||||
|
||||
try:
|
||||
cmd = subprocess.run(
|
||||
[
|
||||
"cargo",
|
||||
"update",
|
||||
"-Z",
|
||||
"no-index-update",
|
||||
"--aggressive",
|
||||
"--package",
|
||||
name,
|
||||
],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.stdout)
|
||||
print(e.stderr)
|
||||
raise
|
||||
|
||||
message = f"dependencies: Update {name} to {latest_version}"
|
||||
subprocess.run(
|
||||
[
|
||||
"git",
|
||||
"commit",
|
||||
"--message",
|
||||
message,
|
||||
"../Cargo.toml",
|
||||
"../Cargo.lock",
|
||||
],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
)
|
||||
|
||||
|
||||
if update_necessary is True:
|
||||
with open("../Cargo.toml", "w") as cargo_config:
|
||||
cargo_config.write(tomlkit.dumps(cargo))
|
||||
sys.exit(1)
|
||||
else:
|
||||
# Note that we have to restart this lookup every time, as later packages can depend
|
||||
# on former packages
|
||||
while True:
|
||||
with open("../Cargo.lock", "r") as f:
|
||||
cargo_lock = tomlkit.parse(f.read())
|
||||
for package in cargo_lock["package"]:
|
||||
spec = f"{package['name']}:{package['version']}"
|
||||
try:
|
||||
cmd = subprocess.run(
|
||||
[
|
||||
"cargo",
|
||||
"update",
|
||||
"-Z",
|
||||
"no-index-update",
|
||||
"--aggressive",
|
||||
"--package",
|
||||
spec,
|
||||
],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.stdout)
|
||||
print(e.stderr)
|
||||
raise
|
||||
if len(cmd.stderr) != 0:
|
||||
update_necessary = True
|
||||
message = "Cargo.lock: {}".format(cmd.stderr.split("\n")[0].strip())
|
||||
print(message)
|
||||
cmd = subprocess.run(
|
||||
["git", "commit", "--message", message, "../Cargo.lock"],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
)
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
if update_necessary is False:
|
||||
print("Everything up to date")
|
||||
sys.exit(0)
|
||||
|
||||
@@ -7,3 +7,8 @@ title = "Git Repo Manager"
|
||||
|
||||
[output.html]
|
||||
mathjax-support = true
|
||||
|
||||
# [output.linkcheck]
|
||||
# follow-web-links = true
|
||||
# traverse-parent-directories = false
|
||||
# warning-policy = "error"
|
||||
|
||||
@@ -1,7 +1,20 @@
|
||||
# Summary
|
||||
|
||||
- [Overview](./overview.md)
|
||||
- [Getting started](./getting_started.md)
|
||||
- [Repository trees](./repos.md)
|
||||
[Overview](./overview.md)
|
||||
|
||||
- [Installation](./installation.md)
|
||||
- [Tutorial](./tutorial.md)
|
||||
- [Managing Repositories](./repos.md)
|
||||
- [Local Configuration](./local_configuration.md)
|
||||
- [Forge Integrations](./forge_integration.md)
|
||||
- [Git Worktrees](./worktrees.md)
|
||||
- [Working with Worktrees](./worktree_working.md)
|
||||
- [Worktrees and Remotes](./worktree_remotes.md)
|
||||
- [Behavior Details](./worktree_behavior.md)
|
||||
- [FAQ](./faq.md)
|
||||
- [Developer Documentation](./developing.md)
|
||||
- [Testing](./testing.md)
|
||||
- [Dependency updates](./dependency_updates.md)
|
||||
- [Releases](./releases.md)
|
||||
- [Formatting & Style](./formatting_and_style.md)
|
||||
- [The Docs Themselves](./documentation.md)
|
||||
|
||||
10
docs/src/dependency_updates.md
Normal file
10
docs/src/dependency_updates.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# Dependency updates
|
||||
|
||||
Rust has the same problem as the node ecosystem, just a few magnitudes smaller:
|
||||
Dependency sprawl. GRM has a dozen direct dependencies, but over 150 transitive
|
||||
ones.
|
||||
|
||||
To keep them up to date, there is a script:
|
||||
`depcheck/update-cargo-dependencies.py`. It updates direct dependencies to the
|
||||
latest stable version and updates transitive dependencies where possible. To run
|
||||
it, use `just update-dependencies`, which will create commits for each update.
|
||||
69
docs/src/developing.md
Normal file
69
docs/src/developing.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# Overview
|
||||
|
||||
GRM is still in very early development. I started GRM mainly to scratch my own
|
||||
itches (and am heavily dogfooding it). If you have a new use case for GRM, go
|
||||
for it!
|
||||
|
||||
## Contributing
|
||||
|
||||
To contribute, just fork the repo and create a pull request against `develop`.
|
||||
If you plan bigger changes, please consider opening an issue first, so we can
|
||||
discuss it.
|
||||
|
||||
If you want, add yourself to the `CONTRIBUTORS` file in your pull request.
|
||||
|
||||
## Branching strategy
|
||||
|
||||
The branching strategy is a simplified
|
||||
[git-flow](https://nvie.com/posts/a-successful-git-branching-model/).
|
||||
|
||||
* `master` is the "production" branch. Each commit is a new release.
|
||||
* `develop` is the branch where new stuff is coming in.
|
||||
* feature branches branch off of `develop` and merge back into it.
|
||||
|
||||
Feature branches are not required, there are also changes happening directly on
|
||||
`develop`.
|
||||
|
||||
## Required tooling
|
||||
|
||||
You will need the following tools:
|
||||
|
||||
* Rust (obviously) (easiest via `rustup`), with the nightly toolchain
|
||||
* Python3
|
||||
* [`just`](https://github.com/casey/just), a command runner like `make`. See
|
||||
[here](https://github.com/casey/just#installation) for installation
|
||||
instructions (it's most likely just a simple `cargo install just`).
|
||||
* Docker & docker-compose for the e2e tests
|
||||
* `isort`, `black` and `shfmt` for formatting.
|
||||
* `ruff` and `shellcheck` for linting.
|
||||
* `mdbook` for the documentation
|
||||
|
||||
Here are the tools:
|
||||
|
||||
| Distribution | Command |
|
||||
| ------------- | --------------------------------------------------------------------------------------------------- |
|
||||
| Arch Linux | `pacman -S --needed python3 rustup just docker docker-compose python-black shfmt shellcheck mdbook` |
|
||||
| Ubuntu/Debian | `apt-get install --no-install-recommends python3 docker.io docker-compose black shellcheck` |
|
||||
|
||||
Note that you will have to install `just` and `mdbook` manually on Ubuntu (e.g.
|
||||
via `cargo install just mdbook` if your rust build environment is set up
|
||||
correctly). Same for `shfmt`, which may just be a `go install
|
||||
mvdan.cc/sh/v3/cmd/shfmt@latest`, depending on your go build environment.
|
||||
|
||||
For details about rustup and the toolchains, see [the installation
|
||||
section](./installation.md).
|
||||
|
||||
## FAQ
|
||||
|
||||
### Why nightly?
|
||||
|
||||
For now, GRM requires the nightly toolchain for two reasons:
|
||||
|
||||
* [`io_error_more`](https://github.com/rust-lang/rust/issues/86442) to get
|
||||
better error messages on IO errors
|
||||
* [`const_option_ext`](https://github.com/rust-lang/rust/issues/91930) to have
|
||||
static variables read from the environment that fall back to hard coded
|
||||
defaults
|
||||
|
||||
Honestly, both of those are not really necessary or can be handled without
|
||||
nightly. It's just that I'm using nightly anyway.
|
||||
11
docs/src/documentation.md
Normal file
11
docs/src/documentation.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# Documentation
|
||||
|
||||
The documentation lives in the `docs` folder and uses
|
||||
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
|
||||
features here!
|
||||
|
||||
Using [GitHub actions](https://github.com/features/actions), the documentation
|
||||
on `master` is automatically published to [the project
|
||||
homepage](https://hakoerber.github.io/git-repo-manager/) via GitHub pages. See
|
||||
`.github/workflows/gh-pages.yml` for the configuration of GitHub Actions.
|
||||
|
||||
@@ -1,10 +1,3 @@
|
||||
# FAQ
|
||||
|
||||
## Why is the nightly toolchain required?
|
||||
|
||||
Building GRM currently requires nightly features due to the usage of
|
||||
[`std::path::Path::is_symlink()`](https://doc.rust-lang.org/std/fs/struct.FileType.html#method.is_symlink).
|
||||
See the [tracking issue](https://github.com/rust-lang/rust/issues/85748).
|
||||
|
||||
`is_symlink()` is actually available in rustc 1.57, so it will be on stable in
|
||||
the near future. This would mean that GRM can be built using the stable toolchain!
|
||||
Currently empty, as there are no questions that are asked frequently :D
|
||||
|
||||
209
docs/src/forge_integration.md
Normal file
209
docs/src/forge_integration.md
Normal file
@@ -0,0 +1,209 @@
|
||||
# Forge Integrations
|
||||
|
||||
In addition to managing repositories locally, `grm` also integrates with source
|
||||
code hosting platforms. Right now, the following platforms are supported:
|
||||
|
||||
* [GitHub](https://github.com/)
|
||||
* [GitLab](https://gitlab.com/)
|
||||
|
||||
Imagine you are just starting out with `grm` and want to clone all your
|
||||
repositories from GitHub. This is as simple as:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects
|
||||
```
|
||||
|
||||
You will end up with your projects cloned into
|
||||
`~/projects/{your_github_username}/`
|
||||
|
||||
## Authentication
|
||||
|
||||
The only currently supported authentication option is using a personal access
|
||||
token.
|
||||
|
||||
### GitHub
|
||||
|
||||
See the GitHub documentation for personal access tokens:
|
||||
[Link](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token).
|
||||
|
||||
The only required permission is the "repo" scope.
|
||||
|
||||
### GitLab
|
||||
|
||||
See the GitLab documentation for personal access tokens:
|
||||
[Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html).
|
||||
|
||||
The required scopes are a bit weird. Actually, the following should suffice:
|
||||
|
||||
* `read_user` to get user information (required to get the current
|
||||
authenticated user name for the `--owner` filter).
|
||||
* A scope that allows reading private repositories. (`read_repository` is just
|
||||
for *cloning* private repos). This unfortunately does not exist.
|
||||
|
||||
So currently, you'll need to select the `read_api` scope.
|
||||
|
||||
## Filters
|
||||
|
||||
By default, `grm` will sync **nothing**. This is quite boring, so you have to
|
||||
tell the command what repositories to include. They are all inclusive (i.e. act
|
||||
as a logical OR), so you can easily chain many filters to clone a bunch of
|
||||
repositories. It's quite simple:
|
||||
|
||||
* `--user <USER>` syncs all repositories of that remote user
|
||||
* `--group <GROUP>` syncs all repositories of that remote group/organization
|
||||
* `--owner` syncs all repositories of the user that is used for authentication.
|
||||
This is effectively a shortcut for `--user $YOUR_USER`
|
||||
* `--access` syncs all repositories that the current user has access to
|
||||
|
||||
Easiest to see in an example:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --user torvalds --owner --group zalando [...]
|
||||
```
|
||||
|
||||
This would sync all of Torvald's repositories, all of my own repositories and
|
||||
all (public) repositories in the "zalando" group.
|
||||
|
||||
## Strategies
|
||||
|
||||
There are generally three ways how you can use `grm` with forges:
|
||||
|
||||
### Ad-hoc cloning
|
||||
|
||||
This is the easiest, there are no local files involved. You just run the
|
||||
command, `grm` clones the repos, that's it. If you run the command again, `grm`
|
||||
will figure out the differences between local and remote repositories and
|
||||
resolve them locally.
|
||||
|
||||
### Create a file
|
||||
|
||||
This is effectively `grm repos find local`, but using the forge instead of the
|
||||
local file system. You will end up with a normal repository file that you can
|
||||
commit to git. To update the list of repositories, just run the command again
|
||||
and commit the new file.
|
||||
|
||||
### Define options in a file
|
||||
|
||||
This is a hybrid approach: You define filtering options in a file that you can
|
||||
commit to source control. Effectively, you are persisting the options you gave
|
||||
to `grm` on the command line with the ad-hoc approach. Similarly, `grm` will
|
||||
figure out differences between local and remote and resolve them.
|
||||
|
||||
A file would look like this:
|
||||
|
||||
```toml
|
||||
provider = "github"
|
||||
token_command = "cat ~/.github_token"
|
||||
root = "~/projects"
|
||||
|
||||
[filters]
|
||||
owner = true
|
||||
groups = [
|
||||
"zalando"
|
||||
]
|
||||
```
|
||||
|
||||
The options in the file map to the command line options of the `grm repos sync
|
||||
remote` command.
|
||||
|
||||
You'd then run the `grm repos sync` command the same way as with a list of
|
||||
repositories in a configuration:
|
||||
|
||||
```bash
|
||||
$ grm repos sync --config example.config.toml
|
||||
```
|
||||
|
||||
You can even use that file to generate a repository list that you can feed into
|
||||
`grm repos sync`:
|
||||
|
||||
```bash
|
||||
$ grm repos find config --config example.config.toml > repos.toml
|
||||
$ grm repos sync config --config repos.toml
|
||||
```
|
||||
|
||||
## Using with self-hosted GitLab
|
||||
|
||||
By default, `grm` uses the default GitLab API endpoint
|
||||
([https://gitlab.com](https://gitlab.com)). You can override the endpoint by
|
||||
specifying the `--api-url` parameter. Like this:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...]
|
||||
```
|
||||
|
||||
## The cloning protocol
|
||||
|
||||
By default, `grm` will use HTTPS for public repositories and SSH otherwise. This
|
||||
can be overridden with the `--force-ssh` switch.
|
||||
|
||||
## About the token command
|
||||
|
||||
To ensure maximum flexibility, `grm` has a single way to get the token it uses
|
||||
to authenticate: Specify a command that returns the token via stdout. This
|
||||
easily integrates with password managers like
|
||||
[`pass`](https://www.passwordstore.org/).
|
||||
|
||||
Of course, you are also free to specify something like `echo mytoken` as the
|
||||
command, as long as you are OK with the security implications (like having the
|
||||
token in clear text in your shell history). It may be better to have the token
|
||||
in a file instead and read it: `cat ~/.gitlab_token`.
|
||||
|
||||
Generally, use whatever you want. The command just has to return successfully
|
||||
and return the token as the first line of stdout.
|
||||
|
||||
## Examples
|
||||
|
||||
Maybe you just want to locally clone all repos from your GitHub user?
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token"
|
||||
```
|
||||
|
||||
This will clone all repositories into
|
||||
`~/github_projects/{your_github_username}`.
|
||||
|
||||
If instead you want to clone **all** repositories you have access to (e.g. via
|
||||
organizations or other users' private repos you have access to), just change the
|
||||
filter a little bit:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --access --root ~/github_projects --token-command "pass show github_grm_access_token"
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
### GitHub
|
||||
|
||||
Unfortunately, GitHub does not have a nice API endpoint to get **private**
|
||||
repositories for a certain user
|
||||
([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user)
|
||||
only returns public repositories).
|
||||
|
||||
Therefore, using `--user {user}` will only show public repositories for GitHub.
|
||||
Note that this does not apply to `--access`: If you have access to another
|
||||
user's private repository, it will be listed.
|
||||
|
||||
## Adding integrations
|
||||
|
||||
Adding a new integration involves writing some Rust code. Most of the logic is
|
||||
generic, so you will not have to reinvent the wheel. Generally, you will need to
|
||||
gather the following information:
|
||||
|
||||
* A list of repositories for a single user
|
||||
* A list of repositories for a group (or any similar concept if applicable)
|
||||
* A list of repositories for the user that the API token belongs to
|
||||
* The username of the currently authenticated user
|
||||
|
||||
Authentication currently only works via a bearer token passed via the
|
||||
`Authorization` HTTP header.
|
||||
|
||||
Each repo has to have the following properties:
|
||||
|
||||
* A name (which also acts as the identifier for diff between local and remote
|
||||
repositories)
|
||||
* An SSH URL to push to
|
||||
* An HTTPS URL to clone and fetch from
|
||||
* A flag that marks the repository as private
|
||||
|
||||
If you plan to implement another forge, please first open an issue so we can go
|
||||
through the required setup. I'm happy to help!
|
||||
45
docs/src/formatting_and_style.md
Normal file
45
docs/src/formatting_and_style.md
Normal file
@@ -0,0 +1,45 @@
|
||||
# Formatting & Style
|
||||
|
||||
## Code formatting
|
||||
|
||||
I'm allergic to discussions about formatting. I'd rather make the computer do it
|
||||
for me.
|
||||
|
||||
For Rust, just use `cargo fmt`. For Python, use
|
||||
[black](https://github.com/psf/black). I'd rather not spend any effort in
|
||||
configuring the formatters (not possible for black anyway). For shell scripts,
|
||||
use [`shfmt`](https://github.com/mvdan/sh).
|
||||
|
||||
To autoformat all code, use `just fmt`
|
||||
|
||||
## Style
|
||||
|
||||
Honestly, no idea about style. I'm still learning Rust, so I'm trying to find a
|
||||
good style. Just try to keep it consistent when you add code.
|
||||
|
||||
## Linting
|
||||
|
||||
You can use `just lint` to run all lints.
|
||||
|
||||
### Rust
|
||||
|
||||
Clippy is the guard that prevents shitty code from getting into the code base.
|
||||
When running `just check`, any clippy suggestions will make the command fail.
|
||||
So make clippy happy! The easiest way:
|
||||
|
||||
* Commit your changes (so clippy can change safely).
|
||||
* Run `cargo clippy --fix` to do the easy changes automatically.
|
||||
* Run `cargo clippy` and take a look at the messages.
|
||||
|
||||
Until now, I had no need to override or silence any clippy suggestions.
|
||||
|
||||
### Shell
|
||||
|
||||
`shellcheck` lints all shell scripts. As they change very rarely, this is not
|
||||
too important.
|
||||
|
||||
## Unsafe code
|
||||
|
||||
Any `unsafe` code is forbidden for now globally via `#![forbid(unsafe_code)]`.
|
||||
I cannot think of any reason GRM may need `unsafe`. If it comes up, it needs to
|
||||
be discussed.
|
||||
@@ -1,22 +0,0 @@
|
||||
# Quickstart
|
||||
|
||||
## Installation
|
||||
|
||||
Building GRM currently requires the nightly Rust toolchain. The easiest way
|
||||
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
|
||||
|
||||
Make sure that the nightly toolchain is installed:
|
||||
|
||||
```
|
||||
$ rustup toolchain install nightly
|
||||
```
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch master
|
||||
```
|
||||
|
||||
If you're brave, you can also run the development build:
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
|
||||
```
|
||||
57
docs/src/installation.md
Normal file
57
docs/src/installation.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# Installation
|
||||
|
||||
## Installation
|
||||
|
||||
Building GRM currently requires the nightly Rust toolchain. The easiest way is
|
||||
using [`rustup`](https://rustup.rs/). Make sure that rustup is properly
|
||||
installed.
|
||||
|
||||
Make sure that the nightly toolchain is installed:
|
||||
|
||||
```
|
||||
$ rustup toolchain install nightly
|
||||
```
|
||||
|
||||
Then, install the build dependencies:
|
||||
|
||||
| Distribution | Command |
|
||||
| ------------- | ------------------------------------------------------------------------------ |
|
||||
| Arch Linux | `pacman -S --needed gcc openssl pkg-config` |
|
||||
| Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` |
|
||||
|
||||
Then, it's a simple command to install the latest stable version:
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install git-repo-manager
|
||||
```
|
||||
|
||||
If you're brave, you can also run the development build:
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
|
||||
```
|
||||
|
||||
## Static build
|
||||
|
||||
Note that by default, you will get a dynamically linked executable.
|
||||
Alternatively, you can also build a statically linked binary. For this, you will
|
||||
need `musl` and a few other build dependencies installed:
|
||||
|
||||
| Distribution | Command |
|
||||
| ------------- | --------------------------------------------------------------------------- |
|
||||
| Arch Linux | `pacman -S --needed gcc musl perl make` |
|
||||
| Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` |
|
||||
|
||||
(`perl` and `make` are required for the OpenSSL build script)
|
||||
|
||||
Then, add the musl target via `rustup`:
|
||||
|
||||
```
|
||||
$ rustup +nightly target add x86_64-unknown-linux-musl
|
||||
```
|
||||
|
||||
Then, use a modified build command to get a statically linked binary:
|
||||
|
||||
```
|
||||
$ cargo +nightly install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build
|
||||
```
|
||||
90
docs/src/local_configuration.md
Normal file
90
docs/src/local_configuration.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# Local Configuration
|
||||
|
||||
When managing multiple git repositories with GRM, you'll generally have a
|
||||
configuration file containing information about all the repos you have. GRM then
|
||||
makes sure that your repositories match that configuration. If they don't exist
|
||||
yet, it will clone them. It will also make sure that all remotes are configured
|
||||
properly.
|
||||
|
||||
Let's try it out:
|
||||
|
||||
## Get the example configuration
|
||||
|
||||
```bash
|
||||
curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml
|
||||
```
|
||||
|
||||
Then, you're ready to run the first sync. This will clone all configured
|
||||
repositories and set up the remotes.
|
||||
|
||||
```bash
|
||||
$ grm repos sync config --config example.config.toml
|
||||
[⚙] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
|
||||
[✔] git-repo-manager: Repository successfully cloned
|
||||
[⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
|
||||
[✔] git-repo-manager: OK
|
||||
[⚙] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git"
|
||||
[✔] dotfiles: Repository successfully cloned
|
||||
[✔] dotfiles: OK
|
||||
```
|
||||
|
||||
If you run it again, it will report no changes:
|
||||
|
||||
```bash
|
||||
$ grm repos sync config -c example.config.toml
|
||||
[✔] git-repo-manager: OK
|
||||
[✔] dotfiles: OK
|
||||
```
|
||||
|
||||
### Generate your own configuration
|
||||
|
||||
Now, if you already have a few repositories, it would be quite laborious to
|
||||
write a configuration from scratch. Luckily, GRM has a way to generate a
|
||||
configuration from an existing file tree:
|
||||
|
||||
```bash
|
||||
grm repos find local ~/your/project/root > config.toml
|
||||
```
|
||||
|
||||
This will detect all repositories and remotes and write them to `config.toml`.
|
||||
|
||||
You can exclude repositories from the generated configuration by providing
|
||||
a regex that will be tested against the path of each discovered repository:
|
||||
|
||||
```bash
|
||||
grm repos find local ~/your/project/root --exclude "^.*/subdir/match-(foo|bar)/.*$" > config.toml
|
||||
```
|
||||
|
||||
### Show the state of your projects
|
||||
|
||||
```bash
|
||||
$ grm repos status --config example.config.toml
|
||||
╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮
|
||||
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
|
||||
╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡
|
||||
│ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │
|
||||
│ ┆ ┆ ┆ <origin/master> ✔ ┆ ┆ origin │
|
||||
├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤
|
||||
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
|
||||
╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯
|
||||
```
|
||||
|
||||
You can also use `status` without `--config` to check the repository you're
|
||||
currently in:
|
||||
|
||||
```bash
|
||||
$ cd ~/example-projects/dotfiles
|
||||
$ grm repos status
|
||||
╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮
|
||||
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
|
||||
╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡
|
||||
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
|
||||
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
|
||||
```
|
||||
|
||||
## YAML
|
||||
|
||||
By default, the repo configuration uses TOML. If you prefer YAML, just give it a
|
||||
YAML file instead (file ending does not matter, `grm` will figure out the
|
||||
format). For generating a configuration, pass `--format yaml` to `grm repo
|
||||
find` which generates a YAML configuration instead of a TOML configuration.
|
||||
@@ -1,22 +1,23 @@
|
||||
# Overview
|
||||
|
||||
Welcome! This is the documentation for [Git Repo
|
||||
Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a
|
||||
tool that helps you manage git repositories.
|
||||
Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a tool
|
||||
that helps you manage git repositories in a declarative way.
|
||||
|
||||
GRM helps you manage git repositories in a declarative way. Configure your
|
||||
repositories in a TOML file, GRM does the rest. Take a look at [the example
|
||||
repositories in a TOML or YAML file, GRM does the rest. Take a look at [the
|
||||
example
|
||||
configuration](https://github.com/hakoerber/git-repo-manager/blob/master/example.config.toml)
|
||||
to get a feel for the way you configure your repositories. See the [repository
|
||||
tree chapter](./repos.md) for details.
|
||||
|
||||
GRM also provides some tooling to work with single git repositories using
|
||||
`git-worktree`. See [the worktree chapter](./worktree.md) for more details.
|
||||
`git-worktree`. See [the worktree chapter](./worktrees.md) for more details.
|
||||
|
||||
## Why use GRM?
|
||||
|
||||
If you're working with a lot of git repositories, GRM can help you to manage them
|
||||
in an easy way:
|
||||
If you're working with a lot of git repositories, GRM can help you to manage
|
||||
them in an easy way:
|
||||
|
||||
* You want to easily clone many repositories to a new machine.
|
||||
* You want to change remotes for multiple repositories (e.g. because your GitLab
|
||||
|
||||
27
docs/src/releases.md
Normal file
27
docs/src/releases.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Releases
|
||||
|
||||
To make a release, make sure you are on a clean `develop` branch, sync your
|
||||
remotes and then run `./release (major|minor|patch)`. It will handle a
|
||||
git-flow-y release, meaning that it will perform a merge from `develop` to
|
||||
`master`, create a git tag, sync all remotes and run `cargo publish`.
|
||||
|
||||
Make sure to run `just check` before releasing to make sure that nothing is
|
||||
broken.
|
||||
|
||||
As GRM is still `v0.x`, there is not much consideration for backwards
|
||||
compatibility. Generally, update the patch version for small stuff and the minor
|
||||
version for bigger / backwards incompatible changes.
|
||||
|
||||
Generally, it's good to regularly release a new patch release with [updated
|
||||
dependencies](./dependency_updates.md). As `./release.sh patch` is exposed as a
|
||||
Justfile target (`release-patch`), it's possible to do both in one step:
|
||||
|
||||
```bash
|
||||
$ just update-dependencies check release-patch
|
||||
```
|
||||
|
||||
## Release notes
|
||||
|
||||
There are currently no release notes. Things are changing quite quickly and
|
||||
there is simply no need for a record of changes (except the git history of
|
||||
course).
|
||||
@@ -1,76 +1,13 @@
|
||||
# Managing tree of git repositories
|
||||
# Managing Repositories
|
||||
|
||||
When managing multiple git repositories with GRM, you'll generally have a
|
||||
configuration file containing information about all the repos you have. GRM then
|
||||
makes sure that your repositories match that config. If they don't exist yet, it
|
||||
will clone them. It will also make sure that all remotes are configured properly.
|
||||
GRM helps you manage a bunch of git repositories easily. There are generally two
|
||||
ways to go about that:
|
||||
|
||||
Let's try it out:
|
||||
You can either manage a list of repositories in a TOML or YAML file, and use GRM
|
||||
to sync the configuration with the state of the repository.
|
||||
|
||||
## Get the example configuration
|
||||
|
||||
```bash
|
||||
$ curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml
|
||||
```
|
||||
|
||||
Then, you're ready to run the first sync. This will clone all configured repositories
|
||||
and set up the remotes.
|
||||
|
||||
```bash
|
||||
$ grm repos sync --config example.config.toml
|
||||
[⚙] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
|
||||
[✔] git-repo-manager: Repository successfully cloned
|
||||
[⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
|
||||
[✔] git-repo-manager: OK
|
||||
[⚙] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git"
|
||||
[✔] dotfiles: Repository successfully cloned
|
||||
[✔] dotfiles: OK
|
||||
```
|
||||
|
||||
If you run it again, it will report no changes:
|
||||
|
||||
```
|
||||
$ grm repos sync --config example.config.toml
|
||||
[✔] git-repo-manager: OK
|
||||
[✔] dotfiles: OK
|
||||
```
|
||||
|
||||
### Generate your own configuration
|
||||
|
||||
Now, if you already have a few repositories, it would be quite laborious to write
|
||||
a configuration from scratch. Luckily, GRM has a way to generate a configuration
|
||||
from an existing file tree:
|
||||
|
||||
```bash
|
||||
$ grm repos find ~/your/project/root > config.toml
|
||||
```
|
||||
|
||||
This will detect all repositories and remotes and write them to `config.toml`.
|
||||
|
||||
### Show the state of your projects
|
||||
|
||||
```bash
|
||||
$ grm repos status --config example.config.toml
|
||||
╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮
|
||||
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
|
||||
╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡
|
||||
│ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │
|
||||
│ ┆ ┆ ┆ <origin/master> ✔ ┆ ┆ origin │
|
||||
├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤
|
||||
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
|
||||
╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯
|
||||
```
|
||||
|
||||
You can also use `status` without `--config` to check the repository you're currently
|
||||
in:
|
||||
|
||||
```
|
||||
$ cd ~/example-projects/dotfiles
|
||||
$ grm repos status
|
||||
╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮
|
||||
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
|
||||
╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡
|
||||
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
|
||||
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
|
||||
```
|
||||
Or, you can pull repository information from a forge (e.g. GitHub, GitLab) and
|
||||
clone the repositories.
|
||||
|
||||
There are also hybrid modes where you pull information from a forge and create a
|
||||
configuration file that you can use later.
|
||||
|
||||
124
docs/src/testing.md
Normal file
124
docs/src/testing.md
Normal file
@@ -0,0 +1,124 @@
|
||||
# Testing
|
||||
|
||||
There are two distinct test suites: One for unit test (`just test-unit`) and
|
||||
integration tests (`just test-integration`) that is part of the rust crate, and
|
||||
a separate e2e test suite in python (`just test-e2e`).
|
||||
|
||||
To run all tests, run `just test`.
|
||||
|
||||
When contributing, consider whether it makes sense to add tests which could
|
||||
prevent regressions in the future. When fixing bugs, it makes sense to add tests
|
||||
that expose the wrong behavior beforehand.
|
||||
|
||||
The unit and integration tests are very small and only test a few self-contained
|
||||
functions (like validation of certain input).
|
||||
|
||||
## E2E tests
|
||||
|
||||
The main focus of the testing setup lays on the e2e tests. Each user-facing
|
||||
behavior *should* have a corresponding e2e test. These are the most important
|
||||
tests, as they test functionality the user will use in the end.
|
||||
|
||||
The test suite is written in python and uses
|
||||
[pytest](https://docs.pytest.org/en/stable/). There are helper functions that
|
||||
set up temporary git repositories and remotes in a `tmpfs`.
|
||||
|
||||
Effectively, each tests works like this:
|
||||
|
||||
* Set up some prerequisites (e.g. different git repositories or configuration
|
||||
files)
|
||||
* Run `grm`
|
||||
* Check that everything is according to expected behavior (e.g. that `grm` had
|
||||
certain output and exit code, that the target repositories have certain
|
||||
branches, heads and remotes, ...)
|
||||
|
||||
As there are many different scenarios, the tests make heavy use of the
|
||||
[`@pytest.mark.parametrize`](https://docs.pytest.org/en/stable/how-to/parametrize.html#pytest-mark-parametrize)
|
||||
decorator to get all permutations of input parameters (e.g. whether a
|
||||
configuration exists, what a config value is set to, how the repository looks
|
||||
like, ...)
|
||||
|
||||
Whenever you write a new test, think about the different circumstances that can
|
||||
happen. What are the failure modes? What affects the behavior? Parametrize each
|
||||
of these behaviors.
|
||||
|
||||
### Optimization
|
||||
|
||||
Note: You will most likely not need to read this.
|
||||
|
||||
Each test parameter will exponentially increase the number of tests that will be
|
||||
run. As a general rule, comprehensiveness is more important than test suite
|
||||
runtime (so if in doubt, better to add another parameter to catch every edge
|
||||
case). But try to keep the total runtime sane. Currently, the whole `just test-e2e`
|
||||
target runs ~8'000 tests and takes around 5 minutes on my machine, excluding
|
||||
binary and docker build time. I'd say that keeping it under 10 minutes is a good
|
||||
idea.
|
||||
|
||||
To optimize tests, look out for two patterns: Dependency and Orthogonality
|
||||
|
||||
#### Dependency
|
||||
|
||||
If a parameter depends on another one, it makes little sense to handle them
|
||||
independently. Example: You have a parameter that specifies whether a
|
||||
configuration is used, and another parameter that sets a certain value in that
|
||||
configuration file. It might look something like this:
|
||||
|
||||
```python
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
@pytest.mark.parametrize("use_value", ["0", "1"])
|
||||
def test(...):
|
||||
```
|
||||
|
||||
This leads to 4 tests being instantiated. But there is little point in setting a
|
||||
configuration value when no config is used, so the combinations `(False, "0")`
|
||||
and `(False, "1")` are redundant. To remedy this, spell out the optimized
|
||||
permutation manually:
|
||||
|
||||
```python
|
||||
@pytest.mark.parametrize("config", ((True, "0"), (True, "1"), (False, None)))
|
||||
def test(...):
|
||||
(use_config, use_value) = config
|
||||
```
|
||||
|
||||
This cuts down the number of tests by 25%. If you have more dependent parameters
|
||||
(e.g. additional configuration values), this gets even better. Generally, this
|
||||
will cut down the number of tests to
|
||||
|
||||
\\[ \frac{1}{o \cdot c} + \frac{1}{(o \cdot c) ^ {(n + 1)}} \\]
|
||||
|
||||
with \\( o \\) being the number of values of the parent parameter a parameter is
|
||||
dependent on, \\( c \\) being the cardinality of the test input (so you can
|
||||
assume \\( o = 1 \\) and \\( c = 2 \\) for boolean parameters), and \\( n \\)
|
||||
being the number of parameters that are optimized, i.e. folded into their
|
||||
dependent parameter.
|
||||
|
||||
As an example: Folding down two boolean parameters into one dependent parent
|
||||
boolean parameter will cut down the number of tests to 62.5%!
|
||||
|
||||
#### Orthogonality
|
||||
|
||||
If different test parameters are independent of each other, there is little
|
||||
point in testing their combinations. Instead, split them up into different test
|
||||
functions. For boolean parameters, this will cut the number of tests in half.
|
||||
|
||||
So instead of this:
|
||||
|
||||
```python
|
||||
@pytest.mark.parametrize("param1", [True, False])
|
||||
@pytest.mark.parametrize("param2", [True, False])
|
||||
def test(...):
|
||||
```
|
||||
|
||||
Rather do this:
|
||||
|
||||
```python
|
||||
@pytest.mark.parametrize("param1", [True, False])
|
||||
def test_param1(...):
|
||||
|
||||
@pytest.mark.parametrize("param2", [True, False])
|
||||
def test_param2(...):
|
||||
```
|
||||
|
||||
The tests are running in Docker via docker-compose. This is mainly needed to
|
||||
test networking functionality like GitLab integration, with the GitLab API being
|
||||
mocked by a simple flask container.
|
||||
183
docs/src/tutorial.md
Normal file
183
docs/src/tutorial.md
Normal file
@@ -0,0 +1,183 @@
|
||||
# Tutorial
|
||||
|
||||
Here, you'll find a quick overview over the most common functionality of GRM.
|
||||
|
||||
## Managing existing repositories
|
||||
|
||||
Let's say you have your git repositories at `~/code`. To start managing them via
|
||||
GRM, first create a configuration:
|
||||
|
||||
```bash
|
||||
grm repos find local ~/code --format yaml > ~/code/config.yml
|
||||
```
|
||||
|
||||
The result may look something like this:
|
||||
|
||||
```yaml
|
||||
---
|
||||
trees:
|
||||
- root: ~/code
|
||||
repos:
|
||||
- name: git-repo-manager
|
||||
worktree_setup: true
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "https://github.com/hakoerber/git-repo-manager.git"
|
||||
type: https
|
||||
```
|
||||
|
||||
To apply the configuration and check whether all repositories are in sync, run
|
||||
the following:
|
||||
|
||||
```bash
|
||||
$ grm repos sync config --config ~/code/config.yml
|
||||
[✔] git-repo-manager: OK
|
||||
```
|
||||
|
||||
Well, obviously there are no changes. To check how changes would be applied,
|
||||
let's change the name of the remote (currently `origin`):
|
||||
|
||||
```bash
|
||||
$ sed -i 's/name: origin/name: github/' ~/code/config.yml
|
||||
$ grm repos sync config --config ~/code/config.yml
|
||||
[⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
|
||||
[⚙] git-repo-manager: Deleting remote "origin"
|
||||
[✔] git-repo-manager: OK
|
||||
```
|
||||
|
||||
GRM replaced the `origin` remote with `github`.
|
||||
|
||||
The configuration (`~/code/config.yml` in this example) would usually be
|
||||
something you'd track in git or synchronize between machines via some other
|
||||
means. Then, on every machine, all your repositories are a single `grm repos
|
||||
sync` away!
|
||||
|
||||
## Getting repositories from a forge
|
||||
|
||||
Let's say you have a bunch of repositories on GitHub and you'd like to clone
|
||||
them all to your local machine.
|
||||
|
||||
To authenticate, you'll need to get a personal access token, as described in
|
||||
[the forge documentation](./forge_integration.md#github). Let's assume you put
|
||||
your token into `~/.github_token` (please don't if you're doing this "for
|
||||
real"!)
|
||||
|
||||
Let's first see what kind of repos we can find:
|
||||
|
||||
```bash
|
||||
$ grm repos find remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml
|
||||
---
|
||||
trees: []
|
||||
$
|
||||
```
|
||||
|
||||
Ummm, ok? No repos? This is because you have to *tell* GRM what to look for (if
|
||||
you don't, GRM will just relax, as it's lazy).
|
||||
|
||||
There are different filters (see [the forge
|
||||
documentation](./forge_integration.md#filters) for more info). In our case,
|
||||
we'll just use the `--owner` filter to get all repos that belong to us:
|
||||
|
||||
```bash
|
||||
$ grm repos find remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml
|
||||
---
|
||||
trees:
|
||||
- root: ~/code/github.com
|
||||
repos:
|
||||
- name: git-repo-manager
|
||||
worktree_setup: false
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "https://github.com/hakoerber/git-repo-manager.git"
|
||||
type: https
|
||||
```
|
||||
|
||||
Nice! The format is the same as we got from `grm repos find local` above. So if
|
||||
we wanted, we could save this file and use it with `grm repos sync config` as
|
||||
above. But there is an even easier way: We can directly clone the repositories!
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/
|
||||
[⚙] Cloning into "~/code/github.com/git-repo-manager" from "https://github.com/hakoerber/git-repo-manager.git"
|
||||
[✔] git-repo-manager: Repository successfully cloned
|
||||
[✔] git-repo-manager: OK
|
||||
```
|
||||
|
||||
Nice! Just to make sure, let's run the same command again:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/
|
||||
[✔] git-repo-manager: OK
|
||||
```
|
||||
|
||||
GRM saw that the repository is already there and did nothing (remember, it's
|
||||
lazy).
|
||||
|
||||
## Using worktrees
|
||||
|
||||
Worktrees are something that make it easier to work with multiple branches at
|
||||
the same time in a repository. Let's say we wanted to hack on the codebase of
|
||||
GRM:
|
||||
|
||||
```bash
|
||||
$ cd ~/code/github.com/git-repo-manager
|
||||
$ ls
|
||||
.gitignore
|
||||
Cargo.toml
|
||||
...
|
||||
```
|
||||
|
||||
Well, this is just a normal git repository. But let's try worktrees! First, we
|
||||
have to convert the existing repository to use the special worktree setup. For
|
||||
all worktree operations, we will use `grm worktree` (or `grm wt` for short):
|
||||
|
||||
```bash
|
||||
$ grm wt convert
|
||||
[✔] Conversion done
|
||||
$ ls
|
||||
$
|
||||
```
|
||||
|
||||
So, the code is gone? Not really, there is just no active worktree right now. So
|
||||
let's add one for `master`:
|
||||
|
||||
|
||||
```bash
|
||||
$ grm wt add master --track origin/master
|
||||
[✔] Worktree master created
|
||||
$ ls
|
||||
master
|
||||
$ (cd ./master && git status)
|
||||
On branch master
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
|
||||
Now, a single worktree is kind of pointless (if we only have one, we could also
|
||||
just use the normal setup, without worktrees). So let's add another one for
|
||||
`develop`:
|
||||
|
||||
```bash
|
||||
$ grm wt add develop --track origin/develop
|
||||
[✔] Worktree develop created
|
||||
$ ls
|
||||
develop
|
||||
master
|
||||
$ (cd ./develop && git status)
|
||||
On branch develop
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
|
||||
What's the point? The cool thing is that we can now start working in the
|
||||
`develop` worktree, without affecting the `master` worktree at all. If you're
|
||||
working on `develop` and want to quickly see what a certain file looks like in
|
||||
`master`, just look inside `./master`, it's all there!
|
||||
|
||||
This becomes especially interesting when you have many feature branches and are
|
||||
working on multiple features at the same time.
|
||||
|
||||
There are a lot of options that influence how worktrees are handled. Maybe you
|
||||
want to automatically track `origin/master` when you add a worktree called
|
||||
`master`? Maybe you want your feature branches to have a prefix, so when you're
|
||||
working on the `feature1` worktree, the remote branch will be
|
||||
`origin/awesomefeatures/feature1`? Check out [the chapter on
|
||||
worktrees](./worktrees.md) for all the things that are possible.
|
||||
32
docs/src/worktree_behavior.md
Normal file
32
docs/src/worktree_behavior.md
Normal file
@@ -0,0 +1,32 @@
|
||||
# Behavior Details
|
||||
|
||||
When working with worktrees and GRM, there is a lot going on under the hood.
|
||||
Each time you create a new worktree, GRM has to figure out what commit to set
|
||||
your new branch to and how to configure any potential remote branches.
|
||||
|
||||
To state again, the most important guideline is the following:
|
||||
|
||||
**The branch inside the worktree is always the same as the directory name of the
|
||||
worktree.**
|
||||
|
||||
The second set of guidelines relates to the commit to check out, and the remote
|
||||
branches to use:
|
||||
|
||||
* When a branch already exists, you will get a worktree for that branch
|
||||
* Existing local branches are never changed
|
||||
* Only do remote operations if specifically requested (via configuration file or
|
||||
command line parameters)
|
||||
* When you specify `--track`, you will get that exact branch as the tracking
|
||||
branch
|
||||
* When you specify `--no-track`, you will get no tracking branch
|
||||
|
||||
Apart from that, GRM tries to do The Right Thing<sup>TM</sup>. It should be as
|
||||
little surprising as possible.
|
||||
|
||||
In 99% of the cases, you will not have to care about the details, as the normal
|
||||
workflows are covered by the rules above. In case you want to know the exact
|
||||
behavior "specification", take a look at the [module documentation for
|
||||
`grm::worktree`](https://docs.rs/git-repo-manager/latest/grm/worktree/index.html).
|
||||
|
||||
If you think existing behavior is super-duper confusing and you have a better
|
||||
idea, do not hesitate to open a GitHub issue to discuss this!
|
||||
75
docs/src/worktree_remotes.md
Normal file
75
docs/src/worktree_remotes.md
Normal file
@@ -0,0 +1,75 @@
|
||||
# Worktrees and Remotes
|
||||
|
||||
To fetch all remote references from all remotes in a worktree setup, you can use
|
||||
the following command:
|
||||
|
||||
```
|
||||
$ grm wt fetch
|
||||
[✔] Fetched from all remotes
|
||||
```
|
||||
|
||||
This is equivalent to running `git fetch --all` in any of the worktrees.
|
||||
|
||||
Often, you may want to pull all remote changes into your worktrees. For this,
|
||||
use the `git pull` equivalent:
|
||||
|
||||
```
|
||||
$ grm wt pull
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
This will refuse when there are local changes, or if the branch cannot be fast
|
||||
forwarded. If you want to rebase your local branches, use the `--rebase` switch:
|
||||
|
||||
```
|
||||
$ grm wt pull --rebase
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
As noted, this will fail if there are any local changes in your worktree. If you
|
||||
want to stash these changes automatically before the pull (and unstash them
|
||||
afterwards), use the `--stash` option.
|
||||
|
||||
This will rebase your changes onto the upstream branch. This is mainly helpful
|
||||
for persistent branches that change on the remote side.
|
||||
|
||||
There is a similar rebase feature that rebases onto the **default** branch
|
||||
instead:
|
||||
|
||||
```
|
||||
$ grm wt rebase
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
This is super helpful for feature branches. If you want to incorporate changes
|
||||
made on the remote branches, use `grm wt rebase` and all your branches will be
|
||||
up to date. If you want to also update to remote tracking branches in one go,
|
||||
use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
|
||||
on non-fast-forwards:
|
||||
|
||||
```
|
||||
$ grm wt rebase --pull --rebase
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
"So, what's the difference between `pull --rebase` and `rebase --pull`? Why the
|
||||
hell is there a `--rebase` flag in the `rebase` command?"
|
||||
|
||||
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree to
|
||||
their remote branch, if possible. `rebase` rebases onto the **default** branch
|
||||
instead. The switches to `rebase` are just convenience, so you do not have to
|
||||
run two commands.
|
||||
|
||||
* `rebase --pull` is the same as `pull` && `rebase`
|
||||
* `rebase --pull --rebase` is the same as `pull --rebase` && `rebase`
|
||||
|
||||
I understand that the UX is not the most intuitive. If you can think of an
|
||||
improvement, please let me know (e.g. via an GitHub issue)!
|
||||
|
||||
As with `pull`, `rebase` will also refuse to run when there are changes in your
|
||||
worktree. And you can also use the `--stash` option to stash/unstash changes
|
||||
automatically.
|
||||
173
docs/src/worktree_working.md
Normal file
173
docs/src/worktree_working.md
Normal file
@@ -0,0 +1,173 @@
|
||||
# Working with Worktrees
|
||||
|
||||
## Creating a new worktree
|
||||
|
||||
To actually work, you'll first have to create a new worktree checkout. All
|
||||
worktree-related commands are available as subcommands of `grm worktree` (or
|
||||
`grm wt` for short):
|
||||
|
||||
```
|
||||
$ grm wt add mybranch
|
||||
[✔] Worktree mybranch created
|
||||
```
|
||||
|
||||
You'll see that there is now a directory called `mybranch` that contains a
|
||||
checkout of your repository, using the branch `mybranch`
|
||||
|
||||
```bash
|
||||
$ cd ./mybranch && git status
|
||||
On branch mybranch
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
|
||||
You can work in this repository as usual. Make changes, commit them, revert
|
||||
them, whatever you're up to :)
|
||||
|
||||
Just note that you *should* not change the branch inside the worktree directory.
|
||||
There is nothing preventing you from doing so, but you will notice that you'll
|
||||
run into problems when trying to remove a worktree (more on that later). It may
|
||||
also lead to confusing behavior, as there can be no two worktrees that have the
|
||||
same branch checked out. So if you decide to use the worktree setup, go all in,
|
||||
let `grm` manage your branches and bury `git branch` (and `git checkout -b`).
|
||||
|
||||
You will notice that there is no tracking branch set up for the new branch. You
|
||||
can of course set up one manually after creating the worktree, but there is an
|
||||
easier way, using the `--track` flag during creation. Let's create another
|
||||
worktree. Go back to the root of the repository, and run:
|
||||
|
||||
```bash
|
||||
$ grm wt add mybranch2 --track origin/mybranch2
|
||||
[✔] Worktree mybranch2 created
|
||||
```
|
||||
|
||||
You'll see that this branch is now tracking `mybranch2` on the `origin` remote:
|
||||
|
||||
```bash
|
||||
$ cd ./mybranch2 && git status
|
||||
On branch mybranch2
|
||||
|
||||
Your branch is up to date with 'origin/mybranch2'.
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
|
||||
The behavior of `--track` differs depending on the existence of the remote
|
||||
branch:
|
||||
|
||||
* If the remote branch already exists, `grm` uses it as the base of the new
|
||||
local branch.
|
||||
* If the remote branch does not exist (as in our example), `grm` will create a
|
||||
new remote tracking branch, using the default branch (either `main` or
|
||||
`master`) as the base
|
||||
|
||||
Often, you'll have a workflow that uses tracking branches by default. It would
|
||||
be quite tedious to add `--track` every single time. Luckily, the `grm.toml`
|
||||
file supports defaults for the tracking behavior. See this for an example:
|
||||
|
||||
```toml
|
||||
[track]
|
||||
default = true
|
||||
default_remote = "origin"
|
||||
```
|
||||
|
||||
This will set up a tracking branch on `origin` that has the same name as the
|
||||
local branch.
|
||||
|
||||
Sometimes, you might want to have a certain prefix for all your tracking
|
||||
branches. Maybe to prevent collisions with other contributors. You can simply
|
||||
set `default_remote_prefix` in `grm.toml`:
|
||||
|
||||
```toml
|
||||
[track]
|
||||
default = true
|
||||
default_remote = "origin"
|
||||
default_remote_prefix = "myname"
|
||||
```
|
||||
|
||||
When using branch `my-feature-branch`, the remote tracking branch would be
|
||||
`origin/myname/my-feature-branch` in this case.
|
||||
|
||||
Note that `--track` overrides any configuration in `grm.toml`. If you want to
|
||||
disable tracking, use `--no-track`.
|
||||
|
||||
## Showing the status of your worktrees
|
||||
|
||||
There is a handy little command that will show you an overview over all
|
||||
worktrees in a repository, including their status (i.e. changed files). Just run
|
||||
the following in the root of your repository:
|
||||
|
||||
```
|
||||
$ grm wt status
|
||||
╭───────────┬────────┬──────────┬──────────────────╮
|
||||
│ Worktree ┆ Status ┆ Branch ┆ Remote branch │
|
||||
╞═══════════╪════════╪══════════╪══════════════════╡
|
||||
│ mybranch ┆ ✔ ┆ mybranch ┆ │
|
||||
│ mybranch2 ┆ ✔ ┆ mybranch ┆ origin/mybranch2 │
|
||||
╰───────────┴────────┴──────────┴──────────────────╯
|
||||
```
|
||||
|
||||
The "Status" column would show any uncommitted changes (new / modified / deleted
|
||||
files) and the "Remote branch" would show differences to the remote branch (e.g.
|
||||
if there are new pushes to the remote branch that are not yet incorporated into
|
||||
your local branch).
|
||||
|
||||
|
||||
## Deleting worktrees
|
||||
|
||||
If you're done with your worktrees, use `grm wt delete` to delete them. Let's
|
||||
start with `mybranch2`:
|
||||
|
||||
```
|
||||
$ grm wt delete mybranch2
|
||||
[✔] Worktree mybranch2 deleted
|
||||
```
|
||||
|
||||
Easy. On to `mybranch`:
|
||||
|
||||
```
|
||||
$ grm wt delete mybranch
|
||||
[!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
|
||||
```
|
||||
|
||||
Hmmm. `grm` tells you:
|
||||
|
||||
"Hey, there is no remote branch that you could have pushed your changes to. I'd
|
||||
rather not delete work that you cannot recover."
|
||||
|
||||
Note that `grm` is very cautious here. As your repository will not be deleted,
|
||||
you could still recover the commits via
|
||||
[`git-reflog`](https://git-scm.com/docs/git-reflog). But better safe than
|
||||
sorry! Note that you'd get a similar error message if your worktree had any
|
||||
uncommitted files, for the same reason. Now you can either commit & push your
|
||||
changes, or you tell `grm` that you know what you're doing:
|
||||
|
||||
```
|
||||
$ grm wt delete mybranch --force
|
||||
[✔] Worktree mybranch deleted
|
||||
```
|
||||
|
||||
If you just want to delete all worktrees that do not contain any changes, you
|
||||
can also use the following:
|
||||
|
||||
```
|
||||
$ grm wt clean
|
||||
```
|
||||
|
||||
Note that this will not delete the default branch of the repository. It can of
|
||||
course still be deleted with `grm wt delete` if necessary.
|
||||
|
||||
### Converting an existing repository
|
||||
|
||||
It is possible to convert an existing directory to a worktree setup, using `grm
|
||||
wt convert`. This command has to be run in the root of the repository you want
|
||||
to convert:
|
||||
|
||||
```
|
||||
$ grm wt convert
|
||||
[✔] Conversion successful
|
||||
```
|
||||
|
||||
This command will refuse to run if you have any changes in your repository.
|
||||
Commit them and try again!
|
||||
|
||||
Afterwards, the directory is empty, as there are no worktrees checked out yet.
|
||||
Now you can use the usual commands to set up worktrees.
|
||||
@@ -1,58 +1,60 @@
|
||||
# Git Worktrees
|
||||
|
||||
## Why?
|
||||
The default workflow when using git is having your repository in a single
|
||||
directory. Then, you can check out a certain reference (usually a branch),
|
||||
which will update the files in the directory to match the state of that
|
||||
reference. Most of the time, this is exactly what you need and works perfectly.
|
||||
But especially when you're working with branches a lot, you may notice that
|
||||
there is a lot of work required to make everything run smoothly.
|
||||
|
||||
The default workflow when using git is having your repository in a single directory.
|
||||
Then, you can check out a certain reference (usually a branch), which will update
|
||||
the files in the directory to match the state of that reference. Most of the time,
|
||||
this is exactly what you need and works perfectly. But especially when you're working
|
||||
with branches a lot, you may notice that there is a lot of work required to make
|
||||
everything run smoothly.
|
||||
|
||||
Maybe you experienced the following: You're working on a feature branch. Then,
|
||||
for some reason, you have to change branches (maybe to investigate some issue).
|
||||
But you get the following:
|
||||
Maybe you have experienced the following: You're working on a feature branch.
|
||||
Then, for some reason, you have to change branches (maybe to investigate some
|
||||
issue). But you get the following:
|
||||
|
||||
```
|
||||
error: Your local changes to the following files would be overwritten by checkout
|
||||
```
|
||||
|
||||
Now you can create a temporary commit or stash your changes. In any case, you have
|
||||
some mental overhead before you can work on something else. Especially with stashes,
|
||||
you'll have to remember to do a `git stash pop` before resuming your work (I
|
||||
cannot count the number of times where I "rediscovered" some code hidden in some
|
||||
old stash I forgot about).
|
||||
Now you can create a temporary commit or stash your changes. In any case, you
|
||||
have some mental overhead before you can work on something else. Especially with
|
||||
stashes, you'll have to remember to do a `git stash pop` before resuming your
|
||||
work (I cannot count the number of times where I "rediscovered" some code hidden
|
||||
in some old stash I forgot about). Also, conflicts on a `git stash pop` are just
|
||||
horrible.
|
||||
|
||||
And even worse: If you're currently in the process of resolving merge conflicts or an
|
||||
interactive rebase, there is just no way to "pause" this work to check out a
|
||||
different branch.
|
||||
And even worse: If you're currently in the process of resolving merge conflicts
|
||||
or an interactive rebase, there is just no way to "pause" this work to check out
|
||||
a different branch.
|
||||
|
||||
Sometimes, it's crucial to have an unchanging state of your repository until some
|
||||
long-running process finishes. I'm thinking of Ansible and Terraform runs. I'd
|
||||
rather not change to a different branch while ansible or Terraform are running as
|
||||
I have no idea how those tools would behave (and I'm not too eager to find out).
|
||||
Sometimes, it's crucial to have an unchanging state of your repository until
|
||||
some long-running process finishes. I'm thinking of Ansible and Terraform runs.
|
||||
I'd rather not change to a different branch while ansible or Terraform are
|
||||
running as I have no idea how those tools would behave (and I'm not too eager to
|
||||
find out).
|
||||
|
||||
In any case, Git Worktrees are here for the rescue:
|
||||
|
||||
## What are git worktrees?
|
||||
|
||||
[Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have multiple
|
||||
independent checkouts of your repository on different directories. You can have
|
||||
multiple directories that correspond to different references in your repository.
|
||||
Each worktree has it's independent working tree (duh) and index, so there is no
|
||||
way to run into conflicts. Changing to a different branch is just a `cd` away (if
|
||||
the worktree is already set up).
|
||||
[Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have
|
||||
multiple independent checkouts of your repository on different directories. You
|
||||
can have multiple directories that correspond to different references in your
|
||||
repository. Each worktree has it's independent working tree (duh) and index, so
|
||||
there is no way to run into conflicts. Changing to a different branch is just a
|
||||
`cd` away (if the worktree is already set up).
|
||||
|
||||
## Worktrees in GRM
|
||||
|
||||
GRM exposes an opinionated way to use worktrees in your repositories. Opinionated,
|
||||
because there is a single invariant that makes reasoning about your worktree
|
||||
setup quite easy:
|
||||
GRM exposes an opinionated way to use worktrees in your repositories.
|
||||
Opinionated, because there is a single invariant that makes reasoning about your
|
||||
worktree setup quite easy:
|
||||
|
||||
**The branch inside the worktree is always the same as the directory name of the worktree.**
|
||||
**The branch inside the worktree is always the same as the directory name of the
|
||||
worktree.**
|
||||
|
||||
In other words: If you're checking out branch `mybranch` into a new worktree, the
|
||||
worktree directory will be named `mybranch`.
|
||||
In other words: If you're checking out branch `mybranch` into a new worktree,
|
||||
the worktree directory will be named `mybranch`.
|
||||
|
||||
GRM can be used with both "normal" and worktree-enabled repositories. But note
|
||||
that a single repository can be either the former or the latter. You'll have to
|
||||
@@ -67,161 +69,27 @@ name = "git-repo-manager"
|
||||
worktree_setup = true
|
||||
```
|
||||
|
||||
Now, when you run a `grm sync`, you'll notice that the directory of the repository
|
||||
is empty! Well, not totally, there is a hidden directory called `.git-main-working-tree`.
|
||||
This is where the repository actually "lives" (it's a bare checkout).
|
||||
Now, when you run a `grm sync`, you'll notice that the directory of the
|
||||
repository is empty! Well, not totally, there is a hidden directory called
|
||||
`.git-main-working-tree`. This is where the repository actually "lives" (it's a
|
||||
bare checkout).
|
||||
|
||||
### Creating a new worktree
|
||||
|
||||
To actually work, you'll first have to create a new worktree checkout. All
|
||||
worktree-related commands are available as subcommands of `grm worktree` (or
|
||||
`grm wt` for short):
|
||||
|
||||
```
|
||||
$ grm wt add mybranch
|
||||
[✔] Worktree mybranch created
|
||||
```
|
||||
|
||||
You'll see that there is now a directory called `mybranch` that contains a checkout
|
||||
of your repository, using the branch `mybranch`
|
||||
|
||||
```bash
|
||||
$ cd ./mybranch && git status
|
||||
On branch mybranch
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
|
||||
You can work in this repository as usual. Make changes, commit them, revert them,
|
||||
whatever you're up to :)
|
||||
|
||||
Just note that you *should* not change the branch inside the worktree
|
||||
directory. There is nothing preventing you from doing so, but you will notice
|
||||
that you'll run into problems when trying to remove a worktree (more on that
|
||||
later). It may also lead to confusing behaviour, as there can be no two
|
||||
worktrees that have the same branch checked out. So if you decide to use the
|
||||
worktree setup, go all in, let `grm` manage your branches and bury `git branch`
|
||||
(and `git checkout -b`).
|
||||
|
||||
You will notice that there is no tracking branch set up for the new branch. You
|
||||
can of course set up one manually after creating the worktree, but there is an
|
||||
easier way, using the `--track` flag during creation. Let's create another
|
||||
worktree. Go back to the root of the repository, and run:
|
||||
|
||||
```bash
|
||||
$ grm wt add mybranch2 --track origin/mybranch2
|
||||
[✔] Worktree mybranch2 created
|
||||
```
|
||||
|
||||
You'll see that this branch is now tracking `mybranch` on the `origin` remote:
|
||||
|
||||
```bash
|
||||
$ cd ./mybranch2 && git status
|
||||
On branch mybranch2
|
||||
|
||||
Your branch is up to date with 'origin/mybranch2'.
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
|
||||
The behaviour of `--track` differs depending on the existence of the remote branch:
|
||||
|
||||
* If the remote branch already exists, `grm` uses it as the base of the new
|
||||
local branch.
|
||||
* If the remote branch does not exist (as in our example), `grm` will create a
|
||||
new remote tracking branch, using the default branch (either `main` or `master`)
|
||||
as the base
|
||||
|
||||
### Showing the status of your worktrees
|
||||
|
||||
There is a handy little command that will show your an overview over all worktrees
|
||||
in a repository, including their status (i.e. changes files). Just run the following
|
||||
in the root of your repository:
|
||||
|
||||
```
|
||||
$ grm wt status
|
||||
╭───────────┬────────┬──────────┬──────────────────╮
|
||||
│ Worktree ┆ Status ┆ Branch ┆ Remote branch │
|
||||
╞═══════════╪════════╪══════════╪══════════════════╡
|
||||
│ mybranch ┆ ✔ ┆ mybranch ┆ │
|
||||
│ mybranch2 ┆ ✔ ┆ mybranch ┆ origin/mybranch2 │
|
||||
╰───────────┴────────┴──────────┴──────────────────╯
|
||||
```
|
||||
|
||||
The "Status" column would show any uncommitted changes (new / modified / deleted
|
||||
files) and the "Remote branch" would show differences to the remote branch (e.g.
|
||||
if there are new pushes to the remote branch that are not yet incorporated into
|
||||
your local branch).
|
||||
Note that there are a few specific things you can configure for a certain
|
||||
workspace. This is all done in an optional `grm.toml` file right in the root of
|
||||
the worktree. More on that later.
|
||||
|
||||
|
||||
### Deleting worktrees
|
||||
## Manual access
|
||||
|
||||
If you're done with your worktrees, use `grm wt delete` to delete them. Let's
|
||||
start with `mybranch2`:
|
||||
|
||||
```
|
||||
$ grm wt delete mybranch2
|
||||
[✔] Worktree mybranch2 deleted
|
||||
```
|
||||
|
||||
Easy. On to `mybranch`:
|
||||
|
||||
```
|
||||
$ grm wt delete mybranch
|
||||
[!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
|
||||
```
|
||||
|
||||
Hmmm. `grm` tells you:
|
||||
|
||||
"Hey, there is no remote branch that you could have pushed
|
||||
your changes to. I'd rather not delete work that you cannot recover."
|
||||
|
||||
Note that `grm` is very cautious here. As your repository will not be deleted,
|
||||
you could still recover the commits via [`git-reflog`](https://git-scm.com/docs/git-reflog).
|
||||
But better safe than sorry! Note that you'd get a similar error message if your
|
||||
worktree had any uncommitted files, for the same reason. Now you can either
|
||||
commit & push your changes, or you tell `grm` that you know what you're doing:
|
||||
|
||||
```
|
||||
$ grm wt delete mybranch --force
|
||||
[✔] Worktree mybranch deleted
|
||||
```
|
||||
|
||||
If you just want to delete all worktrees that do not contain any changes, you
|
||||
can also use the following:
|
||||
|
||||
```
|
||||
$ grm wt clean
|
||||
```
|
||||
|
||||
Note that this will not delete the default branch of the repository. It can of
|
||||
course still be delete with `grm wt delete` if neccessary.
|
||||
|
||||
### Converting an existing repository
|
||||
|
||||
It is possible to convert an existing directory to a worktree setup, using `grm
|
||||
wt convert`. This command has to be run in the root of the repository you want
|
||||
to convert:
|
||||
|
||||
```
|
||||
grm wt convert
|
||||
[✔] Conversion successful
|
||||
```
|
||||
|
||||
This command will refuse to run if you have any changes in your repository.
|
||||
Commit them and try again!
|
||||
|
||||
Afterwards, the directory is empty, as there are no worktrees checked out yet.
|
||||
Now you can use the usual commands to set up worktrees.
|
||||
|
||||
### Manual access
|
||||
|
||||
GRM isn't doing any magic, it's just git under the hood. If you need to have access
|
||||
to the underlying git repository, you can always do this:
|
||||
GRM isn't doing any magic, it's just git under the hood. If you need to have
|
||||
access to the underlying git repository, you can always do this:
|
||||
|
||||
```
|
||||
$ git --git-dir ./.git-main-working-tree [...]
|
||||
```
|
||||
|
||||
This should never be required (whenever you have to do this, you can consider
|
||||
this a bug in GRM and open an [issue](https://github.com/hakoerber/git-repo-manager/issues/new),
|
||||
but it may help in a pinch.
|
||||
this a bug in GRM and open an
|
||||
[issue](https://github.com/hakoerber/git-repo-manager/issues/new), but it may
|
||||
help in a pinch.
|
||||
|
||||
|
||||
1
e2e_tests/.gitignore
vendored
Normal file
1
e2e_tests/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/__pycache__/
|
||||
12
e2e_tests/conftest.py
Normal file
12
e2e_tests/conftest.py
Normal file
@@ -0,0 +1,12 @@
|
||||
import os
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
os.environ["GIT_AUTHOR_NAME"] = "Example user"
|
||||
os.environ["GIT_AUTHOR_EMAIL"] = "user@example.com"
|
||||
os.environ["GIT_COMMITTER_NAME"] = "Example user"
|
||||
os.environ["GIT_COMMITTER_EMAIL"] = "user@example.com"
|
||||
|
||||
|
||||
def pytest_unconfigure(config):
|
||||
pass
|
||||
32
e2e_tests/docker-compose.yml
Normal file
32
e2e_tests/docker-compose.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
version: "3.7"
|
||||
|
||||
services:
|
||||
pytest:
|
||||
build: ./docker
|
||||
volumes:
|
||||
- type: bind
|
||||
source: ./
|
||||
target: /tests
|
||||
read_only: true
|
||||
- type: tmpfs
|
||||
target: /tmp
|
||||
environment:
|
||||
TMPDIR: /tmp
|
||||
depends_on:
|
||||
- rest
|
||||
command:
|
||||
- "true"
|
||||
networks:
|
||||
main:
|
||||
|
||||
rest:
|
||||
build: ./docker-rest/
|
||||
expose:
|
||||
- "5000"
|
||||
networks:
|
||||
main:
|
||||
aliases:
|
||||
- alternate-rest
|
||||
|
||||
networks:
|
||||
main:
|
||||
19
e2e_tests/docker-rest/Dockerfile
Normal file
19
e2e_tests/docker-rest/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM docker.io/debian:11.3
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ENV FLASK_APP=app.py
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
dumb-init \
|
||||
python3-flask \
|
||||
python3-jinja2 \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
EXPOSE 5000
|
||||
|
||||
COPY flask .
|
||||
|
||||
CMD ["/usr/bin/dumb-init", "--", "flask", "run", "--port", "5000", "--host", "0.0.0.0"]
|
||||
7
e2e_tests/docker-rest/flask/app.py
Normal file
7
e2e_tests/docker-rest/flask/app.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from flask import Flask
|
||||
|
||||
app = Flask(__name__)
|
||||
app.url_map.strict_slashes = False
|
||||
|
||||
import github # noqa: E402,F401
|
||||
import gitlab # noqa: E402,F401
|
||||
101
e2e_tests/docker-rest/flask/github.py
Normal file
101
e2e_tests/docker-rest/flask/github.py
Normal file
@@ -0,0 +1,101 @@
|
||||
import os.path
|
||||
|
||||
import jinja2
|
||||
from app import app
|
||||
from flask import abort, jsonify, make_response, request
|
||||
|
||||
|
||||
def check_headers():
|
||||
if request.headers.get("accept") != "application/vnd.github.v3+json":
|
||||
app.logger.error("Invalid accept header")
|
||||
abort(500)
|
||||
auth_header = request.headers.get("authorization")
|
||||
if auth_header != "token secret-token:myauthtoken":
|
||||
app.logger.error("Invalid authorization header: %s", auth_header)
|
||||
abort(
|
||||
make_response(
|
||||
jsonify(
|
||||
{
|
||||
"message": "Bad credentials",
|
||||
"documentation_url": "https://docs.example.com/rest",
|
||||
}
|
||||
),
|
||||
401,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def add_pagination(response, page, last_page):
|
||||
host = request.headers["host"]
|
||||
link_header = ""
|
||||
|
||||
def args(page):
|
||||
args = request.args.copy()
|
||||
args["page"] = page
|
||||
return "&".join([f"{k}={v}" for k, v in args.items()])
|
||||
|
||||
if page < last_page:
|
||||
link_header += (
|
||||
f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", '
|
||||
)
|
||||
link_header += (
|
||||
f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"'
|
||||
)
|
||||
response.headers["link"] = link_header
|
||||
|
||||
|
||||
def read_project_files(namespaces=[]):
|
||||
last_page = 4
|
||||
page = int(request.args.get("page", "1"))
|
||||
response_file = f"./github_api_page_{page}.json.j2"
|
||||
if not os.path.exists(response_file):
|
||||
return jsonify([])
|
||||
|
||||
response = make_response(
|
||||
jinja2.Template(open(response_file).read()).render(
|
||||
namespace=namespaces[page - 1]
|
||||
)
|
||||
)
|
||||
add_pagination(response, page, last_page)
|
||||
response.headers["content-type"] = "application/json"
|
||||
return response
|
||||
|
||||
|
||||
def single_namespaced_projects(namespace):
|
||||
return read_project_files([namespace] * 4)
|
||||
|
||||
|
||||
def mixed_projects(namespaces):
|
||||
return read_project_files(namespaces)
|
||||
|
||||
|
||||
@app.route("/github/users/<string:user>/repos/")
|
||||
def github_user_repos(user):
|
||||
check_headers()
|
||||
if user == "myuser1":
|
||||
return single_namespaced_projects("myuser1")
|
||||
return jsonify([])
|
||||
|
||||
|
||||
@app.route("/github/orgs/<string:group>/repos/")
|
||||
def github_group_repos(group):
|
||||
check_headers()
|
||||
if not (request.args.get("type") == "all"):
|
||||
abort(500, "wrong arguments")
|
||||
if group == "mygroup1":
|
||||
return single_namespaced_projects("mygroup1")
|
||||
return jsonify([])
|
||||
|
||||
|
||||
@app.route("/github/user/repos/")
|
||||
def github_own_repos():
|
||||
check_headers()
|
||||
return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"])
|
||||
|
||||
|
||||
@app.route("/github/user/")
|
||||
def github_user():
|
||||
check_headers()
|
||||
response = make_response(open("./github_api_user.json").read())
|
||||
response.headers["content-type"] = "application/json"
|
||||
return response
|
||||
228
e2e_tests/docker-rest/flask/github_api_page_1.json.j2
Normal file
228
e2e_tests/docker-rest/flask/github_api_page_1.json.j2
Normal file
@@ -0,0 +1,228 @@
|
||||
[
|
||||
{
|
||||
"id": 1,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject1",
|
||||
"full_name": "{{ namespace }}/myproject1",
|
||||
"private": true,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject1",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject1",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject1/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject1/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject1/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject1/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject1/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject1/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject1/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject1/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject1/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject1/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject1/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject1/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject1/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject1/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject1/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject1.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject1.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject1.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject1",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject2",
|
||||
"full_name": "{{ namespace }}/myproject2",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject2",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject2",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject2/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject2/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject2/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject2/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject2/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject2/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject2/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject2/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject2/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject2/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject2/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject2/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject2/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject2/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject2/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject2.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject2.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject2.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject2",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
115
e2e_tests/docker-rest/flask/github_api_page_2.json.j2
Normal file
115
e2e_tests/docker-rest/flask/github_api_page_2.json.j2
Normal file
@@ -0,0 +1,115 @@
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject3",
|
||||
"full_name": "{{ namespace }}/myproject3",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject3",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject3",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject3/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject3/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject3/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject3/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject3/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject3/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject3/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject3/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject3/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject3/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject3/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject3/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject3/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject3/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject3/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject3.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject3.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject3.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject3",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
115
e2e_tests/docker-rest/flask/github_api_page_3.json.j2
Normal file
115
e2e_tests/docker-rest/flask/github_api_page_3.json.j2
Normal file
@@ -0,0 +1,115 @@
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject4",
|
||||
"full_name": "{{ namespace }}/myproject4",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject4",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject4",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject4/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject4/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject4/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject4/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject4/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject4/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject4/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject4/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject4/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject4/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject4/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject4/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject4/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject4/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject4/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject4.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject4.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject4.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject4",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
115
e2e_tests/docker-rest/flask/github_api_page_4.json.j2
Normal file
115
e2e_tests/docker-rest/flask/github_api_page_4.json.j2
Normal file
@@ -0,0 +1,115 @@
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject5",
|
||||
"full_name": "{{ namespace }}/myproject5",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject5",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject5",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject5/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject5/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject5/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject5/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject5/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject5/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject5/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject5/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject5/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject5/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject5/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject5/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject5/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject5/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject5/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject5.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject5.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject5.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject5",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
46
e2e_tests/docker-rest/flask/github_api_user.json
Normal file
46
e2e_tests/docker-rest/flask/github_api_user.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"login": "myuser1",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjE=",
|
||||
"avatar_url": "https://example.com/images/error/octocat_happy.gif",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/octocat",
|
||||
"html_url": "https://example.com/octocat",
|
||||
"followers_url": "https://api.example.com/users/octocat/followers",
|
||||
"following_url": "https://api.example.com/users/octocat/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/octocat/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/octocat/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/octocat/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/octocat/orgs",
|
||||
"repos_url": "https://api.example.com/users/octocat/repos",
|
||||
"events_url": "https://api.example.com/users/octocat/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/octocat/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false,
|
||||
"name": "monalisa octocat",
|
||||
"company": "GitHub",
|
||||
"blog": "https://example.com/blog",
|
||||
"location": "San Francisco",
|
||||
"email": "octocat@example.com",
|
||||
"hireable": false,
|
||||
"bio": "There once was...",
|
||||
"twitter_username": "monatheoctocat",
|
||||
"public_repos": 2,
|
||||
"public_gists": 1,
|
||||
"followers": 20,
|
||||
"following": 0,
|
||||
"created_at": "2008-01-14T04:33:35Z",
|
||||
"updated_at": "2008-01-14T04:33:35Z",
|
||||
"private_gists": 81,
|
||||
"total_private_repos": 100,
|
||||
"owned_private_repos": 100,
|
||||
"disk_usage": 10000,
|
||||
"collaborators": 8,
|
||||
"two_factor_authentication": true,
|
||||
"plan": {
|
||||
"name": "Medium",
|
||||
"space": 400,
|
||||
"private_repos": 20,
|
||||
"collaborators": 0
|
||||
}
|
||||
}
|
||||
104
e2e_tests/docker-rest/flask/gitlab.py
Normal file
104
e2e_tests/docker-rest/flask/gitlab.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import os.path
|
||||
|
||||
import jinja2
|
||||
from app import app
|
||||
from flask import abort, jsonify, make_response, request
|
||||
|
||||
|
||||
def check_headers():
|
||||
if request.headers.get("accept") != "application/json":
|
||||
app.logger.error("Invalid accept header")
|
||||
abort(500)
|
||||
auth_header = request.headers.get("authorization")
|
||||
if auth_header != "bearer secret-token:myauthtoken":
|
||||
app.logger.error("Invalid authorization header: %s", auth_header)
|
||||
abort(
|
||||
make_response(
|
||||
jsonify(
|
||||
{
|
||||
"message": "Bad credentials",
|
||||
"documentation_url": "https://docs.example.com/rest",
|
||||
}
|
||||
),
|
||||
401,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def add_pagination(response, page, last_page):
|
||||
host = request.headers["host"]
|
||||
link_header = ""
|
||||
|
||||
def args(page):
|
||||
args = request.args.copy()
|
||||
args["page"] = page
|
||||
return "&".join([f"{k}={v}" for k, v in args.items()])
|
||||
|
||||
if page < last_page:
|
||||
link_header += (
|
||||
f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", '
|
||||
)
|
||||
link_header += (
|
||||
f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"'
|
||||
)
|
||||
response.headers["link"] = link_header
|
||||
|
||||
|
||||
def read_project_files(namespaces=[]):
|
||||
last_page = 4
|
||||
page = int(request.args.get("page", "1"))
|
||||
response_file = f"./gitlab_api_page_{page}.json"
|
||||
if not os.path.exists(response_file):
|
||||
return jsonify([])
|
||||
|
||||
response = make_response(
|
||||
jinja2.Template(open(response_file).read()).render(
|
||||
namespace=namespaces[page - 1]
|
||||
)
|
||||
)
|
||||
add_pagination(response, page, last_page)
|
||||
response.headers["content-type"] = "application/json"
|
||||
return response
|
||||
|
||||
|
||||
def single_namespaced_projects(namespace):
|
||||
return read_project_files([namespace] * 4)
|
||||
|
||||
|
||||
def mixed_projects(namespaces):
|
||||
return read_project_files(namespaces)
|
||||
|
||||
|
||||
@app.route("/gitlab/api/v4/users/<string:user>/projects")
|
||||
def gitlab_user_repos(user):
|
||||
check_headers()
|
||||
if user == "myuser1":
|
||||
return single_namespaced_projects("myuser1")
|
||||
return jsonify([])
|
||||
|
||||
|
||||
@app.route("/gitlab/api/v4/groups/<string:group>/projects")
|
||||
def gitlab_group_repos(group):
|
||||
check_headers()
|
||||
if not (
|
||||
request.args.get("include_subgroups") == "true"
|
||||
and request.args.get("archived") == "false"
|
||||
):
|
||||
abort(500, "wrong arguments")
|
||||
if group == "mygroup1":
|
||||
return single_namespaced_projects("mygroup1")
|
||||
return jsonify([])
|
||||
|
||||
|
||||
@app.route("/gitlab/api/v4/projects/")
|
||||
def gitlab_own_repos():
|
||||
check_headers()
|
||||
return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"])
|
||||
|
||||
|
||||
@app.route("/gitlab/api/v4/user/")
|
||||
def gitlab_user():
|
||||
check_headers()
|
||||
response = make_response(open("./gitlab_api_user.json").read())
|
||||
response.headers["content-type"] = "application/json"
|
||||
return response
|
||||
236
e2e_tests/docker-rest/flask/gitlab_api_page_1.json
Normal file
236
e2e_tests/docker-rest/flask/gitlab_api_page_1.json
Normal file
@@ -0,0 +1,236 @@
|
||||
[
|
||||
{
|
||||
"id": 1,
|
||||
"description": "",
|
||||
"name": "myproject1",
|
||||
"name_with_namespace": "{{ namespace }} / myproject1",
|
||||
"path": "myproject1",
|
||||
"path_with_namespace": "{{ namespace }}/myproject1",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject1.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject1.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject1",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject1",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "private",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject1-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"description": "",
|
||||
"name": "myproject2",
|
||||
"name_with_namespace": "{{ namespace }} / myproject2",
|
||||
"path": "myproject2",
|
||||
"path_with_namespace": "{{ namespace }}/myproject2",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject2.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject2.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject2",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject2",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject2-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
119
e2e_tests/docker-rest/flask/gitlab_api_page_2.json
Normal file
119
e2e_tests/docker-rest/flask/gitlab_api_page_2.json
Normal file
@@ -0,0 +1,119 @@
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"description": "",
|
||||
"name": "myproject3",
|
||||
"name_with_namespace": "{{ namespace }} / myproject3",
|
||||
"path": "myproject3",
|
||||
"path_with_namespace": "{{ namespace }}/myproject3",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject3.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject3.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject3",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject3",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject3-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
119
e2e_tests/docker-rest/flask/gitlab_api_page_3.json
Normal file
119
e2e_tests/docker-rest/flask/gitlab_api_page_3.json
Normal file
@@ -0,0 +1,119 @@
|
||||
[
|
||||
{
|
||||
"id": 4,
|
||||
"description": "",
|
||||
"name": "myproject4",
|
||||
"name_with_namespace": "{{ namespace }} / myproject4",
|
||||
"path": "myproject4",
|
||||
"path_with_namespace": "{{ namespace }}/myproject4",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject4.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject4.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject4",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject4",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject4-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
119
e2e_tests/docker-rest/flask/gitlab_api_page_4.json
Normal file
119
e2e_tests/docker-rest/flask/gitlab_api_page_4.json
Normal file
@@ -0,0 +1,119 @@
|
||||
[
|
||||
{
|
||||
"id": 5,
|
||||
"description": "",
|
||||
"name": "myproject5",
|
||||
"name_with_namespace": "{{ namespace }} / myproject5",
|
||||
"path": "myproject5",
|
||||
"path_with_namespace": "{{ namespace }}/myproject5",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject5.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject5.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject5",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject5",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject5-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
42
e2e_tests/docker-rest/flask/gitlab_api_user.json
Normal file
42
e2e_tests/docker-rest/flask/gitlab_api_user.json
Normal file
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"id": 1,
|
||||
"username": "myuser1",
|
||||
"name": "My User",
|
||||
"state": "active",
|
||||
"avatar_url": "https://example.com/avatar",
|
||||
"web_url": "https://example.com/myuser1",
|
||||
"created_at": "2016-12-10T10:09:11.585Z",
|
||||
"bio": "",
|
||||
"location": "",
|
||||
"public_email": "",
|
||||
"skype": "",
|
||||
"linkedin": "",
|
||||
"twitter": "",
|
||||
"website_url": "",
|
||||
"organization": "",
|
||||
"job_title": "",
|
||||
"pronouns": "",
|
||||
"bot": false,
|
||||
"work_information": null,
|
||||
"followers": 0,
|
||||
"following": 0,
|
||||
"is_followed": false,
|
||||
"local_time": "11:59 PM",
|
||||
"last_sign_in_at": "2020-03-14T09:13:44.977Z",
|
||||
"confirmed_at": "2022-05-19T23:48:47.033Z",
|
||||
"last_activity_on": "2022-05-19",
|
||||
"email": "myuser1@example.com",
|
||||
"theme_id": null,
|
||||
"color_scheme_id": 1,
|
||||
"projects_limit": 100000,
|
||||
"current_sign_in_at": "2022-05-19T23:45:49.661Z",
|
||||
"identities": [],
|
||||
"can_create_group": true,
|
||||
"can_create_project": true,
|
||||
"two_factor_enabled": false,
|
||||
"external": false,
|
||||
"private_profile": false,
|
||||
"commit_email": "myuser1@example.com",
|
||||
"shared_runners_minutes_limit": 2000,
|
||||
"extra_shared_runners_minutes_limit": null
|
||||
}
|
||||
14
e2e_tests/docker/Dockerfile
Normal file
14
e2e_tests/docker/Dockerfile
Normal file
@@ -0,0 +1,14 @@
|
||||
FROM docker.io/debian:11.3
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
python3-pytest \
|
||||
python3-toml \
|
||||
python3-git \
|
||||
python3-yaml \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /tests
|
||||
|
||||
ENTRYPOINT ["/bin/sh", "-c", "--"]
|
||||
434
e2e_tests/helpers.py
Normal file
434
e2e_tests/helpers.py
Normal file
@@ -0,0 +1,434 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
import git
|
||||
|
||||
binary = os.environ["GRM_BINARY"]
|
||||
|
||||
|
||||
def funcname():
|
||||
return inspect.stack()[1][3]
|
||||
|
||||
|
||||
def copytree(src, dest):
|
||||
shutil.copytree(src, dest, dirs_exist_ok=True)
|
||||
|
||||
|
||||
def get_temporary_directory(dir=None):
|
||||
return tempfile.TemporaryDirectory(dir=dir)
|
||||
|
||||
|
||||
def grm(args, cwd=None, is_invalid=False):
|
||||
cmd = subprocess.run([binary] + args, cwd=cwd, capture_output=True, text=True)
|
||||
if not is_invalid:
|
||||
assert "usage" not in cmd.stderr.lower()
|
||||
print(f"grmcmd: {args}")
|
||||
print(f"stdout:\n{cmd.stdout}")
|
||||
print(f"stderr:\n{cmd.stderr}")
|
||||
assert "secret-token:" not in cmd.stdout
|
||||
assert "secret-token:" not in cmd.stderr
|
||||
assert "panicked" not in cmd.stderr
|
||||
return cmd
|
||||
|
||||
|
||||
def shell(script):
|
||||
script = "set -o errexit\nset -o nounset\nset -o pipefail\n" + script
|
||||
cmd = subprocess.run(["bash"], input=script, text=True, capture_output=True)
|
||||
if cmd.returncode != 0:
|
||||
print(cmd.stdout)
|
||||
print(cmd.stderr)
|
||||
cmd.check_returncode()
|
||||
|
||||
|
||||
def checksum_directory(path):
|
||||
"""
|
||||
Gives a "checksum" of a directory that includes all files & directories
|
||||
recursively, including owner/group/permissions. Useful to compare that a
|
||||
directory did not change after a command was run.
|
||||
|
||||
The following makes it a bit complicated:
|
||||
|
||||
> Whether or not the lists are sorted depends on the file system.
|
||||
|
||||
- https://docs.python.org/3/library/os.html#os.walk
|
||||
|
||||
This means we have to first get a list of all hashes of files and
|
||||
directories, then sort the hashes and then create the hash for the whole
|
||||
directory.
|
||||
"""
|
||||
path = os.path.realpath(path)
|
||||
|
||||
hashes = []
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise f"{path} not found"
|
||||
|
||||
def get_stat_hash(path):
|
||||
checksum = hashlib.md5()
|
||||
|
||||
# A note about bytes(). You may think that it converts something to
|
||||
# bytes (akin to str()). But it actually creates a list of zero bytes
|
||||
# with the length specified by the parameter.
|
||||
#
|
||||
# This is kinda couterintuitive to me:
|
||||
#
|
||||
# str(5) => '5'
|
||||
# bytes(5) => b'\x00\x00\x00\x00\x00'
|
||||
def int_to_bytes(i):
|
||||
return i.to_bytes((i.bit_length() + 7) // 8, byteorder="big")
|
||||
|
||||
# lstat() instead of stat() so symlinks are not followed. So symlinks
|
||||
# are treated as-is and will also be checked for changes.
|
||||
stat = os.lstat(path)
|
||||
|
||||
# Note that the list of attributes does not include any timings except
|
||||
# mtime.
|
||||
for s in [
|
||||
stat.st_mode, # type & permission bits
|
||||
stat.st_ino, # inode
|
||||
stat.st_uid,
|
||||
stat.st_gid,
|
||||
# it's a float in seconds, so this gives us ~1us precision
|
||||
int(stat.st_mtime * 1e6),
|
||||
]:
|
||||
checksum.update(int_to_bytes(s))
|
||||
return checksum.digest()
|
||||
|
||||
for root, dirs, files in os.walk(path):
|
||||
for file in files:
|
||||
checksum = hashlib.md5()
|
||||
filepath = os.path.join(root, file)
|
||||
checksum.update(str.encode(filepath))
|
||||
checksum.update(get_stat_hash(filepath))
|
||||
with open(filepath, "rb") as f:
|
||||
while True:
|
||||
data = f.read(8192)
|
||||
if not data:
|
||||
break
|
||||
checksum.update(data)
|
||||
hashes.append(checksum.digest())
|
||||
|
||||
for d in dirs:
|
||||
checksum = hashlib.md5()
|
||||
dirpath = os.path.join(root, d)
|
||||
checksum.update(get_stat_hash(dirpath))
|
||||
hashes.append(checksum.digest())
|
||||
|
||||
checksum = hashlib.md5()
|
||||
for c in sorted(hashes):
|
||||
checksum.update(c)
|
||||
return checksum.hexdigest()
|
||||
|
||||
|
||||
class TempGitRepository:
|
||||
def __init__(self, dir=None):
|
||||
self.dir = dir
|
||||
|
||||
def __enter__(self):
|
||||
self.tmpdir = get_temporary_directory(self.dir)
|
||||
self.remote_1 = get_temporary_directory()
|
||||
self.remote_2 = get_temporary_directory()
|
||||
cmd = f"""
|
||||
cd {self.tmpdir.name}
|
||||
git -c init.defaultBranch=master init
|
||||
echo test > root-commit
|
||||
git add root-commit
|
||||
git commit -m "root-commit"
|
||||
git remote add origin file://{self.remote_1.name}
|
||||
git remote add otherremote file://{self.remote_2.name}
|
||||
"""
|
||||
|
||||
shell(cmd)
|
||||
return self.tmpdir.name
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
pass
|
||||
|
||||
|
||||
class TempGitRemote:
|
||||
obj = {}
|
||||
|
||||
def __init__(self, tmpdir, remoteid=None):
|
||||
self.tmpdir = tmpdir
|
||||
self.remoteid = remoteid
|
||||
|
||||
@classmethod
|
||||
def get(cls, cachekey=None, initfunc=None):
|
||||
if cachekey is None:
|
||||
tmpdir = get_temporary_directory()
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir.name}
|
||||
git -c init.defaultBranch=master init --bare
|
||||
"""
|
||||
)
|
||||
newobj = cls(tmpdir)
|
||||
remoteid = None
|
||||
if initfunc is not None:
|
||||
remoteid = newobj.init(initfunc)
|
||||
newobj.remoteid = remoteid
|
||||
return newobj, remoteid
|
||||
else:
|
||||
if cachekey not in cls.obj:
|
||||
tmpdir = get_temporary_directory()
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir.name}
|
||||
git -c init.defaultBranch=master init --bare
|
||||
"""
|
||||
)
|
||||
newobj = cls(tmpdir)
|
||||
remoteid = newobj.init(initfunc)
|
||||
newobj.remoteid = remoteid
|
||||
cls.obj[cachekey] = newobj
|
||||
return cls.clone(cls.obj[cachekey])
|
||||
|
||||
@classmethod
|
||||
def clone(cls, source):
|
||||
new_remote = get_temporary_directory()
|
||||
copytree(source.tmpdir.name, new_remote.name)
|
||||
return cls(new_remote, source.remoteid), source.remoteid
|
||||
|
||||
def init(self, func):
|
||||
return func(self.tmpdir.name)
|
||||
|
||||
def __enter__(self):
|
||||
return self.tmpdir
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
pass
|
||||
|
||||
|
||||
class TempGitRepositoryWorktree:
|
||||
obj = {}
|
||||
|
||||
def __init__(self, remotes, tmpdir, commit, remote1, remote2, remote1id, remote2id):
|
||||
self.remotes = remotes
|
||||
self.tmpdir = tmpdir
|
||||
self.commit = commit
|
||||
self.remote1 = remote1
|
||||
self.remote2 = remote2
|
||||
self.remote1id = remote1id
|
||||
self.remote2id = remote2id
|
||||
|
||||
@classmethod
|
||||
def get(cls, cachekey, branch=None, remotes=2, basedir=None, remote_setup=None):
|
||||
if cachekey not in cls.obj:
|
||||
tmpdir = get_temporary_directory()
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir.name}
|
||||
git -c init.defaultBranch=master init
|
||||
echo test > root-commit-in-worktree-1
|
||||
git add root-commit-in-worktree-1
|
||||
git commit -m "root-commit-in-worktree-1"
|
||||
echo test > root-commit-in-worktree-2
|
||||
git add root-commit-in-worktree-2
|
||||
git commit -m "root-commit-in-worktree-2"
|
||||
|
||||
git ls-files | xargs rm -rf
|
||||
mv .git .git-main-working-tree
|
||||
git --git-dir .git-main-working-tree config core.bare true
|
||||
"""
|
||||
)
|
||||
|
||||
repo = git.Repo(f"{tmpdir.name}/.git-main-working-tree")
|
||||
|
||||
commit = repo.head.commit.hexsha
|
||||
if branch is not None:
|
||||
repo.create_head(branch)
|
||||
|
||||
remote1 = None
|
||||
remote2 = None
|
||||
remote1id = None
|
||||
remote2id = None
|
||||
|
||||
if remotes >= 1:
|
||||
cachekeyremote, initfunc = (remote_setup or ((None, None),))[0]
|
||||
remote1, remote1id = TempGitRemote.get(
|
||||
cachekey=cachekeyremote, initfunc=initfunc
|
||||
)
|
||||
remote1 = remote1
|
||||
remote1id = remote1id
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir.name}
|
||||
git --git-dir .git-main-working-tree remote add origin file://{remote1.tmpdir.name}
|
||||
"""
|
||||
)
|
||||
repo.remotes.origin.fetch()
|
||||
repo.remotes.origin.push("master")
|
||||
|
||||
if remotes >= 2:
|
||||
cachekeyremote, initfunc = (remote_setup or (None, (None, None)))[1]
|
||||
remote2, remote2id = TempGitRemote.get(
|
||||
cachekey=cachekeyremote, initfunc=initfunc
|
||||
)
|
||||
remote2 = remote2
|
||||
remote2id = remote2id
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir.name}
|
||||
git --git-dir .git-main-working-tree remote add otherremote file://{remote2.tmpdir.name}
|
||||
"""
|
||||
)
|
||||
repo.remotes.otherremote.fetch()
|
||||
repo.remotes.otherremote.push("master")
|
||||
|
||||
cls.obj[cachekey] = cls(
|
||||
remotes, tmpdir, commit, remote1, remote2, remote1id, remote2id
|
||||
)
|
||||
|
||||
return cls.clone(cls.obj[cachekey], remote_setup=remote_setup)
|
||||
|
||||
@classmethod
|
||||
def clone(cls, source, remote_setup):
|
||||
newdir = get_temporary_directory()
|
||||
|
||||
copytree(source.tmpdir.name, newdir.name)
|
||||
|
||||
remote1 = None
|
||||
remote2 = None
|
||||
remote1id = None
|
||||
remote2id = None
|
||||
repo = git.Repo(os.path.join(newdir.name, ".git-main-working-tree"))
|
||||
if source.remotes >= 1:
|
||||
cachekey, initfunc = (remote_setup or ((None, None),))[0]
|
||||
remote1, remote1id = TempGitRemote.get(cachekey=cachekey, initfunc=initfunc)
|
||||
if remote1id != source.remote1id:
|
||||
repo.remotes.origin.fetch()
|
||||
repo.remotes.origin.push("master")
|
||||
if source.remotes >= 2:
|
||||
cachekey, initfunc = (remote_setup or (None, (None, None)))[1]
|
||||
remote2, remote2id = TempGitRemote.get(cachekey=cachekey, initfunc=initfunc)
|
||||
if remote2id != source.remote2id:
|
||||
repo.remotes.otherremote.fetch()
|
||||
repo.remotes.otherremote.push("master")
|
||||
|
||||
return cls(
|
||||
source.remotes,
|
||||
newdir,
|
||||
source.commit,
|
||||
remote1,
|
||||
remote2,
|
||||
remote1id,
|
||||
remote2id,
|
||||
)
|
||||
|
||||
def __enter__(self):
|
||||
return (self.tmpdir.name, self.commit)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
pass
|
||||
|
||||
|
||||
class RepoTree:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.root = get_temporary_directory()
|
||||
self.config = tempfile.NamedTemporaryFile()
|
||||
with open(self.config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{self.root.name}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test_worktree"
|
||||
worktree_setup = true
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", self.config.name])
|
||||
assert cmd.returncode == 0
|
||||
return (self.root.name, self.config.name, ["test", "test_worktree"])
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
del self.root
|
||||
del self.config
|
||||
|
||||
|
||||
class EmptyDir:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.tmpdir = get_temporary_directory()
|
||||
return self.tmpdir.name
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
del self.tmpdir
|
||||
|
||||
|
||||
class NonGitDir:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.tmpdir = get_temporary_directory()
|
||||
shell(
|
||||
f"""
|
||||
cd {self.tmpdir.name}
|
||||
mkdir testdir
|
||||
touch testdir/test
|
||||
touch test2
|
||||
"""
|
||||
)
|
||||
return self.tmpdir.name
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
del self.tmpdir
|
||||
|
||||
|
||||
class TempGitFileRemote:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.tmpdir = get_temporary_directory()
|
||||
shell(
|
||||
f"""
|
||||
cd {self.tmpdir.name}
|
||||
git -c init.defaultBranch=master init
|
||||
echo test > root-commit-in-remote-1
|
||||
git add root-commit-in-remote-1
|
||||
git commit -m "root-commit-in-remote-1"
|
||||
echo test > root-commit-in-remote-2
|
||||
git add root-commit-in-remote-2
|
||||
git commit -m "root-commit-in-remote-2"
|
||||
git ls-files | xargs rm -rf
|
||||
mv .git/* .
|
||||
git config core.bare true
|
||||
"""
|
||||
)
|
||||
head_commit_sha = git.Repo(self.tmpdir.name).head.commit.hexsha
|
||||
return (self.tmpdir.name, head_commit_sha)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
del self.tmpdir
|
||||
|
||||
|
||||
class NonExistentPath:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.dir = "/doesnotexist"
|
||||
if os.path.exists(self.dir):
|
||||
raise f"{self.dir} exists for some reason"
|
||||
return self.dir
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
pass
|
||||
13
e2e_tests/test_basic.py
Normal file
13
e2e_tests/test_basic.py
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from helpers import grm
|
||||
|
||||
|
||||
def test_invalid_command():
|
||||
cmd = grm(["whatever"], is_invalid=True)
|
||||
assert "usage" in cmd.stderr.lower()
|
||||
|
||||
|
||||
def test_help():
|
||||
cmd = grm(["--help"])
|
||||
assert "usage" in cmd.stdout.lower()
|
||||
289
e2e_tests/test_repos_find.py
Normal file
289
e2e_tests/test_repos_find.py
Normal file
@@ -0,0 +1,289 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
import toml
|
||||
import yaml
|
||||
from helpers import NonExistentPath, TempGitRepository, grm, shell
|
||||
|
||||
|
||||
def test_repos_find_nonexistent():
|
||||
with NonExistentPath() as nonexistent_dir:
|
||||
cmd = grm(["repos", "find", "local", nonexistent_dir])
|
||||
assert "does not exist" in cmd.stderr.lower()
|
||||
assert cmd.returncode != 0
|
||||
assert not os.path.exists(nonexistent_dir)
|
||||
|
||||
|
||||
def test_repos_find_file():
|
||||
with tempfile.NamedTemporaryFile() as tmpfile:
|
||||
cmd = grm(["repos", "find", "local", tmpfile.name])
|
||||
assert "not a directory" in cmd.stderr.lower()
|
||||
assert cmd.returncode != 0
|
||||
|
||||
|
||||
def test_repos_find_empty():
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
cmd = grm(["repos", "find", "local", tmpdir])
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_repos_find_invalid_format():
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
cmd = grm(
|
||||
["repos", "find", "local", tmpdir, "--format", "invalidformat"],
|
||||
is_invalid=True,
|
||||
)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "invalid value 'invalidformat'" in cmd.stderr
|
||||
|
||||
|
||||
def test_repos_find_non_git_repos():
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir}
|
||||
mkdir non_git
|
||||
(
|
||||
cd ./non_git
|
||||
echo test > test
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "find", "local", tmpdir])
|
||||
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("default_format", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("exclude", [None, "^.*/repo2$", "^not_matching$"])
|
||||
def test_repos_find(configtype, exclude, default_format):
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir}
|
||||
mkdir repo1
|
||||
(
|
||||
cd ./repo1
|
||||
git -c init.defaultBranch=master init
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
git remote add origin https://example.com/repo2.git
|
||||
git remote add someremote ssh://example.com/repo2.git
|
||||
)
|
||||
mkdir repo2
|
||||
(
|
||||
cd ./repo2
|
||||
git -c init.defaultBranch=master init
|
||||
git checkout -b main
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
git remote add origin https://example.com/repo2.git
|
||||
)
|
||||
mkdir non_git
|
||||
(
|
||||
cd non_git
|
||||
echo test > test
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
args = ["repos", "find", "local", tmpdir]
|
||||
if not default_format:
|
||||
args += ["--format", configtype]
|
||||
if exclude:
|
||||
args += ["--exclude", exclude]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
if exclude == "^.*/repo2$":
|
||||
assert re.match(r"^.*\[skipped\] .*\/repo2$", cmd.stderr.lower())
|
||||
assert "repo2" in cmd.stderr.lower()
|
||||
else:
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if default_format or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 1
|
||||
for tree in output["trees"]:
|
||||
assert set(tree.keys()) == {"root", "repos"}
|
||||
assert tree["root"] == tmpdir
|
||||
assert isinstance(tree["repos"], list)
|
||||
assert len(tree["repos"]) == (1 if exclude == "^.*/repo2$" else 2)
|
||||
|
||||
repo1 = [r for r in tree["repos"] if r["name"] == "repo1"][0]
|
||||
assert repo1["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo1["remotes"]) == 2
|
||||
|
||||
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "https"
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
|
||||
someremote = [r for r in repo1["remotes"] if r["name"] == "someremote"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert someremote["type"] == "ssh"
|
||||
assert someremote["url"] == "ssh://example.com/repo2.git"
|
||||
|
||||
if exclude == "^.*/repo2$":
|
||||
assert [r for r in tree["repos"] if r["name"] == "repo2"] == []
|
||||
else:
|
||||
repo2 = [r for r in tree["repos"] if r["name"] == "repo2"][0]
|
||||
assert repo2["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo2["remotes"]) == 1
|
||||
|
||||
origin = [r for r in repo2["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "https"
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("default_format", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_find_in_root(configtype, default_format):
|
||||
with TempGitRepository() as repo_dir:
|
||||
|
||||
args = ["repos", "find", "local", repo_dir]
|
||||
if not default_format:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if default_format or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 1
|
||||
for tree in output["trees"]:
|
||||
assert set(tree.keys()) == {"root", "repos"}
|
||||
assert tree["root"] == os.path.dirname(repo_dir)
|
||||
assert isinstance(tree["repos"], list)
|
||||
assert len(tree["repos"]) == 1
|
||||
|
||||
repo1 = [
|
||||
r for r in tree["repos"] if r["name"] == os.path.basename(repo_dir)
|
||||
][0]
|
||||
assert repo1["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo1["remotes"]) == 2
|
||||
|
||||
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "file"
|
||||
|
||||
someremote = [r for r in repo1["remotes"] if r["name"] == "otherremote"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert someremote["type"] == "file"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("default_format", [True, False])
|
||||
def test_repos_find_with_invalid_repo(configtype, default_format):
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir}
|
||||
mkdir repo1
|
||||
(
|
||||
cd ./repo1
|
||||
git -c init.defaultBranch=master init
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
git remote add origin https://example.com/repo2.git
|
||||
git remote add someremote ssh://example.com/repo2.git
|
||||
)
|
||||
mkdir repo2
|
||||
(
|
||||
cd ./repo2
|
||||
git -c init.defaultBranch=master init
|
||||
git checkout -b main
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
git remote add origin https://example.com/repo2.git
|
||||
)
|
||||
mkdir broken_repo
|
||||
(
|
||||
cd broken_repo
|
||||
echo "broken" > .git
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
args = ["repos", "find", "local", tmpdir]
|
||||
if not default_format:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
assert "broken" in cmd.stderr
|
||||
|
||||
if default_format or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 1
|
||||
for tree in output["trees"]:
|
||||
assert set(tree.keys()) == {"root", "repos"}
|
||||
assert tree["root"] == tmpdir
|
||||
assert isinstance(tree["repos"], list)
|
||||
assert len(tree["repos"]) == 2
|
||||
|
||||
repo1 = [r for r in tree["repos"] if r["name"] == "repo1"][0]
|
||||
assert repo1["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo1["remotes"]) == 2
|
||||
|
||||
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "https"
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
|
||||
someremote = [r for r in repo1["remotes"] if r["name"] == "someremote"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert someremote["type"] == "ssh"
|
||||
assert someremote["url"] == "ssh://example.com/repo2.git"
|
||||
|
||||
repo2 = [r for r in tree["repos"] if r["name"] == "repo2"][0]
|
||||
assert repo2["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo2["remotes"]) == 1
|
||||
|
||||
origin = [r for r in repo2["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "https"
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
955
e2e_tests/test_repos_find_remote.py
Normal file
955
e2e_tests/test_repos_find_remote.py
Normal file
@@ -0,0 +1,955 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
import toml
|
||||
import yaml
|
||||
from helpers import grm
|
||||
|
||||
ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"]
|
||||
PROVIDERS = ["github", "gitlab"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_invalid_provider(use_config):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
provider = "thisproviderdoesnotexist"
|
||||
token_command = "true"
|
||||
root = "/"
|
||||
"""
|
||||
)
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
cmd = grm(args, is_invalid=True)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
"thisproviderdoesnotexist",
|
||||
"--token-command",
|
||||
"true",
|
||||
"--root",
|
||||
"/",
|
||||
]
|
||||
cmd = grm(args, is_invalid=True)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
if not use_config:
|
||||
assert re.match(".*invalid value 'thisproviderdoesnotexist' for.*provider", cmd.stderr)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
def test_repos_find_remote_invalid_format(provider):
|
||||
cmd = grm(
|
||||
[
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--format",
|
||||
"invalidformat",
|
||||
"--token-command",
|
||||
"true",
|
||||
"--root",
|
||||
"/myroot",
|
||||
],
|
||||
is_invalid=True,
|
||||
)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "invalid value 'invalidformat'" in cmd.stderr
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
def test_repos_find_remote_token_command_failed(provider):
|
||||
cmd = grm(
|
||||
[
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--format",
|
||||
"yaml",
|
||||
"--token-command",
|
||||
"false",
|
||||
"--root",
|
||||
"/myroot",
|
||||
],
|
||||
is_invalid=True,
|
||||
)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "token command failed" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_wrong_token(provider, use_config):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo wrongtoken"
|
||||
root = "/myroot"
|
||||
[filters]
|
||||
access = true
|
||||
"""
|
||||
)
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
cmd = grm(args, is_invalid=True)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo wrongtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
"--access",
|
||||
]
|
||||
cmd = grm(args, is_invalid=True)
|
||||
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "bad credentials" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("default", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
"""
|
||||
)
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
]
|
||||
if not default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
|
||||
assert cmd.returncode == 0
|
||||
assert "did not specify any filters" in cmd.stderr.lower()
|
||||
|
||||
if default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("configtype_default", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_user_empty(
|
||||
provider, configtype, configtype_default, use_config
|
||||
):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
cfg = f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
|
||||
[filters]
|
||||
users = ["someotheruser"]
|
||||
"""
|
||||
|
||||
f.write(cfg)
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
"--user",
|
||||
"someotheruser",
|
||||
]
|
||||
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if configtype_default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("configtype_default", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("worktree_default", [True, False])
|
||||
@pytest.mark.parametrize("worktree", [True, False])
|
||||
@pytest.mark.parametrize("use_owner", [True, False])
|
||||
@pytest.mark.parametrize("force_ssh", [True, False])
|
||||
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
@pytest.mark.parametrize("override_remote_name", [True, False])
|
||||
def test_repos_find_remote_user(
|
||||
provider,
|
||||
configtype,
|
||||
configtype_default,
|
||||
worktree,
|
||||
worktree_default,
|
||||
use_owner,
|
||||
force_ssh,
|
||||
use_alternate_endpoint,
|
||||
use_config,
|
||||
override_remote_name,
|
||||
):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
cfg = f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
"""
|
||||
|
||||
if use_alternate_endpoint:
|
||||
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
|
||||
if not worktree_default:
|
||||
cfg += f"worktree = {str(worktree).lower()}\n"
|
||||
if force_ssh:
|
||||
cfg += "force_ssh = true\n"
|
||||
if override_remote_name:
|
||||
cfg += 'remote_name = "otherremote"\n'
|
||||
if use_owner:
|
||||
cfg += """
|
||||
[filters]
|
||||
owner = true\n
|
||||
"""
|
||||
else:
|
||||
cfg += """
|
||||
[filters]
|
||||
users = ["myuser1"]\n
|
||||
"""
|
||||
|
||||
print(cfg)
|
||||
f.write(cfg)
|
||||
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
]
|
||||
if use_owner:
|
||||
args += ["--owner"]
|
||||
else:
|
||||
args += ["--user", "myuser1"]
|
||||
if force_ssh:
|
||||
args += ["--force-ssh"]
|
||||
if override_remote_name:
|
||||
args += ["--remote-name", "otherremote"]
|
||||
if not worktree_default:
|
||||
args += ["--worktree", str(worktree).lower()]
|
||||
if use_alternate_endpoint:
|
||||
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
|
||||
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
|
||||
if use_alternate_endpoint and provider == "github":
|
||||
assert cmd.returncode != 0
|
||||
assert "overriding is not supported for github" in cmd.stderr.lower()
|
||||
return
|
||||
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if configtype_default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 1
|
||||
|
||||
assert set(output["trees"][0].keys()) == {"root", "repos"}
|
||||
assert isinstance(output["trees"][0]["repos"], list)
|
||||
assert len(output["trees"][0]["repos"]) == 5
|
||||
|
||||
for i in range(1, 6):
|
||||
repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
|
||||
0
|
||||
]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
if override_remote_name:
|
||||
assert repo["remotes"][0]["name"] == "otherremote"
|
||||
else:
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh or i == 1:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("configtype_default", [False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_group_empty(
|
||||
provider, configtype, configtype_default, use_alternate_endpoint, use_config
|
||||
):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
cfg = f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
"""
|
||||
|
||||
if use_alternate_endpoint:
|
||||
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
|
||||
cfg += """
|
||||
[filters]
|
||||
groups = ["someothergroup"]\n
|
||||
"""
|
||||
|
||||
f.write(cfg)
|
||||
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
"--group",
|
||||
"someothergroup",
|
||||
]
|
||||
if use_alternate_endpoint:
|
||||
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
|
||||
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
|
||||
if use_alternate_endpoint and provider == "github":
|
||||
assert cmd.returncode != 0
|
||||
assert "overriding is not supported for github" in cmd.stderr.lower()
|
||||
return
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if configtype_default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("configtype_default", [False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("worktree_default", [True, False])
|
||||
@pytest.mark.parametrize("worktree", [True, False])
|
||||
@pytest.mark.parametrize("force_ssh", [True, False])
|
||||
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_group(
|
||||
provider,
|
||||
configtype,
|
||||
configtype_default,
|
||||
worktree,
|
||||
worktree_default,
|
||||
force_ssh,
|
||||
use_alternate_endpoint,
|
||||
use_config,
|
||||
):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
cfg = f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
"""
|
||||
|
||||
if not worktree_default:
|
||||
cfg += f"worktree = {str(worktree).lower()}\n"
|
||||
if force_ssh:
|
||||
cfg += "force_ssh = true\n"
|
||||
if use_alternate_endpoint:
|
||||
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
|
||||
cfg += """
|
||||
[filters]
|
||||
groups = ["mygroup1"]\n
|
||||
"""
|
||||
|
||||
f.write(cfg)
|
||||
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
"--group",
|
||||
"mygroup1",
|
||||
]
|
||||
if not worktree_default:
|
||||
args += ["--worktree", str(worktree).lower()]
|
||||
if force_ssh:
|
||||
args += ["--force-ssh"]
|
||||
if use_alternate_endpoint:
|
||||
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
|
||||
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
if use_alternate_endpoint and provider == "github":
|
||||
assert cmd.returncode != 0
|
||||
assert "overriding is not supported for github" in cmd.stderr.lower()
|
||||
return
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if configtype_default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 1
|
||||
|
||||
assert set(output["trees"][0].keys()) == {"root", "repos"}
|
||||
assert isinstance(output["trees"][0]["repos"], list)
|
||||
assert len(output["trees"][0]["repos"]) == 5
|
||||
|
||||
for i in range(1, 6):
|
||||
repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
|
||||
0
|
||||
]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
if force_ssh or i == 1:
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/mygroup1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/mygroup1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("configtype_default", [False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("worktree_default", [True, False])
|
||||
@pytest.mark.parametrize("worktree", [True, False])
|
||||
@pytest.mark.parametrize("use_owner", [True, False])
|
||||
@pytest.mark.parametrize("force_ssh", [True, False])
|
||||
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_user_and_group(
|
||||
provider,
|
||||
configtype,
|
||||
configtype_default,
|
||||
worktree,
|
||||
worktree_default,
|
||||
use_owner,
|
||||
force_ssh,
|
||||
use_alternate_endpoint,
|
||||
use_config,
|
||||
):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
cfg = f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
"""
|
||||
|
||||
if not worktree_default:
|
||||
cfg += f"worktree = {str(worktree).lower()}\n"
|
||||
if force_ssh:
|
||||
cfg += "force_ssh = true\n"
|
||||
if use_alternate_endpoint:
|
||||
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
|
||||
cfg += """
|
||||
[filters]
|
||||
groups = ["mygroup1"]\n
|
||||
"""
|
||||
|
||||
if use_owner:
|
||||
cfg += "owner = true\n"
|
||||
else:
|
||||
cfg += 'users = ["myuser1"]\n'
|
||||
|
||||
f.write(cfg)
|
||||
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
"--group",
|
||||
"mygroup1",
|
||||
]
|
||||
if use_owner:
|
||||
args += ["--owner"]
|
||||
else:
|
||||
args += ["--user", "myuser1"]
|
||||
if not worktree_default:
|
||||
args += ["--worktree", str(worktree).lower()]
|
||||
if force_ssh:
|
||||
args += ["--force-ssh"]
|
||||
if use_alternate_endpoint:
|
||||
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
|
||||
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
if use_alternate_endpoint and provider == "github":
|
||||
assert cmd.returncode != 0
|
||||
assert "overriding is not supported for github" in cmd.stderr.lower()
|
||||
return
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if configtype_default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 2
|
||||
|
||||
user_namespace = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]
|
||||
|
||||
assert set(user_namespace.keys()) == {"root", "repos"}
|
||||
assert isinstance(user_namespace["repos"], list)
|
||||
assert len(user_namespace["repos"]) == 5
|
||||
|
||||
for i in range(1, 6):
|
||||
repo = [r for r in user_namespace["repos"] if r["name"] == f"myproject{i}"][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh or i == 1:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
group_namespace = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][0]
|
||||
|
||||
assert set(group_namespace.keys()) == {"root", "repos"}
|
||||
assert isinstance(group_namespace["repos"], list)
|
||||
assert len(group_namespace["repos"]) == 5
|
||||
|
||||
for i in range(1, 6):
|
||||
repo = [r for r in group_namespace["repos"] if r["name"] == f"myproject{i}"][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh or i == 1:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/mygroup1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/mygroup1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
@pytest.mark.parametrize("configtype_default", [False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("worktree_default", [True, False])
|
||||
@pytest.mark.parametrize("worktree", [True, False])
|
||||
@pytest.mark.parametrize("with_user_filter", [True, False])
|
||||
@pytest.mark.parametrize("with_group_filter", [True, False])
|
||||
@pytest.mark.parametrize("force_ssh", [True, False])
|
||||
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_owner(
|
||||
provider,
|
||||
configtype,
|
||||
configtype_default,
|
||||
worktree,
|
||||
worktree_default,
|
||||
with_user_filter,
|
||||
with_group_filter,
|
||||
force_ssh,
|
||||
use_alternate_endpoint,
|
||||
use_config,
|
||||
):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
cfg = f"""
|
||||
provider = "{provider}"
|
||||
token_command = "echo secret-token:myauthtoken"
|
||||
root = "/myroot"
|
||||
"""
|
||||
|
||||
if not worktree_default:
|
||||
cfg += f"worktree = {str(worktree).lower()}\n"
|
||||
if force_ssh:
|
||||
cfg += "force_ssh = true\n"
|
||||
if use_alternate_endpoint:
|
||||
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
|
||||
cfg += """
|
||||
[filters]
|
||||
access = true\n
|
||||
"""
|
||||
|
||||
if with_user_filter:
|
||||
cfg += 'users = ["myuser1"]\n'
|
||||
if with_group_filter:
|
||||
cfg += 'groups = ["mygroup1"]\n'
|
||||
|
||||
f.write(cfg)
|
||||
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--token-command",
|
||||
"echo secret-token:myauthtoken",
|
||||
"--root",
|
||||
"/myroot",
|
||||
"--access",
|
||||
]
|
||||
if not worktree_default:
|
||||
args += ["--worktree", str(worktree).lower()]
|
||||
if with_user_filter:
|
||||
args += ["--user", "myuser1"]
|
||||
if with_group_filter:
|
||||
args += ["--group", "mygroup1"]
|
||||
if force_ssh:
|
||||
args += ["--force-ssh"]
|
||||
if use_alternate_endpoint:
|
||||
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
|
||||
|
||||
if not configtype_default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
if use_alternate_endpoint and provider == "github":
|
||||
assert cmd.returncode != 0
|
||||
assert "overriding is not supported for github" in cmd.stderr.lower()
|
||||
return
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if configtype_default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 4
|
||||
|
||||
user_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]
|
||||
|
||||
assert set(user_namespace_1.keys()) == {"root", "repos"}
|
||||
assert isinstance(user_namespace_1["repos"], list)
|
||||
|
||||
if with_user_filter:
|
||||
assert len(user_namespace_1["repos"]) == 5
|
||||
|
||||
for i in range(1, 6):
|
||||
repo = [
|
||||
r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"
|
||||
][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh or i == 1:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
else:
|
||||
assert len(user_namespace_1["repos"]) == 2
|
||||
|
||||
for i in range(1, 3):
|
||||
repo = [
|
||||
r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"
|
||||
][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh or i == 1:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/myuser1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
user_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/myuser2"][0]
|
||||
|
||||
assert set(user_namespace_2.keys()) == {"root", "repos"}
|
||||
assert isinstance(user_namespace_2["repos"], list)
|
||||
assert len(user_namespace_2["repos"]) == 1
|
||||
|
||||
repo = user_namespace_2["repos"][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh:
|
||||
assert (
|
||||
repo["remotes"][0]["url"] == "ssh://git@example.com/myuser2/myproject3.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert repo["remotes"][0]["url"] == "https://example.com/myuser2/myproject3.git"
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][
|
||||
0
|
||||
]
|
||||
|
||||
assert set(group_namespace_1.keys()) == {"root", "repos"}
|
||||
assert isinstance(group_namespace_1["repos"], list)
|
||||
|
||||
if with_group_filter:
|
||||
assert len(group_namespace_1["repos"]) == 5
|
||||
|
||||
for i in range(1, 6):
|
||||
repo = [
|
||||
r for r in group_namespace_1["repos"] if r["name"] == f"myproject{i}"
|
||||
][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh or i == 1:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"ssh://git@example.com/mygroup1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== f"https://example.com/mygroup1/myproject{i}.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
else:
|
||||
assert len(group_namespace_1["repos"]) == 1
|
||||
|
||||
repo = group_namespace_1["repos"][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== "ssh://git@example.com/mygroup1/myproject4.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"]
|
||||
== "https://example.com/mygroup1/myproject4.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
group_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup2"][
|
||||
0
|
||||
]
|
||||
|
||||
assert set(group_namespace_2.keys()) == {"root", "repos"}
|
||||
assert isinstance(group_namespace_2["repos"], list)
|
||||
assert len(group_namespace_2["repos"]) == 1
|
||||
|
||||
repo = group_namespace_2["repos"][0]
|
||||
assert repo["worktree_setup"] is (not worktree_default and worktree)
|
||||
assert isinstance(repo["remotes"], list)
|
||||
assert len(repo["remotes"]) == 1
|
||||
assert repo["remotes"][0]["name"] == "origin"
|
||||
if force_ssh:
|
||||
assert (
|
||||
repo["remotes"][0]["url"] == "ssh://git@example.com/mygroup2/myproject5.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "ssh"
|
||||
else:
|
||||
assert (
|
||||
repo["remotes"][0]["url"] == "https://example.com/mygroup2/myproject5.git"
|
||||
)
|
||||
assert repo["remotes"][0]["type"] == "https"
|
||||
11
e2e_tests/test_repos_status.py
Normal file
11
e2e_tests/test_repos_status.py
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from helpers import RepoTree, grm
|
||||
|
||||
|
||||
def test_repos_sync_worktree_clone():
|
||||
with RepoTree() as (root, config, repos):
|
||||
cmd = grm(["repos", "status", "--config", config])
|
||||
assert cmd.returncode == 0
|
||||
for repo in repos:
|
||||
assert repo in cmd.stdout
|
||||
830
e2e_tests/test_repos_sync.py
Normal file
830
e2e_tests/test_repos_sync.py
Normal file
@@ -0,0 +1,830 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import tempfile
|
||||
import textwrap
|
||||
|
||||
import git
|
||||
import pytest
|
||||
from helpers import (
|
||||
NonExistentPath,
|
||||
TempGitFileRemote,
|
||||
TempGitRepository,
|
||||
checksum_directory,
|
||||
grm,
|
||||
shell,
|
||||
)
|
||||
|
||||
templates = {
|
||||
"repo_simple": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
""",
|
||||
"yaml": """
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: "test"
|
||||
""",
|
||||
},
|
||||
"repo_with_remote": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "{remotename}"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: test
|
||||
remotes:
|
||||
- name: "{remotename}"
|
||||
url: "file://{remote}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"repo_with_two_remotes": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: "test"
|
||||
remotes:
|
||||
- name: "origin"
|
||||
url: "file://{remote1}"
|
||||
type: "file"
|
||||
- name: "origin2"
|
||||
url: "file://{remote2}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"worktree_repo_simple": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: test
|
||||
worktree_setup: true
|
||||
"""
|
||||
),
|
||||
},
|
||||
"worktree_repo_with_remote": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: test
|
||||
worktree_setup: true
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"repo_in_subdirectory": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "outer/inner"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: outer/inner
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"nested_trees": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "outer"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees]]
|
||||
root = "{root}/subdir"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "inner"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: outer
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote1}"
|
||||
type: "file"
|
||||
- root: "{root}/subdir"
|
||||
repos:
|
||||
- name: inner
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote2}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_config_is_valid_symlink(configtype):
    """Syncing via a symlink to a valid config must behave like using the file directly."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote, head_commit_sha):
            with tempfile.NamedTemporaryFile() as config:
                with tempfile.TemporaryDirectory() as config_dir:
                    # Point a symlink at the real config file and hand grm the
                    # symlink instead of the file itself.
                    config_symlink = os.path.join(config_dir, "cfglink")
                    os.symlink(config.name, config_symlink)

                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config_symlink])
                    assert cmd.returncode == 0

                    # The repo must be cloned cleanly at the remote's HEAD.
                    git_dir = os.path.join(target, "test")
                    assert os.path.exists(git_dir)
                    with git.Repo(git_dir) as repo:
                        assert not repo.bare
                        assert not repo.is_dirty()
                        assert set([str(r) for r in repo.remotes]) == {"origin"}
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == head_commit_sha
|
||||
|
||||
|
||||
def test_repos_sync_config_is_invalid_symlink():
    """A --config symlink pointing at a nonexistent path must fail with "not found"."""
    with tempfile.TemporaryDirectory() as target:
        with tempfile.TemporaryDirectory() as config_dir:
            with NonExistentPath() as nonexistent_dir:
                config_symlink = os.path.join(config_dir, "cfglink")
                os.symlink(nonexistent_dir, config_symlink)

                cmd = grm(["repos", "sync", "config", "--config", config_symlink])

                assert cmd.returncode != 0
                assert len(cmd.stdout) == 0
                assert "not found" in cmd.stderr.lower()
                # Nothing may have been cloned into the target.
                # (The original asserted this twice; once is enough.)
                assert not os.path.exists(os.path.join(target, "test"))
|
||||
|
||||
|
||||
def test_repos_sync_config_is_directory():
    """Passing a directory path as --config must fail with a clear error."""
    with tempfile.TemporaryDirectory() as config:
        cmd = grm(["repos", "sync", "config", "--config", config])

        assert cmd.returncode != 0
        assert not cmd.stdout
        assert "is a directory" in cmd.stderr.lower()
|
||||
|
||||
|
||||
def test_repos_sync_config_is_unreadable():
    """A config file without read permission must make sync fail cleanly."""
    with tempfile.TemporaryDirectory() as config_dir:
        config_path = os.path.join(config_dir, "cfg")
        # Create an empty config, then strip all permissions from it.
        # (Use a with-block so the handle is closed before chmod; the original
        # leaked the file object returned by open().)
        with open(config_path, "w"):
            pass
        os.chmod(config_path, 0o0000)
        cmd = grm(["repos", "sync", "config", "--config", config_path])

        assert os.path.exists(config_path)
        assert cmd.returncode != 0
        assert len(cmd.stdout) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_unmanaged_repos(configtype):
    """Sync must warn on stderr about repositories in the root that are not in the config."""
    with tempfile.TemporaryDirectory() as root:
        with TempGitRepository(dir=root) as unmanaged_repo:
            with tempfile.NamedTemporaryFile() as config:
                with open(config.name, "w") as f:
                    f.write(templates["repo_simple"][configtype].format(root=root))

                cmd = grm(["repos", "sync", "config", "--config", config.name])
                assert cmd.returncode == 0

                git_dir = os.path.join(root, "test")
                assert os.path.exists(git_dir)

                # Strip the root prefix to get the repo name relative to the tree root.
                unmanaged_repo_name = os.path.relpath(unmanaged_repo, root)
                pattern = f".*unmanaged.*{unmanaged_repo_name}"
                stderr_lines = cmd.stderr.lower().split("\n")
                assert any(re.match(pattern, line) for line in stderr_lines)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_root_is_file(configtype):
    """A tree root pointing at a regular file must be rejected as "not a directory"."""
    with tempfile.NamedTemporaryFile() as target, tempfile.NamedTemporaryFile() as config:
        config_body = templates["repo_simple"][configtype].format(root=target.name)
        with open(config.name, "w") as f:
            f.write(config_body)

        cmd = grm(["repos", "sync", "config", "--config", config.name])
        assert cmd.returncode != 0
        assert "not a directory" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_clone(configtype):
    """Syncing a config with two remotes clones the repo and configures both remotes."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_two_remotes"][configtype].format(
                                root=target, remote1=remote1, remote2=remote2
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    git_dir = os.path.join(target, "test")
                    assert os.path.exists(git_dir)
                    with git.Repo(git_dir) as repo:
                        # Clean, non-bare clone with both configured remotes.
                        assert not repo.bare
                        assert not repo.is_dirty()
                        assert set([str(r) for r in repo.remotes]) == {
                            "origin",
                            "origin2",
                        }
                        # HEAD follows the first remote's tip.
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == remote1_head_commit_sha

                        # Each remote carries exactly its configured URL.
                        assert len(repo.remotes) == 2
                        urls = list(repo.remote("origin").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote1}"

                        urls = list(repo.remote("origin2").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_repo_in_subdirectory(configtype):
    """A repo named "outer/inner" is cloned into that subdirectory, and a
    second sync must neither fail nor report it as unmanaged."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote, remote_head_commit_sha):
            with tempfile.NamedTemporaryFile() as config:
                with open(config.name, "w") as f:
                    f.write(
                        templates["repo_in_subdirectory"][configtype].format(
                            root=target, remote=remote
                        )
                    )

                cmd = grm(["repos", "sync", "config", "--config", config.name])
                assert cmd.returncode == 0

                # The repo name's slash maps to a nested directory.
                git_dir = os.path.join(target, "outer", "inner")
                assert os.path.exists(git_dir)
                with git.Repo(git_dir) as repo:
                    assert not repo.bare
                    assert not repo.is_dirty()
                    assert set([str(r) for r in repo.remotes]) == {"origin"}
                    assert str(repo.active_branch) == "master"
                    assert str(repo.head.commit) == remote_head_commit_sha

                    assert len(repo.remotes) == 1
                    urls = list(repo.remote("origin").urls)
                    assert len(urls) == 1
                    assert urls[0] == f"file://{remote}"

                # Re-running sync must succeed and must not misclassify the
                # nested repo as unmanaged.
                cmd = grm(["repos", "sync", "config", "--config", config.name])
                assert cmd.returncode == 0
                assert "found unmanaged repository" not in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_nested_clone(configtype):
    """Two trees, one rooted inside the other, both sync correctly, and a
    second sync is a clean no-op without unmanaged-repo warnings."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    with open(config.name, "w") as f:
                        f.write(
                            templates["nested_trees"][configtype].format(
                                root=target, remote1=remote1, remote2=remote2
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    def validate(git_dir, sha, remote):
                        # Each clone must be a clean checkout of `remote` at `sha`.
                        assert os.path.exists(git_dir)
                        with git.Repo(git_dir) as repo:
                            assert not repo.bare
                            assert not repo.is_dirty()
                            assert set([str(r) for r in repo.remotes]) == {"origin"}
                            assert str(repo.active_branch) == "master"
                            assert str(repo.head.commit) == sha

                            assert len(repo.remotes) == 1
                            urls = list(repo.remote("origin").urls)
                            assert len(urls) == 1
                            assert urls[0] == f"file://{remote}"

                    validate(
                        os.path.join(target, "outer"), remote1_head_commit_sha, remote1
                    )
                    validate(
                        os.path.join(target, "subdir", "inner"),
                        remote2_head_commit_sha,
                        remote2,
                    )

                    # Second sync must succeed and must not flag either tree's
                    # repo as unmanaged. (Debug prints removed.)
                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0
                    assert "found unmanaged repository" not in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_init(configtype):
    """Syncing a repo that has no remotes must initialize a fresh, empty repository."""
    with tempfile.TemporaryDirectory() as target:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                f.write(templates["repo_simple"][configtype].format(root=target))

            cmd = grm(["repos", "sync", "config", "--config", config.name])
            assert cmd.returncode == 0

            git_dir = os.path.join(target, "test")
            assert os.path.exists(git_dir)
            with git.Repo(git_dir) as repo:
                assert not repo.bare
                assert not repo.is_dirty()
                # as there are no commits yet, HEAD does not point to anything
                # valid
                assert not repo.head.is_valid()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_add_remote(configtype):
    """Adding a second remote to the config adds it to an existing clone."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    # First sync: clone with a single remote ("origin").
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    git_dir = os.path.join(target, "test")

                    assert os.path.exists(git_dir)
                    with git.Repo(git_dir) as repo:
                        assert not repo.bare
                        assert not repo.is_dirty()
                        assert set([str(r) for r in repo.remotes]) == {"origin"}
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == remote1_head_commit_sha

                    # Second sync: the config now lists two remotes; "origin2"
                    # must be added without disturbing "origin".
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_two_remotes"][configtype].format(
                                root=target, remote1=remote1, remote2=remote2
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0
                    with git.Repo(git_dir) as repo:
                        assert set([str(r) for r in repo.remotes]) == {
                            "origin",
                            "origin2",
                        }

                        urls = list(repo.remote("origin").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote1}"

                        urls = list(repo.remote("origin2").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_remove_remote(configtype):
    """Removing a remote from the config removes it from the existing clone."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    # First sync: clone with both remotes configured.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_two_remotes"][configtype].format(
                                root=target, remote1=remote1, remote2=remote2
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    git_dir = os.path.join(target, "test")

                    assert os.path.exists(git_dir)
                    with git.Repo(git_dir) as repo:
                        assert not repo.bare
                        assert not repo.is_dirty()
                        assert set([str(r) for r in repo.remotes]) == {
                            "origin",
                            "origin2",
                        }
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == remote1_head_commit_sha

                    # Second sync: only "origin2" remains in the config, so
                    # "origin" must be removed from the clone.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote2, remotename="origin2"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0
                    shell(f"cd {git_dir} && git remote -v")
                    with git.Repo(git_dir) as repo:
                        """
                        There is some bug(?) in GitPython. It does not properly
                        detect removed remotes. It will still report the old
                        remote in repo.remotes.

                        So instead, we make sure that we get an Exception when
                        we try to access the old remote via repo.remote().

                        Note that repo.remote() checks the actual repo lazily.
                        Even `exists()` seems to just check against repo.remotes
                        and will return True even if the remote is not actually
                        configured. So we have to force GitPython to hit the
                        filesystem; calling Remote.urls does. But it returns an
                        iterator that first has to be unwrapped via list(). Only
                        THEN do we actually get an exception if the remote does
                        not exist.
                        """
                        with pytest.raises(git.exc.GitCommandError):
                            list(repo.remote("origin").urls)

                        urls = list(repo.remote("origin2").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_change_remote_url(configtype):
    """Changing a remote's URL in the config updates the URL in the clone."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    # First sync: "origin" points at remote1.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    git_dir = os.path.join(target, "test")

                    assert os.path.exists(git_dir)
                    with git.Repo(git_dir) as repo:
                        assert not repo.bare
                        assert not repo.is_dirty()
                        assert set([str(r) for r in repo.remotes]) == {"origin"}
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == remote1_head_commit_sha

                    # Second sync: same remote name, URL now points at remote2.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote2, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0
                    with git.Repo(git_dir) as repo:
                        # Still exactly one remote, but with the new URL.
                        assert set([str(r) for r in repo.remotes]) == {"origin"}

                        urls = list(repo.remote("origin").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_change_remote_name(configtype):
    """Renaming a remote in the config replaces the old name in the clone."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    # First sync: remote is called "origin".
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    git_dir = os.path.join(target, "test")

                    assert os.path.exists(git_dir)
                    with git.Repo(git_dir) as repo:
                        assert not repo.bare
                        assert not repo.is_dirty()
                        assert set([str(r) for r in repo.remotes]) == {"origin"}
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == remote1_head_commit_sha

                    # Second sync: same URL, remote renamed to "origin2".
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin2"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0
                    with git.Repo(git_dir) as repo:
                        # See the note in `test_repos_sync_normal_remove_remote()`
                        # about repo.remotes: force GitPython to hit the repo to
                        # prove "origin" is gone.
                        with pytest.raises(git.exc.GitCommandError):
                            list(repo.remote("origin").urls)

                        urls = list(repo.remote("origin2").urls)
                        assert len(urls) == 1
                        assert urls[0] == f"file://{remote1}"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("init_worktree", [True, False, "default"])
def test_repos_sync_worktree_clone(configtype, init_worktree):
    """A worktree-setup repo clones into `.git-main-working-tree` as a bare repo.

    With --init-worktree=true (and by default) a "master" checkout appears next
    to it; with --init-worktree=false it does not.
    """
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote, head_commit_sha):
            with tempfile.NamedTemporaryFile() as config:
                with open(config.name, "w") as f:
                    f.write(
                        templates["worktree_repo_with_remote"][configtype].format(
                            root=target, remote=remote, remotename="origin"
                        )
                    )

                args = ["repos", "sync", "config", "--config", config.name]
                if init_worktree is True:
                    args.append("--init-worktree=true")
                if init_worktree is False:
                    args.append("--init-worktree=false")

                # Run the sync twice; the second pass checks idempotence.
                # (The loop variable was unused, so use `_`.)
                for _ in range(2):
                    cmd = grm(args)
                    assert cmd.returncode == 0

                    worktree_dir = f"{target}/test"
                    assert os.path.exists(worktree_dir)

                    if init_worktree is True or init_worktree == "default":
                        assert set(os.listdir(worktree_dir)) == {
                            ".git-main-working-tree",
                            "master",
                        }
                    else:
                        assert set(os.listdir(worktree_dir)) == {
                            ".git-main-working-tree"
                        }

                    with git.Repo(
                        os.path.join(worktree_dir, ".git-main-working-tree")
                    ) as repo:
                        assert repo.bare
                        assert set([str(r) for r in repo.remotes]) == {"origin"}
                        assert str(repo.active_branch) == "master"
                        assert str(repo.head.commit) == head_commit_sha
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_worktree_init(configtype):
    """A worktree repo without remotes inits a bare repo in `.git-main-working-tree`."""
    with tempfile.TemporaryDirectory() as target:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                f.write(
                    templates["worktree_repo_simple"][configtype].format(root=target)
                )

            cmd = grm(["repos", "sync", "config", "--config", config.name])
            assert cmd.returncode == 0

            worktree_dir = f"{target}/test"
            assert os.path.exists(worktree_dir)

            # No default-branch worktree is created: there is nothing to check out.
            assert set(os.listdir(worktree_dir)) == {".git-main-working-tree"}
            with git.Repo(os.path.join(worktree_dir, ".git-main-working-tree")) as repo:
                assert repo.bare
                # as there are no commits yet, HEAD does not point to anything
                # valid
                assert not repo.head.is_valid()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_invalid_syntax(configtype):
    """A syntactically broken config must make sync fail."""
    with tempfile.NamedTemporaryFile() as config:
        with open(config.name, "w") as f:
            if configtype == "toml":
                f.write(
                    """
                    [[trees]]
                    root = invalid as there are no quotes ;)
                    """
                )
            elif configtype == "yaml":
                # NOTE(review): the in-string indentation here must be
                # inconsistent for the YAML to be invalid — confirm it still
                # fails to parse.
                f.write(
                    """
                    trees:
                      wrong:
                     indentation:
                    """
                )
            else:
                # Guard: a new parametrize value needs a case added here.
                raise NotImplementedError()
        cmd = grm(["repos", "sync", "config", "--config", config.name])
        assert cmd.returncode != 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_unchanged(configtype):
    """Re-syncing with an unchanged config must not modify the tree on disk."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_two_remotes"][configtype].format(
                                root=target, remote1=remote1, remote2=remote2
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    # Checksum the whole tree around a second sync; the second
                    # run must be a byte-for-byte no-op.
                    before = checksum_directory(target)
                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    after = checksum_directory(target)
                    assert cmd.returncode == 0

                    assert before == after
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_change_to_worktree(configtype):
    """Flipping an existing normal repo to worktree_setup must be refused."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    # First sync clones a normal (non-worktree) repository.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    # Rewrite the config as a worktree setup for the same repo;
                    # sync must refuse to convert the existing checkout.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["worktree_repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode != 0
                    assert "already exists" in cmd.stderr
                    assert "not using a worktree setup" in cmd.stderr
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_worktree_change_to_normal(configtype):
    """Flipping an existing worktree repo back to a normal setup must be refused."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    # First sync clones a worktree-setup repository.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["worktree_repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    # Rewrite the config as a normal repo for the same path;
                    # sync must refuse to convert the existing worktree setup.
                    with open(config.name, "w") as f:
                        f.write(
                            templates["repo_with_remote"][configtype].format(
                                root=target, remote=remote1, remotename="origin"
                            )
                        )

                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode != 0
                    assert "already exists" in cmd.stderr
                    assert "using a worktree setup" in cmd.stderr
|
||||
e2e_tests/test_worktree_clean.py — new file, 205 lines (@@ -0,0 +1,205 @@)
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
from helpers import (
|
||||
NonGitDir,
|
||||
TempGitRepository,
|
||||
TempGitRepositoryWorktree,
|
||||
checksum_directory,
|
||||
funcname,
|
||||
grm,
|
||||
shell,
|
||||
)
|
||||
|
||||
|
||||
def test_worktree_clean():
    """A clean, tracking worktree is removed by `grm wt clean`."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        add_cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert add_cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        clean_cmd = grm(["wt", "clean"], cwd=base_dir)
        assert clean_cmd.returncode == 0
        assert "test" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_no_tracking_branch():
    """`wt clean` must leave a worktree alone when its branch tracks nothing."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        # The command succeeds overall but refuses to remove this worktree;
        # its contents must be untouched.
        before = checksum_directory(f"{base_dir}/test")
        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        after = checksum_directory(f"{base_dir}/test")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_uncommited_changes_new_file():
    """`wt clean` must keep a worktree that contains an untracked new file."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Introduce an untracked file.
        shell(f"cd {base_dir}/test && touch changed_file")

        # The command succeeds but must refuse to remove this worktree.
        before = checksum_directory(f"{base_dir}/test")
        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        after = checksum_directory(f"{base_dir}/test")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_uncommited_changes_changed_file():
    """`wt clean` must keep a worktree with uncommitted working-tree changes."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # NOTE(review): despite the name, this shell line *deletes* a random
        # subset of tracked files rather than changing their contents — the
        # sibling "cleand_file" test is the one that rewrites contents.
        shell(f"cd {base_dir}/test && git ls-files | shuf | head | xargs rm -rf")

        # The command succeeds but must refuse to remove this worktree.
        before = checksum_directory(f"{base_dir}/test")
        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        after = checksum_directory(f"{base_dir}/test")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_uncommited_changes_cleand_file():
    """`wt clean` must keep a worktree whose tracked files were modified."""
    # NOTE(review): "cleand" in the name looks like a typo (the body rewrites
    # file contents); renaming would be a test-id change, so it is only flagged.
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Overwrite a random subset of tracked files with random content.
        shell(
            f"cd {base_dir}/test && git ls-files | shuf | head | while read f ; do echo $RANDOM > $f ; done"
        )

        # The command succeeds but must refuse to remove this worktree.
        before = checksum_directory(f"{base_dir}/test")
        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        after = checksum_directory(f"{base_dir}/test")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_commited_changes():
    """`wt clean` must keep a worktree with local commits not on the tracking branch."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Commit a new file locally; the commit is never pushed.
        shell(
            f'cd {base_dir}/test && touch changed_file && git add changed_file && git commit -m "commitmsg"'
        )

        # The command succeeds but must refuse to remove this worktree.
        before = checksum_directory(f"{base_dir}/test")
        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        after = checksum_directory(f"{base_dir}/test")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_tracking_branch_mismatch():
    """`wt clean` must keep a worktree whose branch diverged from its tracking branch."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Push, then reset the local branch one commit behind the remote, so
        # local and tracking branch no longer point at the same commit.
        shell(
            f"cd {base_dir}/test && git push origin test && git reset --hard origin/test^"
        )

        # The command succeeds but must refuse to remove this worktree.
        before = checksum_directory(f"{base_dir}/test")
        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "test" in os.listdir(base_dir)

        after = checksum_directory(f"{base_dir}/test")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_fail_from_subdir():
    """Running `wt clean` from inside an individual worktree must fail."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        add_cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert add_cmd.returncode == 0

        clean_cmd = grm(["wt", "clean"], cwd=f"{base_dir}/test")
        assert clean_cmd.returncode != 0
        assert not clean_cmd.stdout
        assert clean_cmd.stderr
|
||||
|
||||
|
||||
def test_worktree_clean_non_worktree():
    """`wt clean` in a regular (non-worktree-setup) git repo must fail."""
    with TempGitRepository() as git_dir:
        result = grm(["wt", "clean"], cwd=git_dir)
        assert result.returncode != 0
        assert not result.stdout
        assert result.stderr
|
||||
|
||||
|
||||
def test_worktree_clean_non_git():
    """`wt clean` outside any git repository must fail."""
    with NonGitDir() as base_dir:
        result = grm(["wt", "clean"], cwd=base_dir)
        assert result.returncode != 0
        assert not result.stdout
        assert result.stderr
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configure_default_branch", [True, False])
@pytest.mark.parametrize("branch_list_empty", [True, False])
def test_worktree_clean_configured_default_branch(
    configure_default_branch, branch_list_empty
):
    """A branch merged into a configured persistent branch may be cleaned.

    "test" is removed by `wt clean` only when grm.toml declares a non-empty
    `persistent_branches` list containing the branch it was merged into;
    without that config (or with an empty list) it is kept.
    """
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        if configure_default_branch:
            with open(os.path.join(base_dir, "grm.toml"), "w") as f:
                if branch_list_empty:
                    f.write(
                        """
                        persistent_branches = []
                        """
                    )
                else:
                    f.write(
                        """
                        persistent_branches = [
                            "mybranch"
                        ]
                        """
                    )

        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Commit a change on "test", then merge it into "mybranch" through a
        # temporary worktree, so "test" is fully contained in "mybranch".
        shell(
            f"""
            cd {base_dir}
            (
                cd ./test
                touch change
                git add change
                git commit -m commit
            )

            git --git-dir ./.git-main-working-tree worktree add mybranch
            (
                cd ./mybranch
                git merge --no-ff test
            )
            git --git-dir ./.git-main-working-tree worktree remove mybranch
            """
        )

        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0
        if configure_default_branch and not branch_list_empty:
            assert "test" not in os.listdir(base_dir)
        else:
            assert "test" in os.listdir(base_dir)
|
||||
e2e_tests/test_worktree_config_presistent_branch.py — new file, 144 lines (@@ -0,0 +1,144 @@)
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os.path
|
||||
|
||||
import git
|
||||
from helpers import TempGitRepositoryWorktree, checksum_directory, funcname, grm, shell
|
||||
|
||||
|
||||
def test_worktree_never_clean_persistent_branches():
    """A branch listed in `persistent_branches` must never be cleaned."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        with open(os.path.join(base_dir, "grm.toml"), "w") as f:
            f.write(
                """
                persistent_branches = [
                    "mybranch",
                ]
                """
            )

        cmd = grm(["wt", "add", "mybranch", "--track", "origin/master"], cwd=base_dir)
        assert cmd.returncode == 0

        before = checksum_directory(f"{base_dir}/mybranch")

        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0

        # The persistent worktree must still exist, on the right branch,
        # and be byte-for-byte untouched.
        assert "mybranch" in os.listdir(base_dir)
        repo = git.Repo(os.path.join(base_dir, "mybranch"))
        assert str(repo.active_branch) == "mybranch"

        after = checksum_directory(f"{base_dir}/mybranch")
        assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_branch_merged_into_persistent():
    """A branch fully merged into a persistent branch is removed by clean."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        with open(os.path.join(base_dir, "grm.toml"), "w") as f:
            f.write(
                """
                persistent_branches = [
                    "master",
                ]
                """
            )

        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Create a commit on "test" ...
        shell(
            f"""
            cd {base_dir}/test
            touch change1
            git add change1
            git commit -m "commit1"
            """
        )

        cmd = grm(["wt", "add", "master"], cwd=base_dir)
        assert cmd.returncode == 0

        # ... and merge it into the persistent "master" branch.
        shell(
            f"""
            cd {base_dir}/master
            git merge --no-ff test
            """
        )

        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0

        # Merged branch -> worktree removed.
        assert "test" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_no_clean_unmerged_branch():
    """A branch with commits not merged into any persistent branch is kept."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        with open(os.path.join(base_dir, "grm.toml"), "w") as f:
            f.write(
                """
                persistent_branches = [
                    "master",
                ]
                """
            )

        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Commit on "test" and push it upstream, but never merge it into
        # the persistent branch.
        shell(
            f"""
            cd {base_dir}/test
            touch change1
            git add change1
            git commit -m "commit1"
            git push origin test
            """
        )

        cmd = grm(["wt", "add", "master"], cwd=base_dir)
        assert cmd.returncode == 0

        cmd = grm(["wt", "clean"], cwd=base_dir)
        assert cmd.returncode == 0

        # Unmerged branch -> worktree must survive the clean.
        assert "test" in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_delete_branch_merged_into_persistent():
    """`grm wt delete` (without --force) succeeds for a merged branch."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        with open(os.path.join(base_dir, "grm.toml"), "w") as f:
            f.write(
                """
                persistent_branches = [
                    "master",
                ]
                """
            )

        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0

        shell(
            f"""
            cd {base_dir}/test
            touch change1
            git add change1
            git commit -m "commit1"
            """
        )

        cmd = grm(["wt", "add", "master"], cwd=base_dir)
        assert cmd.returncode == 0

        # Merge the feature branch into the persistent branch, so the
        # deletion below loses no unmerged work.
        shell(
            f"""
            cd {base_dir}/master
            git merge --no-ff test
            """
        )

        cmd = grm(["wt", "delete", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        assert "test" not in os.listdir(base_dir)
|
||||
63
e2e_tests/test_worktree_conversion.py
Normal file
63
e2e_tests/test_worktree_conversion.py
Normal file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
|
||||
from helpers import (
|
||||
EmptyDir,
|
||||
NonGitDir,
|
||||
TempGitRepository,
|
||||
TempGitRepositoryWorktree,
|
||||
checksum_directory,
|
||||
funcname,
|
||||
grm,
|
||||
)
|
||||
|
||||
|
||||
def test_convert():
    """Converting a normal repo moves the git dir to .git-main-working-tree
    and leaves a working worktree setup behind."""
    with TempGitRepository() as git_dir:
        cmd = grm(["wt", "convert"], cwd=git_dir)
        assert cmd.returncode == 0

        # After conversion only the bare git dir remains.
        files = os.listdir(git_dir)
        assert len(files) == 1
        assert files[0] == ".git-main-working-tree"

        # The converted repo must be usable as a worktree setup.
        cmd = grm(["wt", "add", "test"], cwd=git_dir)
        assert cmd.returncode == 0

        files = os.listdir(git_dir)
        assert len(files) == 2
        assert set(files) == {".git-main-working-tree", "test"}
|
||||
|
||||
|
||||
def test_convert_already_worktree():
    """Converting an already-converted setup fails and changes nothing."""
    with TempGitRepositoryWorktree.get(funcname()) as (git_dir, _commit):
        before = checksum_directory(git_dir)

        cmd = grm(["wt", "convert"], cwd=git_dir)
        assert cmd.returncode != 0

        # Directory content must be untouched by the failed conversion.
        after = checksum_directory(git_dir)
        assert before == after
|
||||
|
||||
|
||||
def test_convert_non_git():
    """Converting a non-git directory fails and changes nothing."""
    with NonGitDir() as dir:
        before = checksum_directory(dir)

        cmd = grm(["wt", "convert"], cwd=dir)
        assert cmd.returncode != 0

        after = checksum_directory(dir)
        assert before == after
|
||||
|
||||
|
||||
def test_convert_empty():
    """Converting an empty directory fails and changes nothing."""
    with EmptyDir() as dir:
        before = checksum_directory(dir)

        cmd = grm(["wt", "convert"], cwd=dir)
        assert cmd.returncode != 0

        after = checksum_directory(dir)
        assert before == after
|
||||
182
e2e_tests/test_worktree_fetch.py
Normal file
182
e2e_tests/test_worktree_fetch.py
Normal file
@@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
|
||||
import git
|
||||
import pytest
|
||||
from helpers import (
|
||||
EmptyDir,
|
||||
TempGitFileRemote,
|
||||
TempGitRepositoryWorktree,
|
||||
funcname,
|
||||
grm,
|
||||
shell,
|
||||
)
|
||||
|
||||
|
||||
def test_worktree_fetch():
    """`grm wt fetch` updates remote-tracking refs without touching local branches."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit):
        with TempGitFileRemote() as (remote_path, _remote_sha):
            # Add a second remote ("upstream") and seed it with master.
            shell(
                f"""
                cd {base_dir}
                git --git-dir .git-main-working-tree remote add upstream file://{remote_path}
                git --git-dir .git-main-working-tree push --force upstream master:master
                """
            )

            cmd = grm(["wt", "fetch"], cwd=base_dir)
            assert cmd.returncode == 0

            repo = git.Repo(f"{base_dir}/.git-main-working-tree")
            assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha
            assert repo.commit("master").hexsha == repo.commit("upstream/master").hexsha

            # Advance the upstream remote from an independent clone.
            with EmptyDir() as tmp:
                shell(
                    f"""
                    cd {tmp}
                    git clone {remote_path} tmp
                    cd tmp
                    echo change > mychange-remote
                    git add mychange-remote
                    git commit -m "change-remote"
                    git push origin HEAD:master
                    """
                )
                remote_commit = git.Repo(f"{tmp}/tmp").commit("master").hexsha

            # Before fetching, the tracking refs are still stale.
            assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha
            assert repo.commit("master").hexsha == repo.commit("upstream/master").hexsha

            cmd = grm(["wt", "fetch"], cwd=base_dir)
            assert cmd.returncode == 0

            # Fetch only moved upstream/master; local master stays at the root.
            assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha
            assert repo.commit("master").hexsha == root_commit
            assert repo.commit("upstream/master").hexsha == remote_commit
|
||||
|
||||
|
||||
@pytest.mark.parametrize("rebase", [True, False])
@pytest.mark.parametrize("ffable", [True, False])
@pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False])
def test_worktree_pull(rebase, ffable, has_changes, stash):
    """Exercise `grm wt pull` across the ff/rebase/dirty/stash matrix.

    - dirty worktree without --stash: pull refuses
    - non-ffable without --rebase: pull refuses ("cannot be fast forwarded")
    - otherwise local master ends up on the remote commit (merged or rebased)
    """
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit):
        with TempGitFileRemote() as (remote_path, _remote_sha):
            shell(
                f"""
                cd {base_dir}
                git --git-dir .git-main-working-tree remote add upstream file://{remote_path}
                git --git-dir .git-main-working-tree push --force upstream master:master
                """
            )

            repo = git.Repo(f"{base_dir}/.git-main-working-tree")
            assert repo.commit("origin/master").hexsha == repo.commit("master").hexsha
            assert repo.commit("upstream/master").hexsha == repo.commit("master").hexsha

            # Advance upstream/master from a throwaway clone.
            with EmptyDir() as tmp:
                shell(
                    f"""
                    cd {tmp}
                    git clone {remote_path} tmp
                    cd tmp
                    git checkout origin/master
                    echo change > mychange-remote
                    git add mychange-remote
                    git commit -m "change-remote"
                    git push origin HEAD:master
                    """
                )
                remote_commit = git.Repo(f"{tmp}/tmp").commit("HEAD").hexsha

            grm(["wt", "add", "master", "--track", "upstream/master"], cwd=base_dir)

            repo = git.Repo(f"{base_dir}/master")
            if not ffable:
                # Local-only commit diverges master from upstream.
                shell(
                    f"""
                    cd {base_dir}/master
                    echo change > mychange
                    git add mychange
                    git commit -m "local-commit-in-master"
                    """
                )

            if has_changes:
                # Dirty the worktree: one tracked modification, one untracked file.
                shell(
                    f"""
                    cd {base_dir}/master
                    echo change >> root-commit-in-worktree-1
                    echo uncommitedchange > uncommitedchange
                    """
                )

            args = ["wt", "pull"]
            if rebase:
                args += ["--rebase"]
            if stash:
                args += ["--stash"]
            cmd = grm(args, cwd=base_dir)

            if has_changes and not stash:
                assert cmd.returncode != 0
                assert re.match(r".*master.*contains changes.*", cmd.stderr)
            else:
                assert repo.commit("upstream/master").hexsha == remote_commit
                assert repo.commit("origin/master").hexsha == root_commit
                assert (
                    repo.commit("master").hexsha
                    != repo.commit("origin/master").hexsha
                )
                if has_changes:
                    # --stash restored the dirty state after pulling.
                    assert ["uncommitedchange"] == repo.untracked_files
                    assert repo.is_dirty()
                else:
                    assert not repo.is_dirty()

                if not rebase:
                    if ffable:
                        assert cmd.returncode == 0
                        assert (
                            repo.commit("master").hexsha
                            != repo.commit("origin/master").hexsha
                        )
                        assert (
                            repo.commit("master").hexsha
                            == repo.commit("upstream/master").hexsha
                        )
                        assert (
                            repo.commit("upstream/master").hexsha == remote_commit
                        )
                    else:
                        assert cmd.returncode != 0
                        assert "cannot be fast forwarded" in cmd.stderr
                        assert (
                            repo.commit("master").hexsha
                            != repo.commit("origin/master").hexsha
                        )
                        assert repo.commit("master").hexsha != remote_commit
                        assert (
                            repo.commit("upstream/master").hexsha == remote_commit
                        )
                else:
                    assert cmd.returncode == 0
                    if ffable:
                        assert (
                            repo.commit("master").hexsha
                            != repo.commit("origin/master").hexsha
                        )
                        assert (
                            repo.commit("master").hexsha
                            == repo.commit("upstream/master").hexsha
                        )
                        assert (
                            repo.commit("upstream/master").hexsha == remote_commit
                        )
                    else:
                        # Rebase: the local commit now sits on top of the remote one.
                        assert (
                            repo.commit("master").message.strip()
                            == "local-commit-in-master"
                        )
                        assert repo.commit("master~1").hexsha == remote_commit
|
||||
270
e2e_tests/test_worktree_rebase.py
Normal file
270
e2e_tests/test_worktree_rebase.py
Normal file
@@ -0,0 +1,270 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import git
|
||||
import pytest
|
||||
from helpers import TempGitRepositoryWorktree, funcname, grm, shell
|
||||
|
||||
|
||||
@pytest.mark.parametrize("pull", [True, False])
@pytest.mark.parametrize("rebase", [True, False])
@pytest.mark.parametrize("ffable", [True, False])
@pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False])
def test_worktree_rebase(pull, rebase, ffable, has_changes, stash):
    """Exercise `grm wt rebase` over the pull/rebase/ff/dirty/stash matrix.

    Setup builds a persistent base branch and a feature branch, each with a
    local commit and a diverging remote commit (pushed via a throwaway "tmp"
    worktree), optionally made non-fast-forwardable and/or dirty. The final
    branch topology of myfeatbranch is verified by commit messages.
    """
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _root_commit):
        with open(os.path.join(base_dir, "grm.toml"), "w") as f:
            f.write('persistent_branches = ["mybasebranch"]')

        repo = git.Repo(f"{base_dir}/.git-main-working-tree")

        grm(
            ["wt", "add", "mybasebranch", "--track", "origin/mybasebranch"],
            cwd=base_dir,
        )

        # Base branch: root commit + one local commit, pushed upstream.
        shell(
            f"""
            cd {base_dir}/mybasebranch
            echo change > mychange-root
            git add mychange-root
            git commit -m "commit-root"
            echo change > mychange-base-local
            git add mychange-base-local
            git commit -m "commit-in-base-local"
            git push origin mybasebranch
            """
        )

        grm(
            ["wt", "add", "myfeatbranch", "--track", "origin/myfeatbranch"],
            cwd=base_dir,
        )
        # Feature branch forks from the root commit.
        shell(
            f"""
            cd {base_dir}/myfeatbranch
            git reset --hard mybasebranch^ # root
            echo change > mychange-feat-local
            git add mychange-feat-local
            git commit -m "commit-in-feat-local"
            git push origin HEAD:myfeatbranch
            """
        )

        # Use a scratch worktree to put extra commits on BOTH remote branches.
        grm(["wt", "add", "tmp"], cwd=base_dir)
        shell(
            f"""
            cd {base_dir}/tmp
            git reset --hard mybasebranch
            echo change > mychange-base-remote
            git add mychange-base-remote
            git commit -m "commit-in-base-remote"
            git push origin HEAD:mybasebranch

            git reset --hard myfeatbranch
            echo change > mychange-feat-remote
            git add mychange-feat-remote
            git commit -m "commit-in-feat-remote"
            git push origin HEAD:myfeatbranch
            """
        )

        if not ffable:
            # Diverge both local branches from their remotes.
            shell(
                f"""
                cd {base_dir}/mybasebranch
                echo change > mychange-base-no-ff
                git add mychange-base-no-ff
                git commit -m "commit-in-base-local-no-ff"

                cd {base_dir}/myfeatbranch
                echo change > mychange-feat-no-ff
                git add mychange-feat-no-ff
                git commit -m "commit-in-feat-local-no-ff"
                """
            )

        if has_changes:
            shell(
                f"""
                cd {base_dir}/myfeatbranch
                echo uncommitedchange > uncommitedchange
                """
            )

        grm(["wt", "delete", "--force", "tmp"], cwd=base_dir)

        # Sanity-check the branch topology before running the command.
        repo = git.Repo(f"{base_dir}/.git-main-working-tree")
        if ffable:
            assert repo.commit("mybasebranch~1").message.strip() == "commit-root"
            assert (
                repo.refs.mybasebranch.commit.message.strip() == "commit-in-base-local"
            )
            assert (
                repo.remote("origin").refs.mybasebranch.commit.message.strip()
                == "commit-in-base-remote"
            )
            assert (
                repo.refs.myfeatbranch.commit.message.strip() == "commit-in-feat-local"
            )
            assert (
                repo.remote("origin").refs.myfeatbranch.commit.message.strip()
                == "commit-in-feat-remote"
            )
        else:
            assert (
                repo.commit("mybasebranch").message.strip()
                == "commit-in-base-local-no-ff"
            )
            assert (
                repo.commit("mybasebranch~1").message.strip() == "commit-in-base-local"
            )
            assert repo.commit("mybasebranch~2").message.strip() == "commit-root"
            assert (
                repo.commit("myfeatbranch").message.strip()
                == "commit-in-feat-local-no-ff"
            )
            assert (
                repo.commit("myfeatbranch~1").message.strip() == "commit-in-feat-local"
            )
            assert repo.commit("myfeatbranch~2").message.strip() == "commit-root"
            assert (
                repo.remote("origin").refs.mybasebranch.commit.message.strip()
                == "commit-in-base-remote"
            )
            assert (
                repo.remote("origin").refs.myfeatbranch.commit.message.strip()
                == "commit-in-feat-remote"
            )

        args = ["wt", "rebase"]
        if pull:
            args += ["--pull"]
        if rebase:
            args += ["--rebase"]
        if stash:
            args += ["--stash"]
        cmd = grm(args, cwd=base_dir)

        if rebase and not pull:
            # --rebase only makes sense together with --pull.
            assert cmd.returncode != 0
            assert len(cmd.stderr) != 0
        elif has_changes and not stash:
            assert cmd.returncode != 0
            assert re.match(r".*myfeatbranch.*contains changes.*", cmd.stderr)
        else:
            repo = git.Repo(f"{base_dir}/myfeatbranch")
            if has_changes:
                # --stash restored the untracked file afterwards.
                assert ["uncommitedchange"] == repo.untracked_files
            if pull:
                if rebase:
                    assert cmd.returncode == 0
                    if ffable:
                        assert (
                            repo.commit("HEAD").message.strip()
                            == "commit-in-feat-remote"
                        )
                        assert (
                            repo.commit("HEAD~1").message.strip()
                            == "commit-in-feat-local"
                        )
                        assert (
                            repo.commit("HEAD~2").message.strip()
                            == "commit-in-base-remote"
                        )
                        assert (
                            repo.commit("HEAD~3").message.strip()
                            == "commit-in-base-local"
                        )
                        assert repo.commit("HEAD~4").message.strip() == "commit-root"
                    else:
                        assert (
                            repo.commit("HEAD").message.strip()
                            == "commit-in-feat-local-no-ff"
                        )
                        assert (
                            repo.commit("HEAD~1").message.strip()
                            == "commit-in-feat-remote"
                        )
                        assert (
                            repo.commit("HEAD~2").message.strip()
                            == "commit-in-feat-local"
                        )
                        assert (
                            repo.commit("HEAD~3").message.strip()
                            == "commit-in-base-local-no-ff"
                        )
                        assert (
                            repo.commit("HEAD~4").message.strip()
                            == "commit-in-base-remote"
                        )
                        assert (
                            repo.commit("HEAD~5").message.strip()
                            == "commit-in-base-local"
                        )
                        assert repo.commit("HEAD~6").message.strip() == "commit-root"
                else:
                    if ffable:
                        assert cmd.returncode == 0
                        assert (
                            repo.commit("HEAD").message.strip()
                            == "commit-in-feat-remote"
                        )
                        assert (
                            repo.commit("HEAD~1").message.strip()
                            == "commit-in-feat-local"
                        )
                        assert (
                            repo.commit("HEAD~2").message.strip()
                            == "commit-in-base-remote"
                        )
                        assert (
                            repo.commit("HEAD~3").message.strip()
                            == "commit-in-base-local"
                        )
                        assert repo.commit("HEAD~4").message.strip() == "commit-root"
                    else:
                        assert cmd.returncode != 0
                        assert (
                            repo.commit("HEAD").message.strip()
                            == "commit-in-feat-local-no-ff"
                        )
                        assert (
                            repo.commit("HEAD~1").message.strip()
                            == "commit-in-feat-local"
                        )
                        assert (
                            repo.commit("HEAD~2").message.strip()
                            == "commit-in-base-local-no-ff"
                        )
                        assert (
                            repo.commit("HEAD~3").message.strip()
                            == "commit-in-base-local"
                        )
                        assert repo.commit("HEAD~4").message.strip() == "commit-root"
            else:
                assert cmd.returncode == 0
                if ffable:
                    assert repo.commit("HEAD").message.strip() == "commit-in-feat-local"
                    assert (
                        repo.commit("HEAD~1").message.strip() == "commit-in-base-local"
                    )
                    assert repo.commit("HEAD~2").message.strip() == "commit-root"
                else:
                    assert (
                        repo.commit("HEAD").message.strip()
                        == "commit-in-feat-local-no-ff"
                    )
                    assert (
                        repo.commit("HEAD~1").message.strip() == "commit-in-feat-local"
                    )
                    assert (
                        repo.commit("HEAD~2").message.strip()
                        == "commit-in-base-local-no-ff"
                    )
                    assert (
                        repo.commit("HEAD~3").message.strip() == "commit-in-base-local"
                    )
                    assert repo.commit("HEAD~4").message.strip() == "commit-root"
|
||||
84
e2e_tests/test_worktree_status.py
Normal file
84
e2e_tests/test_worktree_status.py
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import pytest
|
||||
from helpers import (
|
||||
NonGitDir,
|
||||
TempGitRepository,
|
||||
TempGitRepositoryWorktree,
|
||||
funcname,
|
||||
grm,
|
||||
shell,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("has_config", [True, False])
def test_worktree_status(has_config):
    """`grm wt status` lists worktrees, with or without a grm.toml present."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        if has_config:
            # An empty config file must not break status.
            with open(os.path.join(base_dir, "grm.toml"), "w") as f:
                f.write("")
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        cmd = grm(["wt", "status"], cwd=base_dir)
        assert cmd.returncode == 0
        assert len(cmd.stderr) == 0
        stdout = cmd.stdout.lower()
        assert "test" in stdout
|
||||
|
||||
|
||||
def test_worktree_status_fail_from_subdir():
    """Running status from inside a worktree (not the root) must fail."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        cmd = grm(["wt", "status"], cwd=f"{base_dir}/test")
        assert cmd.returncode != 0
        assert len(cmd.stdout) == 0
        assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_worktree_status_non_worktree():
    """Status in a plain (non-worktree) git repository must fail."""
    with TempGitRepository() as git_dir:
        cmd = grm(["wt", "status"], cwd=git_dir)
        assert cmd.returncode != 0
        assert len(cmd.stdout) == 0
        assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_worktree_status_non_git():
    """Status in a directory that is not a git repository must fail."""
    with NonGitDir() as base_dir:
        cmd = grm(["wt", "status"], cwd=base_dir)
        assert cmd.returncode != 0
        assert len(cmd.stdout) == 0
        assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_worktree_status_warn_with_non_worktree_dir():
    """A stray non-worktree directory produces a warning but not a failure."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Drop a plain directory next to the worktrees.
        shell(
            f"""
            cd {base_dir}
            mkdir not_a_worktree
            """
        )

        cmd = grm(["wt", "status"], cwd=base_dir)

        # Status still succeeds, but complains about the stray directory.
        assert cmd.returncode == 0
        assert len(cmd.stdout) != 0
        assert len(cmd.stderr) != 0
        assert (
            re.match(
                ".*error.*not_a_worktree.*not a valid worktree directory",
                cmd.stderr,
                re.IGNORECASE,
            )
            is not None
        )
|
||||
713
e2e_tests/test_worktrees.py
Normal file
713
e2e_tests/test_worktrees.py
Normal file
@@ -0,0 +1,713 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import datetime
|
||||
import os.path
|
||||
|
||||
import git
|
||||
import pytest
|
||||
from helpers import (
|
||||
TempGitRepositoryWorktree,
|
||||
checksum_directory,
|
||||
funcname,
|
||||
grm,
|
||||
shell,
|
||||
tempfile,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"config_setup",
|
||||
(
|
||||
(False, False, False),
|
||||
(True, False, False),
|
||||
(True, False, True),
|
||||
(True, True, False),
|
||||
(True, True, True),
|
||||
),
|
||||
)
|
||||
@pytest.mark.parametrize("explicit_notrack", [True, False])
|
||||
@pytest.mark.parametrize("explicit_track", [True, False])
|
||||
@pytest.mark.parametrize(
|
||||
"local_branch_setup", ((False, False), (True, False), (True, True))
|
||||
)
|
||||
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
|
||||
@pytest.mark.parametrize("remote_branch_with_prefix_already_exists", [True, False])
|
||||
@pytest.mark.parametrize(
|
||||
"remote_setup",
|
||||
(
|
||||
(0, "origin", False),
|
||||
(1, "origin", False),
|
||||
(2, "origin", False),
|
||||
(2, "otherremote", False),
|
||||
(2, "origin", True),
|
||||
(2, "otherremote", True),
|
||||
),
|
||||
)
|
||||
@pytest.mark.parametrize("track_differs_from_existing_branch_upstream", [True, False])
|
||||
@pytest.mark.parametrize("worktree_with_slash", [True, False])
|
||||
def test_worktree_add(
|
||||
config_setup,
|
||||
explicit_notrack,
|
||||
explicit_track,
|
||||
local_branch_setup,
|
||||
remote_branch_already_exists,
|
||||
remote_branch_with_prefix_already_exists,
|
||||
remote_setup,
|
||||
track_differs_from_existing_branch_upstream,
|
||||
worktree_with_slash,
|
||||
):
|
||||
(remote_count, default_remote, remotes_differ) = remote_setup
|
||||
(
|
||||
config_enabled,
|
||||
config_has_default_remote_prefix,
|
||||
config_has_default_track_enabled,
|
||||
) = config_setup
|
||||
(local_branch_exists, local_branch_has_tracking_branch) = local_branch_setup
|
||||
has_remotes = True if remote_count > 0 else False
|
||||
|
||||
if worktree_with_slash:
|
||||
worktree_name = "dir/nested/test"
|
||||
else:
|
||||
worktree_name = "test"
|
||||
|
||||
if track_differs_from_existing_branch_upstream:
|
||||
explicit_track_branch_name = f"{default_remote}/somethingelse"
|
||||
else:
|
||||
explicit_track_branch_name = f"{default_remote}/{worktree_name}"
|
||||
|
||||
timestamp = datetime.datetime.now().replace(microsecond=0).isoformat()
|
||||
# GitPython has some weird behavior here. It is not possible to use kwargs
|
||||
# to set the commit and author date.
|
||||
#
|
||||
# `committer_date=x` (which is documented) does not work, as `git commit`
|
||||
# does not accept --committer-date
|
||||
#
|
||||
# `author_date=x` does not work, as it's now called --date in `git commit`
|
||||
#
|
||||
# `date=x` should work, but is refused by GitPython, as it does not know
|
||||
# about the new behavior in `git commit`
|
||||
#
|
||||
# Fortunately, there are env variables that control those timestamps.
|
||||
os.environ["GIT_COMMITTER_DATE"] = str(timestamp)
|
||||
os.environ["GIT_AUTHOR_DATE"] = str(timestamp)
|
||||
|
||||
def setup_remote1(directory):
|
||||
if remote_branch_already_exists:
|
||||
with tempfile.TemporaryDirectory() as cloned:
|
||||
repo = git.Repo.clone_from(directory, cloned)
|
||||
newfile = os.path.join(cloned, "change")
|
||||
open(newfile, "w").close()
|
||||
repo.index.add([newfile])
|
||||
repo.index.commit("commit")
|
||||
repo.remotes.origin.push(f"HEAD:{worktree_name}", force=True)
|
||||
|
||||
if remote_branch_with_prefix_already_exists:
|
||||
with tempfile.TemporaryDirectory() as cloned:
|
||||
repo = git.Repo.clone_from(directory, cloned)
|
||||
newfile = os.path.join(cloned, "change2")
|
||||
open(newfile, "w").close()
|
||||
repo.index.add([newfile])
|
||||
repo.index.commit("commit")
|
||||
repo.remotes.origin.push(f"HEAD:myprefix/{worktree_name}", force=True)
|
||||
|
||||
return "_".join(
|
||||
[
|
||||
str(worktree_with_slash),
|
||||
str(remote_branch_already_exists),
|
||||
str(remote_branch_with_prefix_already_exists),
|
||||
str(remotes_differ),
|
||||
]
|
||||
)
|
||||
|
||||
def setup_remote2(directory):
|
||||
if remote_branch_already_exists:
|
||||
with tempfile.TemporaryDirectory() as cloned:
|
||||
repo = git.Repo.clone_from(directory, cloned)
|
||||
newfile = os.path.join(cloned, "change")
|
||||
open(newfile, "w").close()
|
||||
repo.index.add([newfile])
|
||||
repo.index.commit("commit")
|
||||
if remotes_differ:
|
||||
newfile = os.path.join(cloned, "change_on_second_remote")
|
||||
open(newfile, "w").close()
|
||||
repo.index.add([newfile])
|
||||
repo.index.commit("commit_on_second_remote")
|
||||
repo.remotes.origin.push(f"HEAD:{worktree_name}", force=True)
|
||||
|
||||
if remote_branch_with_prefix_already_exists:
|
||||
with tempfile.TemporaryDirectory() as cloned:
|
||||
repo = git.Repo.clone_from(directory, cloned)
|
||||
newfile = os.path.join(cloned, "change2")
|
||||
open(newfile, "w").close()
|
||||
repo.index.add([newfile])
|
||||
repo.index.commit("commit")
|
||||
if remotes_differ:
|
||||
newfile = os.path.join(cloned, "change_on_second_remote2")
|
||||
open(newfile, "w").close()
|
||||
repo.index.add([newfile])
|
||||
repo.index.commit("commit_on_second_remote2")
|
||||
repo.remotes.origin.push(f"HEAD:myprefix/{worktree_name}", force=True)
|
||||
|
||||
return "_".join(
|
||||
[
|
||||
str(worktree_with_slash),
|
||||
str(remote_branch_already_exists),
|
||||
str(remote_branch_with_prefix_already_exists),
|
||||
str(remotes_differ),
|
||||
]
|
||||
)
|
||||
|
||||
def cachefn(nr):
|
||||
return "_".join(
|
||||
[
|
||||
str(nr),
|
||||
str(default_remote),
|
||||
str(local_branch_exists),
|
||||
str(remote_branch_already_exists),
|
||||
str(remote_branch_with_prefix_already_exists),
|
||||
str(remote_count),
|
||||
str(remotes_differ),
|
||||
str(worktree_name),
|
||||
]
|
||||
)
|
||||
|
||||
remote1_cache_key = cachefn(1)
|
||||
remote2_cache_key = cachefn(2)
|
||||
|
||||
cachekey = "_".join(
|
||||
[
|
||||
str(local_branch_exists),
|
||||
str(local_branch_has_tracking_branch),
|
||||
str(remote_branch_already_exists),
|
||||
str(remote_branch_with_prefix_already_exists),
|
||||
str(remote_count),
|
||||
str(remotes_differ),
|
||||
str(worktree_name),
|
||||
]
|
||||
)
|
||||
|
||||
with TempGitRepositoryWorktree.get(
|
||||
cachekey=cachekey,
|
||||
branch=worktree_name if local_branch_exists else None,
|
||||
remotes=remote_count,
|
||||
remote_setup=[
|
||||
[remote1_cache_key, setup_remote1],
|
||||
[remote2_cache_key, setup_remote2],
|
||||
],
|
||||
) as (base_dir, initial_commit):
|
||||
repo = git.Repo(os.path.join(base_dir, ".git-main-working-tree"))
|
||||
|
||||
if config_enabled:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[track]
|
||||
default = {str(config_has_default_track_enabled).lower()}
|
||||
default_remote = "{default_remote}"
|
||||
"""
|
||||
)
|
||||
|
||||
if config_has_default_remote_prefix:
|
||||
f.write(
|
||||
"""
|
||||
default_remote_prefix = "myprefix"
|
||||
"""
|
||||
)
|
||||
|
||||
if local_branch_exists:
|
||||
if has_remotes and local_branch_has_tracking_branch:
|
||||
origin = repo.remote(default_remote)
|
||||
if remote_count >= 2:
|
||||
otherremote = repo.remote("otherremote")
|
||||
br = list(filter(lambda x: x.name == worktree_name, repo.branches))[0]
|
||||
assert os.path.exists(base_dir)
|
||||
if track_differs_from_existing_branch_upstream:
|
||||
origin.push(
|
||||
f"{worktree_name}:someothername", force=True, set_upstream=True
|
||||
)
|
||||
if remote_count >= 2:
|
||||
otherremote.push(
|
||||
f"{worktree_name}:someothername",
|
||||
force=True,
|
||||
set_upstream=True,
|
||||
)
|
||||
br.set_tracking_branch(
|
||||
list(
|
||||
filter(
|
||||
lambda x: x.remote_head == "someothername", origin.refs
|
||||
)
|
||||
)[0]
|
||||
)
|
||||
else:
|
||||
origin.push(
|
||||
f"{worktree_name}:{worktree_name}",
|
||||
force=True,
|
||||
set_upstream=True,
|
||||
)
|
||||
if remote_count >= 2:
|
||||
otherremote.push(
|
||||
f"{worktree_name}:{worktree_name}",
|
||||
force=True,
|
||||
set_upstream=True,
|
||||
)
|
||||
br.set_tracking_branch(
|
||||
list(
|
||||
filter(
|
||||
lambda x: x.remote_head == worktree_name, origin.refs
|
||||
)
|
||||
)[0]
|
||||
)
|
||||
|
||||
args = ["wt", "add", worktree_name]
|
||||
if explicit_track:
|
||||
args.extend(["--track", explicit_track_branch_name])
|
||||
if explicit_notrack:
|
||||
args.extend(["--no-track"])
|
||||
cmd = grm(args, cwd=base_dir)
|
||||
if explicit_track and not explicit_notrack and not has_remotes:
|
||||
assert cmd.returncode != 0
|
||||
assert f'remote "{default_remote}" not found' in cmd.stderr.lower()
|
||||
return
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert len(cmd.stdout.strip().split("\n")) == 1
|
||||
assert f"worktree {worktree_name} created" in cmd.stdout.lower()
|
||||
|
||||
def check_deviation_error(base):
|
||||
if (
|
||||
not local_branch_exists
|
||||
and (explicit_notrack or (not explicit_notrack and not explicit_track))
|
||||
and (
|
||||
remote_branch_already_exists
|
||||
or (
|
||||
config_enabled
|
||||
and config_has_default_remote_prefix
|
||||
and remote_branch_with_prefix_already_exists
|
||||
)
|
||||
)
|
||||
and remote_count >= 2
|
||||
and remotes_differ
|
||||
):
|
||||
assert (
|
||||
"branch exists on multiple remotes, but they deviate"
|
||||
in cmd.stderr.lower()
|
||||
)
|
||||
assert len(cmd.stderr.strip().split("\n")) == base + 1
|
||||
else:
|
||||
if base == 0:
|
||||
assert len(cmd.stderr) == base
|
||||
else:
|
||||
assert len(cmd.stderr.strip().split("\n")) == base
|
||||
|
||||
if explicit_track and explicit_notrack:
|
||||
assert "--track will be ignored" in cmd.stderr.lower()
|
||||
check_deviation_error(1)
|
||||
else:
|
||||
check_deviation_error(0)
|
||||
|
||||
files = os.listdir(base_dir)
|
||||
if config_enabled is True:
|
||||
if worktree_with_slash:
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "dir"}
|
||||
else:
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
|
||||
assert len(files) == 3
|
||||
if worktree_with_slash:
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "dir"}
|
||||
assert set(os.listdir(os.path.join(base_dir, "dir"))) == {"nested"}
|
||||
assert set(os.listdir(os.path.join(base_dir, "dir/nested"))) == {"test"}
|
||||
else:
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
|
||||
else:
|
||||
assert len(files) == 2
|
||||
if worktree_with_slash:
|
||||
assert set(files) == {".git-main-working-tree", "dir"}
|
||||
assert set(os.listdir(os.path.join(base_dir, "dir"))) == {"nested"}
|
||||
assert set(os.listdir(os.path.join(base_dir, "dir/nested"))) == {"test"}
|
||||
else:
|
||||
assert set(files) == {".git-main-working-tree", "test"}
|
||||
|
||||
repo = git.Repo(os.path.join(base_dir, worktree_name))
|
||||
assert not repo.bare
|
||||
# assert not repo.is_dirty()
|
||||
assert str(repo.head.ref) == worktree_name
|
||||
|
||||
local_commit = repo.head.commit.hexsha
|
||||
|
||||
if not has_remotes:
|
||||
assert local_commit == initial_commit
|
||||
elif local_branch_exists:
|
||||
assert local_commit == initial_commit
|
||||
elif explicit_track and not explicit_notrack:
|
||||
assert local_commit == repo.commit(explicit_track_branch_name).hexsha
|
||||
elif explicit_notrack:
|
||||
if config_enabled and config_has_default_remote_prefix:
|
||||
if remote_branch_with_prefix_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(
|
||||
f"{default_remote}/myprefix/{worktree_name}"
|
||||
).hexsha
|
||||
)
|
||||
elif remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
elif remote_count == 1:
|
||||
if config_enabled and config_has_default_remote_prefix:
|
||||
if remote_branch_with_prefix_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(
|
||||
f"{default_remote}/myprefix/{worktree_name}"
|
||||
).hexsha
|
||||
)
|
||||
elif remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
elif remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
elif remotes_differ:
|
||||
if config_enabled: # we have a default remote
|
||||
if (
|
||||
config_has_default_remote_prefix
|
||||
and remote_branch_with_prefix_already_exists
|
||||
):
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(
|
||||
f"{default_remote}/myprefix/{worktree_name}"
|
||||
).hexsha
|
||||
)
|
||||
elif remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
|
||||
else:
|
||||
if config_enabled and config_has_default_remote_prefix:
|
||||
if remote_branch_with_prefix_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(
|
||||
f"{default_remote}/myprefix/{worktree_name}"
|
||||
).hexsha
|
||||
)
|
||||
elif remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
|
||||
elif config_enabled:
|
||||
if not config_has_default_remote_prefix:
|
||||
if config_has_default_track_enabled:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
if remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
else:
|
||||
if remote_branch_with_prefix_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(
|
||||
f"{default_remote}/myprefix/{worktree_name}"
|
||||
).hexsha
|
||||
)
|
||||
elif remote_branch_already_exists:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
elif config_has_default_track_enabled:
|
||||
assert (
|
||||
local_commit
|
||||
== repo.commit(
|
||||
f"{default_remote}/myprefix/{worktree_name}"
|
||||
).hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
elif remote_branch_already_exists and not remotes_differ:
|
||||
assert (
|
||||
local_commit == repo.commit(f"{default_remote}/{worktree_name}").hexsha
|
||||
)
|
||||
else:
|
||||
assert local_commit == initial_commit
|
||||
|
||||
# Check whether tracking is ok
|
||||
if not has_remotes:
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
elif explicit_notrack:
|
||||
if local_branch_exists and local_branch_has_tracking_branch:
|
||||
if track_differs_from_existing_branch_upstream:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch())
|
||||
== f"{default_remote}/someothername"
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch())
|
||||
== f"{default_remote}/{worktree_name}"
|
||||
)
|
||||
else:
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
elif explicit_track:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch()) == explicit_track_branch_name
|
||||
)
|
||||
elif config_enabled and config_has_default_track_enabled:
|
||||
if config_has_default_remote_prefix:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch())
|
||||
== f"{default_remote}/myprefix/{worktree_name}"
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch())
|
||||
== f"{default_remote}/{worktree_name}"
|
||||
)
|
||||
elif local_branch_exists and local_branch_has_tracking_branch:
|
||||
if track_differs_from_existing_branch_upstream:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch())
|
||||
== f"{default_remote}/someothername"
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch())
|
||||
== f"{default_remote}/{worktree_name}"
|
||||
)
|
||||
else:
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
|
||||
|
||||
def test_worktree_add_invalid_name():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
for worktree_name in [
|
||||
"/absolute/path",
|
||||
"trailingslash/",
|
||||
"with spaces",
|
||||
"with\t tabs",
|
||||
"with\nnewline",
|
||||
]:
|
||||
args = ["wt", "add", worktree_name]
|
||||
cmd = grm(args, cwd=base_dir)
|
||||
assert cmd.returncode != 0
|
||||
assert not os.path.exists(worktree_name)
|
||||
assert not os.path.exists(os.path.join(base_dir, worktree_name))
|
||||
assert "invalid worktree name" in str(cmd.stderr.lower())
|
||||
|
||||
|
||||
def test_worktree_add_invalid_track():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
for track in ["/absolute/path", "trailingslash/", "/"]:
|
||||
args = ["wt", "add", "foo", "--track", track]
|
||||
cmd = grm(args, cwd=base_dir)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stderr.strip().split("\n")) == 1
|
||||
assert not os.path.exists("foo")
|
||||
assert not os.path.exists(os.path.join(base_dir, "foo"))
|
||||
assert "tracking branch" in str(cmd.stderr.lower())
|
||||
|
||||
|
||||
@pytest.mark.parametrize("use_track", [True, False])
|
||||
@pytest.mark.parametrize("use_configuration", [True, False])
|
||||
@pytest.mark.parametrize("use_configuration_default", [True, False])
|
||||
def test_worktree_add_invalid_remote_name(
|
||||
use_track, use_configuration, use_configuration_default
|
||||
):
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
if use_configuration:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[track]
|
||||
default = {str(use_configuration_default).lower()}
|
||||
default_remote = "thisremotedoesnotexist"
|
||||
"""
|
||||
)
|
||||
|
||||
args = ["wt", "add", "foo"]
|
||||
if use_track:
|
||||
args.extend(["--track", "thisremotedoesnotexist/master"])
|
||||
|
||||
cmd = grm(args, cwd=base_dir)
|
||||
|
||||
if use_track or (use_configuration and use_configuration_default):
|
||||
assert cmd.returncode != 0
|
||||
assert "thisremotedoesnotexist" in cmd.stderr
|
||||
else:
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
|
||||
def test_worktree_add_into_invalid_subdirectory():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "/dir/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 1
|
||||
assert "dir" not in os.listdir(base_dir)
|
||||
assert "dir" not in os.listdir("/")
|
||||
|
||||
cmd = grm(["wt", "add", "dir/"], cwd=base_dir)
|
||||
assert cmd.returncode == 1
|
||||
assert "dir" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_delete():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout.strip().split("\n")) == 1
|
||||
assert "test" not in os.listdir(base_dir)
|
||||
|
||||
cmd = grm(["wt", "add", "check"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
repo = git.Repo(os.path.join(base_dir, ".git-main-working-tree"))
|
||||
assert "test" not in [str(b) for b in repo.branches]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("has_other_worktree", [True, False])
|
||||
def test_worktree_delete_in_subfolder(has_other_worktree):
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "dir/test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "dir" in os.listdir(base_dir)
|
||||
|
||||
if has_other_worktree is True:
|
||||
cmd = grm(
|
||||
["wt", "add", "dir/test2", "--track", "origin/test"], cwd=base_dir
|
||||
)
|
||||
assert cmd.returncode == 0
|
||||
assert {"test", "test2"} == set(os.listdir(os.path.join(base_dir, "dir")))
|
||||
else:
|
||||
assert {"test"} == set(os.listdir(os.path.join(base_dir, "dir")))
|
||||
|
||||
cmd = grm(["wt", "delete", "dir/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout.strip().split("\n")) == 1
|
||||
if has_other_worktree is True:
|
||||
assert {"test2"} == set(os.listdir(os.path.join(base_dir, "dir")))
|
||||
else:
|
||||
assert "dir" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_no_tracking_branch():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
before = checksum_directory(f"{base_dir}/test")
|
||||
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
stderr = cmd.stderr.lower()
|
||||
assert "refuse" in stderr or "refusing" in stderr
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
after = checksum_directory(f"{base_dir}/test")
|
||||
assert before == after
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"reason",
|
||||
(
|
||||
"new_file",
|
||||
"changed_file",
|
||||
"deleted_file",
|
||||
"new_commit",
|
||||
"tracking_branch_mismatch",
|
||||
),
|
||||
)
|
||||
def test_worktree_delete_refusal(reason):
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
if reason == "new_file":
|
||||
shell(f"cd {base_dir}/test && touch changed_file")
|
||||
elif reason == "changed_file":
|
||||
shell(
|
||||
f"cd {base_dir}/test && git ls-files | shuf | head | while read f ; do echo $RANDOM > $f ; done"
|
||||
)
|
||||
elif reason == "deleted_file":
|
||||
shell(f"cd {base_dir}/test && git ls-files | shuf | head | xargs rm -rf")
|
||||
elif reason == "new_commit":
|
||||
shell(
|
||||
f'cd {base_dir}/test && touch changed_file && git add changed_file && git commit -m "commitmsg"'
|
||||
)
|
||||
elif reason == "tracking_branch_mismatch":
|
||||
shell(
|
||||
f"cd {base_dir}/test && git push origin test && git reset --hard origin/test^"
|
||||
)
|
||||
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
before = checksum_directory(f"{base_dir}/test")
|
||||
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
stderr = cmd.stderr.lower()
|
||||
assert "refuse" in stderr or "refusing" in stderr
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
after = checksum_directory(f"{base_dir}/test")
|
||||
assert before == after
|
||||
|
||||
|
||||
def test_worktree_delete_force_refusal():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
cmd = grm(["wt", "delete", "test", "--force"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout.strip().split("\n")) == 1
|
||||
assert "test" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_add_delete_add():
|
||||
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" not in os.listdir(base_dir)
|
||||
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" in os.listdir(base_dir)
|
||||
16
example.config.yaml
Normal file
16
example.config.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
trees:
|
||||
- root: "~/example-projects/"
|
||||
repos:
|
||||
- name: "git-repo-manager"
|
||||
remotes:
|
||||
- name: "origin"
|
||||
url: "https://code.hkoerber.de/hannes/git-repo-manager.git"
|
||||
type: "https"
|
||||
- name: "github"
|
||||
url: "https://github.com/hakoerber/git-repo-manager.git"
|
||||
type: "https"
|
||||
- name: "dotfiles"
|
||||
remotes:
|
||||
- name: "origin"
|
||||
url: "https://github.com/hakoerber/dotfiles.git"
|
||||
type: "https"
|
||||
163
release.sh
Executable file
163
release.sh
Executable file
@@ -0,0 +1,163 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -o nounset
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
||||
usage() {
|
||||
printf '%s\n' "usage: $0 (major|minor|patch)" >&2
|
||||
}
|
||||
|
||||
if (($# != 1)); then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
current_version="$(grep '^version \?=' Cargo.toml | head -1 | cut -d '=' -f 2 | tr -d " '"'"')"
|
||||
|
||||
major="$(printf '%s' "${current_version}" | grep -oP '^\d+')"
|
||||
minor="$(printf '%s' "${current_version}" | grep -oP '\.\d+\.' | tr -d '.')"
|
||||
patch="$(printf '%s' "${current_version}" | grep -oP '\d+$' | tr -d '.')"
|
||||
|
||||
case "$1" in
|
||||
major)
|
||||
((major++)) || true
|
||||
minor=0
|
||||
patch=0
|
||||
|
||||
printf '%s\n' "Are you sure you want to release 1.x?" >&2
|
||||
exit 1
|
||||
;;
|
||||
minor)
|
||||
((minor++)) || true
|
||||
patch=0
|
||||
;;
|
||||
patch)
|
||||
((patch++)) || true
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
new_version="${major}.${minor}.${patch}"
|
||||
|
||||
if ! [[ "${new_version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
printf '%s\n' 'Version has to a complete semver' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
current_branch="$(git rev-parse --abbrev-ref HEAD)"
|
||||
if [[ "${current_branch}" != "develop" ]]; then
|
||||
printf '%s\n' 'You need to be on develop' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
gitstatus="$(git status --porcelain)"
|
||||
if [[ -n "${gitstatus}" ]]; then
|
||||
printf '%s\n' 'There are uncommitted changes' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if git tag --list "v${new_version}" | grep -q .; then
|
||||
printf 'Tag %s already exists\n' "v${new_version}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
for remote in $(git remote); do
|
||||
if git ls-remote --tags "${remote}" | grep -q "refs/tags/v${new_version}$"; then
|
||||
printf 'Tag %s already exists on %s\n' "v${new_version}" "${remote}" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
git fetch --all
|
||||
|
||||
for remote in $(git remote); do
|
||||
for branch in master develop; do
|
||||
if ! git diff --quiet "${remote}/${branch}..${branch}"; then
|
||||
printf 'Remote branch %s/%s not up to date, synchronize first!\n' "${remote}" "${branch}" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
if ! git merge-base --is-ancestor master develop; then
|
||||
printf '%s\n' 'Develop is not a straight descendant of master, rebase!' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
changes="$(git log --oneline master..develop | wc -l)"
|
||||
if ((changes == 0)); then
|
||||
printf '%s\n' 'No changes between master and develop?' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
sed -i "0,/^version/{s/^version.*$/version = \"${new_version}\"/}" Cargo.toml
|
||||
|
||||
cargo update --package git-repo-manager --precise "${new_version}"
|
||||
|
||||
diff="$(git diff --numstat)"
|
||||
if (($(printf '%s\n' "${diff}" | wc -l || true) != 2)); then
|
||||
printf '%s\n' 'Weird changes detected, bailing' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.lock$'; then
|
||||
printf '%s\n' 'Weird changes detected, bailing' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.toml$'; then
|
||||
printf '%s\n' 'Weird changes detected, bailing' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git add Cargo.lock Cargo.toml
|
||||
|
||||
git commit -m "Release v${new_version}"
|
||||
|
||||
git switch master 2>/dev/null || { [[ -d "../master" ]] && cd "../master"; } || {
|
||||
printf '%s\n' 'Could not change to master' >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
current_branch="$(git rev-parse --abbrev-ref HEAD)"
|
||||
if [[ "${current_branch}" != "master" ]]; then
|
||||
printf '%s\n' 'Looks like branch switching to master did not work' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git merge --no-ff --no-edit develop
|
||||
git tag "v${new_version}"
|
||||
|
||||
for remote in $(git remote); do
|
||||
while ! git push "${remote}" "v${new_version}" master; do
|
||||
:
|
||||
done
|
||||
done
|
||||
|
||||
git switch develop 2>/dev/null || { [[ -d "../develop" ]] && cd "../develop"; } || {
|
||||
printf '%s\n' 'Could not change to develop' >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
current_branch="$(git rev-parse --abbrev-ref HEAD)"
|
||||
if [[ "${current_branch}" != "develop" ]]; then
|
||||
printf '%s\n' 'Looks like branch switching to develop did not work' >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git merge --ff-only master
|
||||
|
||||
for remote in $(git remote); do
|
||||
while ! git push "${remote}" develop; do
|
||||
:
|
||||
done
|
||||
done
|
||||
|
||||
cargo publish
|
||||
|
||||
printf 'Published %s successfully\n' "${new_version}"
|
||||
exit 0
|
||||
3
rust-toolchain.toml
Normal file
3
rust-toolchain.toml
Normal file
@@ -0,0 +1,3 @@
|
||||
[toolchain]
|
||||
channel = "nightly"
|
||||
targets = ["x86_64-unknown-linux-musl"]
|
||||
45
src/auth.rs
Normal file
45
src/auth.rs
Normal file
@@ -0,0 +1,45 @@
|
||||
use std::process;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AuthToken(String);
|
||||
|
||||
impl AuthToken {
|
||||
pub fn access(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_token_from_command(command: &str) -> Result<AuthToken, String> {
|
||||
let output = process::Command::new("/usr/bin/env")
|
||||
.arg("sh")
|
||||
.arg("-c")
|
||||
.arg(command)
|
||||
.output()
|
||||
.map_err(|error| format!("Failed to run token-command: {}", error))?;
|
||||
|
||||
let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?;
|
||||
let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?;
|
||||
|
||||
if !output.status.success() {
|
||||
if !stderr.is_empty() {
|
||||
return Err(format!("Token command failed: {}", stderr));
|
||||
} else {
|
||||
return Err(String::from("Token command failed."));
|
||||
}
|
||||
}
|
||||
|
||||
if !stderr.is_empty() {
|
||||
return Err(format!("Token command produced stderr: {}", stderr));
|
||||
}
|
||||
|
||||
if stdout.is_empty() {
|
||||
return Err(String::from("Token command did not produce output"));
|
||||
}
|
||||
|
||||
let token = stdout
|
||||
.split('\n')
|
||||
.next()
|
||||
.ok_or_else(|| String::from("Output did not contain any newline"))?;
|
||||
|
||||
Ok(AuthToken(token.to_string()))
|
||||
}
|
||||
129
src/cmd.rs
129
src/cmd.rs
@@ -1,129 +0,0 @@
|
||||
use clap::{AppSettings, Parser};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(
|
||||
name = clap::crate_name!(),
|
||||
version = clap::crate_version!(),
|
||||
author = clap::crate_authors!("\n"),
|
||||
about = clap::crate_description!(),
|
||||
long_version = clap::crate_version!(),
|
||||
license = clap::crate_license!(),
|
||||
setting = AppSettings::DeriveDisplayOrder,
|
||||
setting = AppSettings::PropagateVersion,
|
||||
setting = AppSettings::HelpRequired,
|
||||
)]
|
||||
pub struct Opts {
|
||||
#[clap(subcommand)]
|
||||
pub subcmd: SubCommand,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum SubCommand {
|
||||
#[clap(about = "Manage repositories")]
|
||||
Repos(Repos),
|
||||
#[clap(visible_alias = "wt", about = "Manage worktrees")]
|
||||
Worktree(Worktree),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Repos {
|
||||
#[clap(subcommand, name = "action")]
|
||||
pub action: ReposAction,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum ReposAction {
|
||||
#[clap(
|
||||
visible_alias = "run",
|
||||
about = "Synchronize the repositories to the configured values"
|
||||
)]
|
||||
Sync(Sync),
|
||||
#[clap(about = "Generate a repository configuration from an existing file tree")]
|
||||
Find(Find),
|
||||
#[clap(about = "Show status of configured repositories")]
|
||||
Status(OptionalConfig),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct Sync {
|
||||
#[clap(
|
||||
short,
|
||||
long,
|
||||
default_value = "./config.toml",
|
||||
about = "Path to the configuration file"
|
||||
)]
|
||||
pub config: String,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct OptionalConfig {
|
||||
#[clap(short, long, about = "Path to the configuration file")]
|
||||
pub config: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Find {
|
||||
#[clap(about = "The path to search through")]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Worktree {
|
||||
#[clap(subcommand, name = "action")]
|
||||
pub action: WorktreeAction,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum WorktreeAction {
|
||||
#[clap(about = "Add a new worktree")]
|
||||
Add(WorktreeAddArgs),
|
||||
#[clap(about = "Add an existing worktree")]
|
||||
Delete(WorktreeDeleteArgs),
|
||||
#[clap(about = "Show state of existing worktrees")]
|
||||
Status(WorktreeStatusArgs),
|
||||
#[clap(about = "Convert a normal repository to a worktree setup")]
|
||||
Convert(WorktreeConvertArgs),
|
||||
#[clap(about = "Clean all worktrees that do not contain uncommited/unpushed changes")]
|
||||
Clean(WorktreeCleanArgs),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeAddArgs {
|
||||
#[clap(about = "Name of the worktree")]
|
||||
pub name: String,
|
||||
|
||||
#[clap(
|
||||
short = 'n',
|
||||
long = "branch-namespace",
|
||||
about = "Namespace of the branch"
|
||||
)]
|
||||
pub branch_namespace: Option<String>,
|
||||
#[clap(short = 't', long = "track", about = "Remote branch to track")]
|
||||
pub track: Option<String>,
|
||||
}
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeDeleteArgs {
|
||||
#[clap(about = "Name of the worktree")]
|
||||
pub name: String,
|
||||
|
||||
#[clap(
|
||||
long = "force",
|
||||
about = "Force deletion, even when there are uncommitted/unpushed changes"
|
||||
)]
|
||||
pub force: bool,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeStatusArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeConvertArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeCleanArgs {}
|
||||
|
||||
pub fn parse() -> Opts {
|
||||
Opts::parse()
|
||||
}
|
||||
331
src/config.rs
331
src/config.rs
@@ -1,39 +1,336 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process;
|
||||
|
||||
use super::repo::Repo;
|
||||
use std::path::Path;
|
||||
|
||||
use super::auth;
|
||||
use super::output::*;
|
||||
use super::path;
|
||||
use super::provider;
|
||||
use super::provider::Filter;
|
||||
use super::provider::Provider;
|
||||
use super::repo;
|
||||
use super::tree;
|
||||
|
||||
pub type RemoteProvider = provider::RemoteProvider;
|
||||
pub type RemoteType = repo::RemoteType;
|
||||
|
||||
fn worktree_setup_default() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Config {
|
||||
pub trees: Vec<Tree>,
|
||||
#[serde(untagged)]
|
||||
pub enum Config {
|
||||
ConfigTrees(ConfigTrees),
|
||||
ConfigProvider(ConfigProvider),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Tree {
|
||||
pub struct ConfigTrees {
|
||||
pub trees: Vec<ConfigTree>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct ConfigProviderFilter {
|
||||
pub access: Option<bool>,
|
||||
pub owner: Option<bool>,
|
||||
pub users: Option<Vec<String>>,
|
||||
pub groups: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct ConfigProvider {
|
||||
pub provider: RemoteProvider,
|
||||
pub token_command: String,
|
||||
pub root: String,
|
||||
pub repos: Option<Vec<Repo>>,
|
||||
pub filters: Option<ConfigProviderFilter>,
|
||||
|
||||
pub force_ssh: Option<bool>,
|
||||
|
||||
pub api_url: Option<String>,
|
||||
|
||||
pub worktree: Option<bool>,
|
||||
|
||||
pub remote_name: Option<String>,
|
||||
}
|
||||
|
||||
pub fn read_config(path: &str) -> Result<Config, String> {
|
||||
let content = match std::fs::read_to_string(&path) {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct RemoteConfig {
|
||||
pub name: String,
|
||||
pub url: String,
|
||||
#[serde(rename = "type")]
|
||||
pub remote_type: RemoteType,
|
||||
}
|
||||
|
||||
impl RemoteConfig {
|
||||
pub fn from_remote(remote: repo::Remote) -> Self {
|
||||
Self {
|
||||
name: remote.name,
|
||||
url: remote.url,
|
||||
remote_type: remote.remote_type,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_remote(self) -> repo::Remote {
|
||||
repo::Remote {
|
||||
name: self.name,
|
||||
url: self.url,
|
||||
remote_type: self.remote_type,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct RepoConfig {
|
||||
pub name: String,
|
||||
|
||||
#[serde(default = "worktree_setup_default")]
|
||||
pub worktree_setup: bool,
|
||||
|
||||
pub remotes: Option<Vec<RemoteConfig>>,
|
||||
}
|
||||
|
||||
impl RepoConfig {
|
||||
pub fn from_repo(repo: repo::Repo) -> Self {
|
||||
Self {
|
||||
name: repo.name,
|
||||
worktree_setup: repo.worktree_setup,
|
||||
remotes: repo
|
||||
.remotes
|
||||
.map(|remotes| remotes.into_iter().map(RemoteConfig::from_remote).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_repo(self) -> repo::Repo {
|
||||
let (namespace, name) = if let Some((namespace, name)) = self.name.rsplit_once('/') {
|
||||
(Some(namespace.to_string()), name.to_string())
|
||||
} else {
|
||||
(None, self.name)
|
||||
};
|
||||
|
||||
repo::Repo {
|
||||
name,
|
||||
namespace,
|
||||
worktree_setup: self.worktree_setup,
|
||||
remotes: self.remotes.map(|remotes| {
|
||||
remotes
|
||||
.into_iter()
|
||||
.map(|remote| remote.into_remote())
|
||||
.collect()
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ConfigTrees {
|
||||
pub fn to_config(self) -> Config {
|
||||
Config::ConfigTrees(self)
|
||||
}
|
||||
|
||||
pub fn from_vec(vec: Vec<ConfigTree>) -> Self {
|
||||
ConfigTrees { trees: vec }
|
||||
}
|
||||
|
||||
pub fn from_trees(vec: Vec<tree::Tree>) -> Self {
|
||||
ConfigTrees {
|
||||
trees: vec.into_iter().map(ConfigTree::from_tree).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn trees(self) -> Vec<ConfigTree> {
|
||||
self.trees
|
||||
}
|
||||
|
||||
pub fn trees_mut(&mut self) -> &mut Vec<ConfigTree> {
|
||||
&mut self.trees
|
||||
}
|
||||
|
||||
pub fn trees_ref(&self) -> &Vec<ConfigTree> {
|
||||
self.trees.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
    /// Resolve this configuration into a flat list of trees.
    ///
    /// For a `ConfigTrees` variant the trees are returned as-is. For a
    /// `ConfigProvider` variant, repositories are fetched from the remote
    /// provider (GitHub or GitLab) and grouped into one tree per namespace.
    ///
    /// NOTE(review): token-retrieval and provider-construction failures call
    /// `process::exit(1)` directly instead of returning `Err` — confirm this
    /// is intended for a method that otherwise returns `Result`.
    pub fn trees(self) -> Result<Vec<ConfigTree>, String> {
        match self {
            Config::ConfigTrees(config) => Ok(config.trees),
            Config::ConfigProvider(config) => {
                // The API token is obtained by running a user-supplied command.
                let token = match auth::get_token_from_command(&config.token_command) {
                    Ok(token) => token,
                    Err(error) => {
                        print_error(&format!("Getting token from command failed: {}", error));
                        process::exit(1);
                    }
                };

                // Missing filter section behaves like "filter nothing in".
                let filters = config.filters.unwrap_or(ConfigProviderFilter {
                    access: Some(false),
                    owner: Some(false),
                    users: Some(vec![]),
                    groups: Some(vec![]),
                });

                let filter = Filter::new(
                    filters.users.unwrap_or_default(),
                    filters.groups.unwrap_or_default(),
                    filters.owner.unwrap_or(false),
                    filters.access.unwrap_or(false),
                );

                // An empty filter matches no repositories at all, which is
                // almost certainly a configuration mistake — warn about it.
                if filter.empty() {
                    print_warning(
                        "The configuration does not contain any filters, so no repos will match",
                    );
                }

                // Both provider arms are structurally identical; only the
                // provider type differs.
                let repos = match config.provider {
                    RemoteProvider::Github => {
                        match provider::Github::new(filter, token, config.api_url) {
                            Ok(provider) => provider,
                            Err(error) => {
                                print_error(&format!("Error: {}", error));
                                process::exit(1);
                            }
                        }
                        .get_repos(
                            config.worktree.unwrap_or(false),
                            config.force_ssh.unwrap_or(false),
                            config.remote_name,
                        )?
                    }
                    RemoteProvider::Gitlab => {
                        match provider::Gitlab::new(filter, token, config.api_url) {
                            Ok(provider) => provider,
                            Err(error) => {
                                print_error(&format!("Error: {}", error));
                                process::exit(1);
                            }
                        }
                        .get_repos(
                            config.worktree.unwrap_or(false),
                            config.force_ssh.unwrap_or(false),
                            config.remote_name,
                        )?
                    }
                };

                let mut trees = vec![];

                // One tree per namespace; repositories without a namespace go
                // directly under the configured root.
                for (namespace, namespace_repos) in repos {
                    let repos = namespace_repos
                        .into_iter()
                        .map(RepoConfig::from_repo)
                        .collect();
                    let tree = ConfigTree {
                        root: if let Some(namespace) = namespace {
                            path::path_as_string(&Path::new(&config.root).join(namespace))
                        } else {
                            path::path_as_string(Path::new(&config.root))
                        },
                        repos: Some(repos),
                    };
                    trees.push(tree);
                }
                Ok(trees)
            }
        }
    }

    /// Wrap a list of trees in the `ConfigTrees` variant.
    pub fn from_trees(trees: Vec<ConfigTree>) -> Self {
        Config::ConfigTrees(ConfigTrees { trees })
    }

    /// Rewrite tree roots that live under the user's home directory to use a
    /// leading `~`, for nicer output. Only applies to the `ConfigTrees`
    /// variant; provider configs are left untouched.
    pub fn normalize(&mut self) {
        if let Config::ConfigTrees(config) = self {
            let home = path::env_home();
            // NOTE(review): the `&mut` in front of the iterator looks
            // redundant (`iter_mut()` already yields mutable references).
            for tree in &mut config.trees_mut().iter_mut() {
                if tree.root.starts_with(&home) {
                    // The tilde is not handled differently, it's just a normal path component for `Path`.
                    // Therefore we can treat it like that during **output**.
                    //
                    // The `unwrap()` is safe here as we are testing via `starts_with()`
                    // beforehand
                    let mut path = tree.root.strip_prefix(&home).unwrap();
                    if path.starts_with('/') {
                        path = path.strip_prefix('/').unwrap();
                    }

                    tree.root = Path::new("~").join(path).display().to_string();
                }
            }
        }
    }

    /// Serialize the configuration to TOML, mapping errors to strings.
    pub fn as_toml(&self) -> Result<String, String> {
        match toml::to_string(self) {
            Ok(toml) => Ok(toml),
            Err(error) => Err(error.to_string()),
        }
    }

    /// Serialize the configuration to YAML, mapping errors to strings.
    pub fn as_yaml(&self) -> Result<String, String> {
        serde_yaml::to_string(self).map_err(|e| e.to_string())
    }
}
|
||||
|
||||
/// One tree in the configuration: a root directory plus the repositories
/// expected to live beneath it.
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ConfigTree {
    /// Root directory of the tree (may start with `~`, see `Config::normalize`).
    pub root: String,
    /// Repositories under the root; `None` when not given in the config file.
    pub repos: Option<Vec<RepoConfig>>,
}
|
||||
|
||||
impl ConfigTree {
|
||||
pub fn from_repos(root: String, repos: Vec<repo::Repo>) -> Self {
|
||||
Self {
|
||||
root,
|
||||
repos: Some(repos.into_iter().map(RepoConfig::from_repo).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_tree(tree: tree::Tree) -> Self {
|
||||
Self {
|
||||
root: tree.root,
|
||||
repos: Some(tree.repos.into_iter().map(RepoConfig::from_repo).collect()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_config<'a, T>(path: &str) -> Result<T, String>
|
||||
where
|
||||
T: for<'de> serde::Deserialize<'de>,
|
||||
{
|
||||
let content = match std::fs::read_to_string(path) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return Err(format!(
|
||||
"Error reading configuration file \"{}\": {}",
|
||||
path, e
|
||||
))
|
||||
path,
|
||||
match e.kind() {
|
||||
std::io::ErrorKind::NotFound => String::from("not found"),
|
||||
_ => e.to_string(),
|
||||
}
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let config: Config = match toml::from_str(&content) {
|
||||
let config: T = match toml::from_str(&content) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
return Err(format!(
|
||||
"Error parsing configuration file \"{}\": {}",
|
||||
path, e
|
||||
))
|
||||
}
|
||||
Err(_) => match serde_yaml::from_str(&content) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
return Err(format!(
|
||||
"Error parsing configuration file \"{}\": {}",
|
||||
path, e
|
||||
))
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
Ok(config)
|
||||
|
||||
352
src/grm/cmd.rs
Normal file
352
src/grm/cmd.rs
Normal file
@@ -0,0 +1,352 @@
|
||||
use clap::Parser;
|
||||
|
||||
// Top-level CLI definition; name/version/author/description come from
// Cargo.toml via the clap crate macros. (Plain `//` comments are used on
// purpose: `///` doc comments would be picked up by clap as help text.)
#[derive(Parser)]
#[clap(
    name = clap::crate_name!(),
    version = clap::crate_version!(),
    author = clap::crate_authors!("\n"),
    about = clap::crate_description!(),
    long_version = clap::crate_version!(),
    propagate_version = true,
)]
pub struct Opts {
    #[clap(subcommand)]
    pub subcmd: SubCommand,
}
|
||||
|
||||
// The two top-level command groups: repository management and worktree
// management (`grm repos …` / `grm worktree …`, alias `grm wt …`).
#[derive(Parser)]
pub enum SubCommand {
    #[clap(about = "Manage repositories")]
    Repos(Repos),
    #[clap(visible_alias = "wt", about = "Manage worktrees")]
    Worktree(Worktree),
}
|
||||
|
||||
// Wrapper for the `repos` subcommand; the actual work is selected via the
// nested action enum.
#[derive(Parser)]
pub struct Repos {
    #[clap(subcommand, name = "action")]
    pub action: ReposAction,
}
|
||||
|
||||
// Actions available under `grm repos`: sync, find, and status.
#[derive(Parser)]
pub enum ReposAction {
    #[clap(subcommand)]
    Sync(SyncAction),
    #[clap(subcommand)]
    Find(FindAction),
    #[clap(about = "Show status of configured repositories")]
    Status(OptionalConfig),
}
|
||||
|
||||
// Sync either from a local configuration file or directly from a remote
// provider.
#[derive(Parser)]
#[clap(about = "Sync local repositories with a configured list")]
pub enum SyncAction {
    #[clap(about = "Synchronize the repositories to the configured values")]
    Config(Config),
    #[clap(about = "Synchronize the repositories from a remote provider")]
    Remote(SyncRemoteArgs),
}
|
||||
|
||||
// Sources for discovering repositories when generating a configuration:
// the local filesystem, a remote provider, or an existing provider config.
#[derive(Parser)]
#[clap(about = "Generate a repository configuration from existing repositories")]
pub enum FindAction {
    #[clap(about = "Find local repositories")]
    Local(FindLocalArgs),
    #[clap(about = "Find repositories on remote provider")]
    Remote(FindRemoteArgs),
    #[clap(about = "Find repositories as defined in the configuration file")]
    Config(FindConfigArgs),
}
|
||||
|
||||
// Arguments for `repos find local`: scan a directory tree for git
// repositories and emit a matching configuration.
#[derive(Parser)]
pub struct FindLocalArgs {
    #[clap(help = "The path to search through")]
    pub path: String,

    #[clap(
        short,
        long,
        help = "Exclude repositories that match the given regex",
        name = "REGEX"
    )]
    pub exclude: Option<String>,

    // Output format of the generated configuration (TOML by default).
    #[clap(
        value_enum,
        short,
        long,
        help = "Format to produce",
        default_value_t = ConfigFormat::Toml,
    )]
    pub format: ConfigFormat,
}
|
||||
|
||||
// Arguments for `repos find config`: read a provider configuration file and
// emit the discovered repositories as a tree configuration.
#[derive(Parser)]
pub struct FindConfigArgs {
    #[clap(
        short,
        long,
        default_value = "./config.toml",
        help = "Path to the configuration file"
    )]
    pub config: String,

    // Output format of the generated configuration (TOML by default).
    #[clap(
        value_enum,
        short,
        long,
        help = "Format to produce",
        default_value_t = ConfigFormat::Toml,
    )]
    pub format: ConfigFormat,
}
|
||||
|
||||
// Arguments for `repos find remote`: query a remote provider (GitHub/GitLab)
// and emit the discovered repositories as a tree configuration.
#[derive(Parser)]
#[clap()]
pub struct FindRemoteArgs {
    #[clap(short, long, help = "Path to the configuration file")]
    pub config: Option<String>,

    #[clap(value_enum, short, long, help = "Remote provider to use")]
    pub provider: RemoteProvider,

    #[clap(short, long, help = "Name of the remote to use")]
    pub remote_name: Option<String>,

    // May be given multiple times (one value per occurrence).
    #[clap(
        action = clap::ArgAction::Append,
        name = "user",
        long,
        help = "Users to get repositories from"
    )]
    pub users: Vec<String>,

    // May be given multiple times (one value per occurrence).
    #[clap(
        action = clap::ArgAction::Append,
        name = "group",
        long,
        help = "Groups to get repositories from"
    )]
    pub groups: Vec<String>,

    #[clap(long, help = "Get repositories that belong to the requesting user")]
    pub owner: bool,

    #[clap(long, help = "Get repositories that the requesting user has access to")]
    pub access: bool,

    #[clap(long, help = "Always use SSH, even for public repositories")]
    pub force_ssh: bool,

    // The token itself is never passed on the command line; a command that
    // prints it is run instead.
    #[clap(long, help = "Command to get API token")]
    pub token_command: String,

    #[clap(long, help = "Root of the repo tree to produce")]
    pub root: String,

    #[clap(
        value_enum,
        short,
        long,
        help = "Format to produce",
        default_value_t = ConfigFormat::Toml,
    )]
    pub format: ConfigFormat,

    // Modelled as a "true"/"false" string so the flag can be given with or
    // without an explicit value (`--worktree` == `--worktree true`).
    #[clap(
        long,
        help = "Use worktree setup for repositories",
        value_parser = ["true", "false"],
        default_value = "false",
        default_missing_value = "true",
        num_args = 0..=1,
    )]
    pub worktree: String,

    #[clap(long, help = "Base URL for the API")]
    pub api_url: Option<String>,
}
|
||||
|
||||
// Arguments for `repos sync config`: sync repositories from a local
// configuration file.
#[derive(Parser)]
#[clap()]
pub struct Config {
    #[clap(
        short,
        long,
        default_value = "./config.toml",
        help = "Path to the configuration file"
    )]
    pub config: String,

    // Modelled as a "true"/"false" string so the flag can be given with or
    // without an explicit value; enabled by default.
    #[clap(
        long,
        value_parser = ["true", "false"],
        help = "Check out the default worktree after clone",
        default_value = "true",
        default_missing_value = "true",
        num_args = 0..=1,
    )]
    pub init_worktree: String,
}
|
||||
|
||||
// Re-export so CLI code can name the provider enum without the long path.
pub type RemoteProvider = super::provider::RemoteProvider;
|
||||
|
||||
// Arguments for `repos sync remote`: fetch the repository list from a remote
// provider and sync the local tree to it.
#[derive(Parser)]
#[clap()]
pub struct SyncRemoteArgs {
    #[clap(value_enum, short, long, help = "Remote provider to use")]
    pub provider: RemoteProvider,

    #[clap(short, long, help = "Name of the remote to use")]
    pub remote_name: Option<String>,

    // May be given multiple times (one value per occurrence).
    #[clap(
        action = clap::ArgAction::Append,
        name = "user",
        long,
        help = "Users to get repositories from"
    )]
    pub users: Vec<String>,

    // May be given multiple times (one value per occurrence).
    #[clap(
        action = clap::ArgAction::Append,
        name = "group",
        long,
        help = "Groups to get repositories from"
    )]
    pub groups: Vec<String>,

    #[clap(long, help = "Get repositories that belong to the requesting user")]
    pub owner: bool,

    #[clap(long, help = "Get repositories that the requesting user has access to")]
    pub access: bool,

    #[clap(long, help = "Always use SSH, even for public repositories")]
    pub force_ssh: bool,

    // The token itself is never passed on the command line; a command that
    // prints it is run instead.
    #[clap(long, help = "Command to get API token")]
    pub token_command: String,

    #[clap(long, help = "Root of the repo tree to produce")]
    pub root: String,

    // Modelled as a "true"/"false" string so the flag can be given with or
    // without an explicit value (`--worktree` == `--worktree true`).
    #[clap(
        long,
        help = "Use worktree setup for repositories",
        value_parser = ["true", "false"],
        default_value = "false",
        default_missing_value = "true",
        num_args = 0..=1,
    )]
    pub worktree: String,

    #[clap(long, help = "Base URL for the API")]
    pub api_url: Option<String>,

    // Same "true"/"false" string pattern; enabled by default.
    #[clap(
        long,
        help = "Check out the default worktree after clone",
        value_parser = ["true", "false"],
        default_value = "true",
        default_missing_value = "true",
        num_args = 0..=1,
    )]
    pub init_worktree: String,
}
|
||||
|
||||
// Used where a configuration file is optional (e.g. `repos status`, which
// falls back to the current directory when no config is given).
#[derive(Parser)]
#[clap()]
pub struct OptionalConfig {
    #[clap(short, long, help = "Path to the configuration file")]
    pub config: Option<String>,
}
|
||||
|
||||
// Output formats supported when generating configuration files.
#[derive(clap::ValueEnum, Clone)]
pub enum ConfigFormat {
    Yaml,
    Toml,
}
|
||||
|
||||
// Wrapper for the `worktree` subcommand; the actual work is selected via the
// nested action enum.
#[derive(Parser)]
pub struct Worktree {
    #[clap(subcommand, name = "action")]
    pub action: WorktreeAction,
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum WorktreeAction {
|
||||
#[clap(about = "Add a new worktree")]
|
||||
Add(WorktreeAddArgs),
|
||||
#[clap(about = "Add an existing worktree")]
|
||||
Delete(WorktreeDeleteArgs),
|
||||
#[clap(about = "Show state of existing worktrees")]
|
||||
Status(WorktreeStatusArgs),
|
||||
#[clap(about = "Convert a normal repository to a worktree setup")]
|
||||
Convert(WorktreeConvertArgs),
|
||||
#[clap(about = "Clean all worktrees that do not contain uncommited/unpushed changes")]
|
||||
Clean(WorktreeCleanArgs),
|
||||
#[clap(about = "Fetch refs from remotes")]
|
||||
Fetch(WorktreeFetchArgs),
|
||||
#[clap(about = "Fetch refs from remotes and update local branches")]
|
||||
Pull(WorktreePullArgs),
|
||||
#[clap(about = "Rebase worktree onto default branch")]
|
||||
Rebase(WorktreeRebaseArgs),
|
||||
}
|
||||
|
||||
// Arguments for `worktree add`.
#[derive(Parser)]
pub struct WorktreeAddArgs {
    #[clap(help = "Name of the worktree")]
    pub name: String,

    // Expected in `<remote>/<branch>` form (validated by the caller).
    #[clap(short = 't', long = "track", help = "Remote branch to track")]
    pub track: Option<String>,

    // Takes precedence over `--track` when both are given.
    #[clap(long = "no-track", help = "Disable tracking")]
    pub no_track: bool,
}
|
||||
// Arguments for `worktree delete`.
#[derive(Parser)]
pub struct WorktreeDeleteArgs {
    #[clap(help = "Name of the worktree")]
    pub name: String,

    #[clap(
        long = "force",
        help = "Force deletion, even when there are uncommitted/unpushed changes"
    )]
    pub force: bool,
}
|
||||
|
||||
// `worktree status` takes no arguments; kept as a struct for API symmetry.
#[derive(Parser)]
pub struct WorktreeStatusArgs {}
|
||||
|
||||
// `worktree convert` takes no arguments; kept as a struct for API symmetry.
#[derive(Parser)]
pub struct WorktreeConvertArgs {}
|
||||
|
||||
// `worktree clean` takes no arguments; kept as a struct for API symmetry.
#[derive(Parser)]
pub struct WorktreeCleanArgs {}
|
||||
|
||||
// `worktree fetch` takes no arguments; kept as a struct for API symmetry.
#[derive(Parser)]
pub struct WorktreeFetchArgs {}
|
||||
|
||||
// Arguments for `worktree pull`.
#[derive(Parser)]
pub struct WorktreePullArgs {
    #[clap(long = "rebase", help = "Perform a rebase instead of a fast-forward")]
    pub rebase: bool,
    #[clap(long = "stash", help = "Stash & unstash changes before & after pull")]
    pub stash: bool,
}
|
||||
|
||||
// Arguments for `worktree rebase`.
#[derive(Parser)]
pub struct WorktreeRebaseArgs {
    #[clap(long = "pull", help = "Perform a pull before rebasing")]
    pub pull: bool,
    // Only meaningful together with `--pull`.
    #[clap(long = "rebase", help = "Perform a rebase when doing a pull")]
    pub rebase: bool,
    #[clap(long = "stash", help = "Stash & unstash changes before & after rebase")]
    pub stash: bool,
}
}
|
||||
|
||||
/// Parse the process command line into the `Opts` structure.
/// Exits the process with a usage message on invalid input (clap behavior).
pub fn parse() -> Opts {
    Opts::parse()
}
|
||||
800
src/grm/main.rs
Normal file
800
src/grm/main.rs
Normal file
@@ -0,0 +1,800 @@
|
||||
#![forbid(unsafe_code)]
|
||||
|
||||
use std::path::Path;
|
||||
use std::process;
|
||||
|
||||
mod cmd;
|
||||
|
||||
use grm::auth;
|
||||
use grm::config;
|
||||
use grm::find_in_tree;
|
||||
use grm::output::*;
|
||||
use grm::path;
|
||||
use grm::provider;
|
||||
use grm::provider::Provider;
|
||||
use grm::repo;
|
||||
use grm::table;
|
||||
use grm::tree;
|
||||
use grm::worktree;
|
||||
|
||||
fn main() {
|
||||
let opts = cmd::parse();
|
||||
|
||||
match opts.subcmd {
|
||||
cmd::SubCommand::Repos(repos) => match repos.action {
|
||||
cmd::ReposAction::Sync(sync) => match sync {
|
||||
cmd::SyncAction::Config(args) => {
|
||||
let config = match config::read_config(&args.config) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
match tree::sync_trees(config, args.init_worktree == "true") {
|
||||
Ok(success) => {
|
||||
if !success {
|
||||
process::exit(1)
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Sync error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::SyncAction::Remote(args) => {
|
||||
let token = match auth::get_token_from_command(&args.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filter =
|
||||
provider::Filter::new(args.users, args.groups, args.owner, args.access);
|
||||
|
||||
if filter.empty() {
|
||||
print_warning("You did not specify any filters, so no repos will match");
|
||||
}
|
||||
|
||||
let worktree = args.worktree == "true";
|
||||
|
||||
let repos = match args.provider {
|
||||
cmd::RemoteProvider::Github => {
|
||||
match provider::Github::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Sync error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
worktree,
|
||||
args.force_ssh,
|
||||
args.remote_name,
|
||||
)
|
||||
}
|
||||
cmd::RemoteProvider::Gitlab => {
|
||||
match provider::Gitlab::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Sync error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
worktree,
|
||||
args.force_ssh,
|
||||
args.remote_name,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
match repos {
|
||||
Ok(repos) => {
|
||||
let mut trees: Vec<config::ConfigTree> = vec![];
|
||||
|
||||
for (namespace, repolist) in repos {
|
||||
let root = if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&args.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&args.root))
|
||||
};
|
||||
|
||||
let tree = config::ConfigTree::from_repos(root, repolist);
|
||||
trees.push(tree);
|
||||
}
|
||||
|
||||
let config = config::Config::from_trees(trees);
|
||||
|
||||
match tree::sync_trees(config, args.init_worktree == "true") {
|
||||
Ok(success) => {
|
||||
if !success {
|
||||
process::exit(1)
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Sync error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Sync error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
cmd::ReposAction::Status(args) => match &args.config {
|
||||
Some(config_path) => {
|
||||
let config = match config::read_config(config_path) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
match table::get_status_table(config) {
|
||||
Ok((tables, errors)) => {
|
||||
for table in tables {
|
||||
println!("{}", table);
|
||||
}
|
||||
for error in errors {
|
||||
print_error(&format!("Error: {}", error));
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error getting status: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let dir = match std::env::current_dir() {
|
||||
Ok(dir) => dir,
|
||||
Err(error) => {
|
||||
print_error(&format!("Could not open current directory: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
match table::show_single_repo_status(&dir) {
|
||||
Ok((table, warnings)) => {
|
||||
println!("{}", table);
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error getting status: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
cmd::ReposAction::Find(find) => match find {
|
||||
cmd::FindAction::Local(args) => {
|
||||
let path = Path::new(&args.path);
|
||||
if !path.exists() {
|
||||
print_error(&format!("Path \"{}\" does not exist", path.display()));
|
||||
process::exit(1);
|
||||
}
|
||||
if !path.is_dir() {
|
||||
print_error(&format!("Path \"{}\" is not a directory", path.display()));
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
let path = match path.canonicalize() {
|
||||
Ok(path) => path,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed to canonicalize path \"{}\". This is a bug. Error message: {}",
|
||||
&path.display(),
|
||||
error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let (found_repos, warnings) = match find_in_tree(&path, args.exclude.as_deref())
|
||||
{
|
||||
Ok((repos, warnings)) => (repos, warnings),
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let trees = config::ConfigTrees::from_trees(vec![found_repos]);
|
||||
if trees.trees_ref().iter().all(|t| match &t.repos {
|
||||
None => false,
|
||||
Some(r) => r.is_empty(),
|
||||
}) {
|
||||
print_warning("No repositories found");
|
||||
} else {
|
||||
let mut config = trees.to_config();
|
||||
|
||||
config.normalize();
|
||||
|
||||
match args.format {
|
||||
cmd::ConfigFormat::Toml => {
|
||||
let toml = match config.as_toml() {
|
||||
Ok(toml) => toml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to TOML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", toml);
|
||||
}
|
||||
cmd::ConfigFormat::Yaml => {
|
||||
let yaml = match config.as_yaml() {
|
||||
Ok(yaml) => yaml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to YAML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", yaml);
|
||||
}
|
||||
}
|
||||
}
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
cmd::FindAction::Config(args) => {
|
||||
let config: config::ConfigProvider = match config::read_config(&args.config) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let token = match auth::get_token_from_command(&config.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filters = config.filters.unwrap_or(config::ConfigProviderFilter {
|
||||
access: Some(false),
|
||||
owner: Some(false),
|
||||
users: Some(vec![]),
|
||||
groups: Some(vec![]),
|
||||
});
|
||||
|
||||
let filter = provider::Filter::new(
|
||||
filters.users.unwrap_or_default(),
|
||||
filters.groups.unwrap_or_default(),
|
||||
filters.owner.unwrap_or(false),
|
||||
filters.access.unwrap_or(false),
|
||||
);
|
||||
|
||||
if filter.empty() {
|
||||
print_warning("You did not specify any filters, so no repos will match");
|
||||
}
|
||||
|
||||
let repos = match config.provider {
|
||||
provider::RemoteProvider::Github => {
|
||||
match match provider::Github::new(filter, token, config.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
config.worktree.unwrap_or(false),
|
||||
config.force_ssh.unwrap_or(false),
|
||||
config.remote_name,
|
||||
) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
provider::RemoteProvider::Gitlab => {
|
||||
match match provider::Gitlab::new(filter, token, config.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
config.worktree.unwrap_or(false),
|
||||
config.force_ssh.unwrap_or(false),
|
||||
config.remote_name,
|
||||
) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let mut trees = vec![];
|
||||
|
||||
for (namespace, namespace_repos) in repos {
|
||||
let tree = config::ConfigTree {
|
||||
root: if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&config.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&config.root))
|
||||
},
|
||||
repos: Some(
|
||||
namespace_repos
|
||||
.into_iter()
|
||||
.map(config::RepoConfig::from_repo)
|
||||
.collect(),
|
||||
),
|
||||
};
|
||||
trees.push(tree);
|
||||
}
|
||||
|
||||
let config = config::Config::from_trees(trees);
|
||||
|
||||
match args.format {
|
||||
cmd::ConfigFormat::Toml => {
|
||||
let toml = match config.as_toml() {
|
||||
Ok(toml) => toml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to TOML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", toml);
|
||||
}
|
||||
cmd::ConfigFormat::Yaml => {
|
||||
let yaml = match config.as_yaml() {
|
||||
Ok(yaml) => yaml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to YAML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", yaml);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::FindAction::Remote(args) => {
|
||||
let token = match auth::get_token_from_command(&args.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filter =
|
||||
provider::Filter::new(args.users, args.groups, args.owner, args.access);
|
||||
|
||||
if filter.empty() {
|
||||
print_warning("You did not specify any filters, so no repos will match");
|
||||
}
|
||||
|
||||
let worktree = args.worktree == "true";
|
||||
|
||||
let repos = match args.provider {
|
||||
cmd::RemoteProvider::Github => {
|
||||
match provider::Github::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
worktree,
|
||||
args.force_ssh,
|
||||
args.remote_name,
|
||||
)
|
||||
}
|
||||
cmd::RemoteProvider::Gitlab => {
|
||||
match provider::Gitlab::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
worktree,
|
||||
args.force_ssh,
|
||||
args.remote_name,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let repos = repos.unwrap_or_else(|error| {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut trees: Vec<config::ConfigTree> = vec![];
|
||||
|
||||
for (namespace, repolist) in repos {
|
||||
let tree = config::ConfigTree {
|
||||
root: if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&args.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&args.root))
|
||||
},
|
||||
repos: Some(
|
||||
repolist
|
||||
.into_iter()
|
||||
.map(config::RepoConfig::from_repo)
|
||||
.collect(),
|
||||
),
|
||||
};
|
||||
trees.push(tree);
|
||||
}
|
||||
|
||||
let mut config = config::Config::from_trees(trees);
|
||||
|
||||
config.normalize();
|
||||
|
||||
match args.format {
|
||||
cmd::ConfigFormat::Toml => {
|
||||
let toml = match config.as_toml() {
|
||||
Ok(toml) => toml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to TOML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", toml);
|
||||
}
|
||||
cmd::ConfigFormat::Yaml => {
|
||||
let yaml = match config.as_yaml() {
|
||||
Ok(yaml) => yaml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to YAML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", yaml);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
cmd::SubCommand::Worktree(args) => {
|
||||
let cwd = std::env::current_dir().unwrap_or_else(|error| {
|
||||
print_error(&format!("Could not open current directory: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match args.action {
|
||||
cmd::WorktreeAction::Add(action_args) => {
|
||||
if action_args.track.is_some() && action_args.no_track {
|
||||
print_warning("You are using --track and --no-track at the same time. --track will be ignored");
|
||||
}
|
||||
let track = match &action_args.track {
|
||||
Some(branch) => {
|
||||
let split = branch.split_once('/');
|
||||
|
||||
if split.is_none()
|
||||
|| split.unwrap().0.is_empty()
|
||||
|| split.unwrap().1.is_empty()
|
||||
{
|
||||
print_error("Tracking branch needs to match the pattern <remote>/<branch_name>");
|
||||
process::exit(1);
|
||||
};
|
||||
|
||||
// unwrap() here is safe because we checked for
|
||||
// is_none() explictily before
|
||||
let (remote_name, remote_branch_name) = split.unwrap();
|
||||
|
||||
Some((remote_name, remote_branch_name))
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
match worktree::add_worktree(
|
||||
&cwd,
|
||||
&action_args.name,
|
||||
track,
|
||||
action_args.no_track,
|
||||
) {
|
||||
Ok(warnings) => {
|
||||
if let Some(warnings) = warnings {
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
print_success(&format!("Worktree {} created", &action_args.name));
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error creating worktree: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Delete(action_args) => {
|
||||
let worktree_config = match repo::read_worktree_root_config(&cwd) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Error getting worktree configuration: {}",
|
||||
error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
print_error(&format!("Error opening repository: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match repo.remove_worktree(
|
||||
&cwd,
|
||||
&action_args.name,
|
||||
Path::new(&action_args.name),
|
||||
action_args.force,
|
||||
&worktree_config,
|
||||
) {
|
||||
Ok(_) => print_success(&format!("Worktree {} deleted", &action_args.name)),
|
||||
Err(error) => {
|
||||
match error {
|
||||
repo::WorktreeRemoveFailureReason::Error(msg) => {
|
||||
print_error(&msg);
|
||||
process::exit(1);
|
||||
}
|
||||
repo::WorktreeRemoveFailureReason::Changes(changes) => {
|
||||
print_warning(&format!(
|
||||
"Changes in worktree: {}. Refusing to delete",
|
||||
changes
|
||||
));
|
||||
}
|
||||
repo::WorktreeRemoveFailureReason::NotMerged(message) => {
|
||||
print_warning(&message);
|
||||
}
|
||||
}
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Status(_args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
print_error(&format!("Error opening repository: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match table::get_worktree_status_table(&repo, &cwd) {
|
||||
Ok((table, errors)) => {
|
||||
println!("{}", table);
|
||||
for error in errors {
|
||||
print_error(&format!("Error: {}", error));
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error getting status: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Convert(_args) => {
|
||||
// Converting works like this:
|
||||
// * Check whether there are uncommitted/unpushed changes
|
||||
// * Move the contents of .git dir to the worktree directory
|
||||
// * Remove all files
|
||||
// * Set `core.bare` to `true`
|
||||
|
||||
let repo = repo::RepoHandle::open(&cwd, false).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match repo.convert_to_worktree(&cwd) {
|
||||
Ok(_) => print_success("Conversion done"),
|
||||
Err(reason) => {
|
||||
match reason {
|
||||
repo::WorktreeConversionFailureReason::Changes => {
|
||||
print_error("Changes found in repository, refusing to convert");
|
||||
}
|
||||
repo::WorktreeConversionFailureReason::Ignored => {
|
||||
print_error("Ignored files found in repository, refusing to convert. Run git clean -f -d -X to remove them manually.");
|
||||
}
|
||||
repo::WorktreeConversionFailureReason::Error(error) => {
|
||||
print_error(&format!("Error during conversion: {}", error));
|
||||
}
|
||||
}
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Clean(_args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match repo.cleanup_worktrees(&cwd) {
|
||||
Ok(warnings) => {
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Worktree cleanup failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
for unmanaged_worktree in
|
||||
repo.find_unmanaged_worktrees(&cwd).unwrap_or_else(|error| {
|
||||
print_error(&format!("Failed finding unmanaged worktrees: {}", error));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
print_warning(&format!(
|
||||
"Found {}, which is not a valid worktree directory!",
|
||||
&unmanaged_worktree
|
||||
));
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Fetch(_args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
repo.fetchall().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error fetching remotes: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
print_success("Fetched from all remotes");
|
||||
}
|
||||
cmd::WorktreeAction::Pull(args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
repo.fetchall().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error fetching remotes: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut failures = false;
|
||||
for worktree in repo.get_worktrees().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error getting worktrees: {}", error));
|
||||
process::exit(1);
|
||||
}) {
|
||||
if let Some(warning) = worktree
|
||||
.forward_branch(args.rebase, args.stash)
|
||||
.unwrap_or_else(|error| {
|
||||
print_error(&format!("Error updating worktree branch: {}", error));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
print_warning(&format!("{}: {}", worktree.name(), warning));
|
||||
failures = true;
|
||||
} else {
|
||||
print_success(&format!("{}: Done", worktree.name()));
|
||||
}
|
||||
}
|
||||
if failures {
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Rebase(args) => {
|
||||
if args.rebase && !args.pull {
|
||||
print_error("There is no point in using --rebase without --pull");
|
||||
process::exit(1);
|
||||
}
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
if args.pull {
|
||||
repo.fetchall().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error fetching remotes: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
let config = repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
|
||||
print_error(&format!("Failed to read worktree configuration: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let worktrees = repo.get_worktrees().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error getting worktrees: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut failures = false;
|
||||
|
||||
for worktree in &worktrees {
|
||||
if args.pull {
|
||||
if let Some(warning) = worktree
|
||||
.forward_branch(args.rebase, args.stash)
|
||||
.unwrap_or_else(|error| {
|
||||
print_error(&format!(
|
||||
"Error updating worktree branch: {}",
|
||||
error
|
||||
));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
failures = true;
|
||||
print_warning(&format!("{}: {}", worktree.name(), warning));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for worktree in &worktrees {
|
||||
if let Some(warning) = worktree
|
||||
.rebase_onto_default(&config, args.stash)
|
||||
.unwrap_or_else(|error| {
|
||||
print_error(&format!("Error rebasing worktree branch: {}", error));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
failures = true;
|
||||
print_warning(&format!("{}: {}", worktree.name(), warning));
|
||||
} else {
|
||||
print_success(&format!("{}: Done", worktree.name()));
|
||||
}
|
||||
}
|
||||
if failures {
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1332
src/lib.rs
1332
src/lib.rs
File diff suppressed because it is too large
Load Diff
@@ -1,5 +0,0 @@
|
||||
use grm::run;
|
||||
|
||||
// Thin binary entry point: all actual logic lives in the library crate.
fn main() {
    run();
}
|
||||
@@ -20,12 +20,12 @@ pub fn print_repo_action(repo: &str, message: &str) {
|
||||
}
|
||||
|
||||
pub fn print_action(message: &str) {
|
||||
let stderr = Term::stderr();
|
||||
let stdout = Term::stdout();
|
||||
let mut style = Style::new().yellow();
|
||||
if stderr.is_term() {
|
||||
if stdout.is_term() {
|
||||
style = style.force_styling(true);
|
||||
}
|
||||
stderr
|
||||
stdout
|
||||
.write_line(&format!("[{}] {}", style.apply_to('\u{2699}'), &message))
|
||||
.unwrap();
|
||||
}
|
||||
@@ -46,13 +46,13 @@ pub fn print_repo_success(repo: &str, message: &str) {
|
||||
}
|
||||
|
||||
pub fn print_success(message: &str) {
|
||||
let stderr = Term::stderr();
|
||||
let stdout = Term::stdout();
|
||||
let mut style = Style::new().green();
|
||||
if stderr.is_term() {
|
||||
if stdout.is_term() {
|
||||
style = style.force_styling(true);
|
||||
}
|
||||
|
||||
stderr
|
||||
stdout
|
||||
.write_line(&format!("[{}] {}", style.apply_to('\u{2714}'), &message))
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
80
src/path.rs
Normal file
80
src/path.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process;
|
||||
|
||||
use super::output::*;
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Point HOME at a fixed location so tilde/$HOME expansion is
    // deterministic regardless of the environment running the tests.
    // NOTE(review): set_var mutates process-global state; assumes these
    // tests are not racing other tests that read HOME.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    // A leading `~` expands to $HOME.
    #[test]
    fn check_expand_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("~/file")),
            Path::new("/home/test/file")
        );
    }

    // A `~` that is not the leading component is left untouched.
    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("/home/~/file")),
            Path::new("/home/~/file")
        );
    }

    // Both `$HOME` and `${HOME}` variable forms are expanded.
    #[test]
    fn check_expand_home() {
        setup();
        assert_eq!(
            expand_path(Path::new("$HOME/file")),
            Path::new("/home/test/file")
        );
        assert_eq!(
            expand_path(Path::new("${HOME}/file")),
            Path::new("/home/test/file")
        );
    }
}
|
||||
|
||||
/// Converts a `Path` into an owned `String`.
///
/// # Panics
///
/// Panics if the path is not valid UTF-8 (same condition as the previous
/// `into_os_string().into_string().unwrap()` round trip).
pub fn path_as_string(path: &Path) -> String {
    // `to_str` borrows directly; this avoids the intermediate
    // `PathBuf`/`OsString` allocation of the old implementation.
    path.to_str()
        .expect("path is not valid UTF-8")
        .to_string()
}
|
||||
|
||||
pub fn env_home() -> String {
|
||||
match std::env::var("HOME") {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
print_error(&format!("Unable to read HOME: {}", e));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Expands a leading `~` and `$HOME`/`${HOME}` references in `path`.
///
/// Only the HOME variable is recognized; any other `$VAR` is passed
/// through unchanged. Exits the whole process if expansion fails.
pub fn expand_path(path: &Path) -> PathBuf {
    let expanded_path = match shellexpand::full_with_context(
        &path_as_string(path),
        // Tilde context: `~` resolves to $HOME.
        || Some(env_home()),
        // Variable context: returning Ok(None) for unknown names keeps
        // the original `$VAR` text in place instead of failing.
        |name| -> Result<Option<String>, &'static str> {
            match name {
                "HOME" => Ok(Some(env_home())),
                _ => Ok(None),
            }
        },
    ) {
        // Borrowed: nothing needed expanding, clone into an owned String.
        Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
        Ok(std::borrow::Cow::Owned(path)) => path,
        Err(e) => {
            print_error(&format!("Unable to expand root: {}", e));
            process::exit(1);
        }
    };

    Path::new(&expanded_path).to_path_buf()
}
|
||||
142
src/provider/github.rs
Normal file
142
src/provider/github.rs
Normal file
@@ -0,0 +1,142 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::auth;
|
||||
use super::escape;
|
||||
use super::ApiErrorResponse;
|
||||
use super::Filter;
|
||||
use super::JsonError;
|
||||
use super::Project;
|
||||
use super::Provider;
|
||||
|
||||
// Accept header for Github's v3 REST API.
const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json";
// `option_env!` is evaluated at *compile* time, so the endpoint can only
// be overridden when building (useful for pointing at a mock server).
const GITHUB_API_BASEURL: &str = match option_env!("GITHUB_API_BASEURL") {
    Some(url) => url,
    None => "https://api.github.com",
};

/// Subset of the fields Github returns for a repository.
#[derive(Deserialize)]
pub struct GithubProject {
    pub name: String,
    // "owner/name" form; used to derive the namespace.
    pub full_name: String,
    pub clone_url: String,
    pub ssh_url: String,
    pub private: bool,
}

/// Subset of the fields Github returns for the authenticated user.
#[derive(Deserialize)]
struct GithubUser {
    // Github calls the username "login" in its API responses.
    #[serde(rename = "login")]
    pub username: String,
}
|
||||
|
||||
impl Project for GithubProject {
    /// Repository name without the owner part.
    fn name(&self) -> String {
        self.name.clone()
    }

    /// Owner part of "owner/name", or `None` when there is no slash.
    fn namespace(&self) -> Option<String> {
        self.full_name
            .rsplit_once('/')
            .map(|(namespace, _name)| namespace.to_string())
    }

    fn ssh_url(&self) -> String {
        self.ssh_url.clone()
    }

    fn http_url(&self) -> String {
        self.clone_url.clone()
    }

    fn private(&self) -> bool {
        self.private
    }
}
|
||||
|
||||
/// Error payload Github returns for failed API calls.
#[derive(Deserialize)]
pub struct GithubApiErrorResponse {
    pub message: String,
}

impl JsonError for GithubApiErrorResponse {
    // Consumes the response and yields the plain error message.
    fn to_string(self) -> String {
        self.message
    }
}
|
||||
|
||||
/// Github API client, parameterized by a project [`Filter`] and an
/// authentication token.
pub struct Github {
    filter: Filter,
    secret_token: auth::AuthToken,
}

impl Provider for Github {
    type Project = GithubProject;
    type Error = GithubApiErrorResponse;

    /// Creates a new client.
    ///
    /// Github does not support a runtime API URL override (only the
    /// compile-time GITHUB_API_BASEURL), so passing one is an error.
    fn new(
        filter: Filter,
        secret_token: auth::AuthToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        if api_url_override.is_some() {
            return Err("API URL overriding is not supported for Github".to_string());
        }
        Ok(Self {
            filter,
            secret_token,
        })
    }

    fn filter(&self) -> &Filter {
        &self.filter
    }

    fn secret_token(&self) -> &auth::AuthToken {
        &self.secret_token
    }

    // Github expects "token <secret>" in the authorization header.
    fn auth_header_key() -> &'static str {
        "token"
    }

    /// Repositories owned by `user`.
    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/users/{}/repos", escape(user)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Repositories of the organization `group`.
    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/orgs/{}/repos?type=all", escape(group)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// All repositories accessible to the authenticated user.
    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/user/repos"),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Username ("login") of the authenticated user.
    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> {
        Ok(super::call::<GithubUser, GithubApiErrorResponse>(
            &format!("{GITHUB_API_BASEURL}/user"),
            Self::auth_header_key(),
            self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?
        .username)
    }
}
|
||||
164
src/provider/gitlab.rs
Normal file
164
src/provider/gitlab.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::auth;
|
||||
use super::escape;
|
||||
use super::ApiErrorResponse;
|
||||
use super::Filter;
|
||||
use super::JsonError;
|
||||
use super::Project;
|
||||
use super::Provider;
|
||||
|
||||
const ACCEPT_HEADER_JSON: &str = "application/json";
// `option_env!` is evaluated at *compile* time; unlike Github, the
// endpoint can additionally be overridden per-instance at runtime
// via `api_url_override` (for self-hosted instances).
const GITLAB_API_BASEURL: &str = match option_env!("GITLAB_API_BASEURL") {
    Some(url) => url,
    None => "https://gitlab.com",
};

/// Gitlab's three project visibility levels.
#[derive(Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum GitlabVisibility {
    Private,
    Internal,
    Public,
}

/// Subset of the fields Gitlab returns for a project.
#[derive(Deserialize)]
pub struct GitlabProject {
    // Gitlab's "path" is the URL-safe project name.
    #[serde(rename = "path")]
    pub name: String,
    pub path_with_namespace: String,
    pub http_url_to_repo: String,
    pub ssh_url_to_repo: String,
    pub visibility: GitlabVisibility,
}

/// Subset of the fields Gitlab returns for the authenticated user.
#[derive(Deserialize)]
struct GitlabUser {
    pub username: String,
}
|
||||
|
||||
impl Project for GitlabProject {
|
||||
fn name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
fn namespace(&self) -> Option<String> {
|
||||
if let Some((namespace, _name)) = self.path_with_namespace.rsplit_once('/') {
|
||||
Some(namespace.to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn ssh_url(&self) -> String {
|
||||
self.ssh_url_to_repo.clone()
|
||||
}
|
||||
|
||||
fn http_url(&self) -> String {
|
||||
self.http_url_to_repo.clone()
|
||||
}
|
||||
|
||||
fn private(&self) -> bool {
|
||||
!matches!(self.visibility, GitlabVisibility::Public)
|
||||
}
|
||||
}
|
||||
|
||||
/// Error payload Gitlab returns for failed API calls; the message may
/// arrive under "message", "error_description" or "error".
#[derive(Deserialize)]
pub struct GitlabApiErrorResponse {
    #[serde(alias = "error_description", alias = "error")]
    pub message: String,
}

impl JsonError for GitlabApiErrorResponse {
    // Consumes the response and yields the plain error message.
    fn to_string(self) -> String {
        self.message
    }
}
|
||||
|
||||
/// Gitlab API client, parameterized by a project [`Filter`], an
/// authentication token, and an optional custom API base URL for
/// self-hosted instances.
pub struct Gitlab {
    filter: Filter,
    secret_token: auth::AuthToken,
    api_url_override: Option<String>,
}
|
||||
|
||||
impl Gitlab {
|
||||
fn api_url(&self) -> String {
|
||||
self.api_url_override
|
||||
.as_ref()
|
||||
.unwrap_or(&GITLAB_API_BASEURL.to_string())
|
||||
.trim_end_matches('/')
|
||||
.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl Provider for Gitlab {
    type Project = GitlabProject;
    type Error = GitlabApiErrorResponse;

    /// Creates a new client; unlike Github, a runtime API URL override
    /// is accepted (self-hosted instances).
    fn new(
        filter: Filter,
        secret_token: auth::AuthToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        Ok(Self {
            filter,
            secret_token,
            api_url_override,
        })
    }

    fn filter(&self) -> &Filter {
        &self.filter
    }

    fn secret_token(&self) -> &auth::AuthToken {
        &self.secret_token
    }

    // Gitlab expects "bearer <secret>" in the authorization header.
    fn auth_header_key() -> &'static str {
        "bearer"
    }

    /// Projects owned by `user`.
    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!("{}/api/v4/users/{}/projects", self.api_url(), escape(user)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Non-archived projects of `group`, including subgroups.
    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!(
                "{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false",
                self.api_url(),
                escape(group),
            ),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// All projects accessible to the authenticated user.
    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!("{}/api/v4/projects", self.api_url(),),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Username of the authenticated user.
    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> {
        Ok(super::call::<GitlabUser, GitlabApiErrorResponse>(
            &format!("{}/api/v4/user", self.api_url()),
            Self::auth_header_key(),
            self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?
        .username)
    }
}
|
||||
364
src/provider/mod.rs
Normal file
364
src/provider/mod.rs
Normal file
@@ -0,0 +1,364 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// Required to use the `json()` method from the trait
|
||||
use isahc::ReadResponseExt;
|
||||
|
||||
pub mod github;
|
||||
pub mod gitlab;
|
||||
|
||||
pub use github::Github;
|
||||
pub use gitlab::Gitlab;
|
||||
|
||||
use super::auth;
|
||||
use super::repo;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
// Remote name used when the caller does not specify one.
const DEFAULT_REMOTE_NAME: &str = "origin";

/// The remote forges supported for project discovery.
#[derive(Debug, Deserialize, Serialize, clap::ValueEnum, Clone)]
pub enum RemoteProvider {
    #[serde(alias = "github", alias = "GitHub")]
    Github,
    #[serde(alias = "gitlab", alias = "GitLab")]
    Gitlab,
}

// Untagged: serde first tries to decode a list of projects, then falls
// back to the error shape.
// NOTE(review): not referenced anywhere in this chunk — possibly dead
// code; verify before removing.
#[derive(Deserialize)]
#[serde(untagged)]
enum ProjectResponse<T, U> {
    Success(Vec<T>),
    Failure(U),
}
|
||||
|
||||
pub fn escape(s: &str) -> String {
|
||||
url_escape::encode_component(s).to_string()
|
||||
}
|
||||
|
||||
/// A project as reported by a remote provider (Github, Gitlab, ...).
pub trait Project {
    /// Converts the provider project into a `repo::Repo` configuration
    /// with a single remote named `provider_name`.
    ///
    /// SSH is used for the remote URL when forced by the caller or when
    /// the project is private; otherwise the HTTP URL is used.
    fn into_repo_config(
        self,
        provider_name: &str,
        worktree_setup: bool,
        force_ssh: bool,
    ) -> repo::Repo
    where
        Self: Sized,
    {
        repo::Repo {
            name: self.name(),
            namespace: self.namespace(),
            worktree_setup,
            remotes: Some(vec![repo::Remote {
                name: String::from(provider_name),
                url: if force_ssh || self.private() {
                    self.ssh_url()
                } else {
                    self.http_url()
                },
                remote_type: if force_ssh || self.private() {
                    repo::RemoteType::Ssh
                } else {
                    repo::RemoteType::Https
                },
            }]),
        }
    }

    /// Project name without the namespace.
    fn name(&self) -> String;
    /// Namespace (owner/group path), if the project has one.
    fn namespace(&self) -> Option<String>;
    /// Clone URL for SSH access.
    fn ssh_url(&self) -> String;
    /// Clone URL for HTTP(S) access.
    fn http_url(&self) -> String;
    /// Whether the project is non-public.
    fn private(&self) -> bool;
}
|
||||
|
||||
/// Selects which projects to fetch from a provider: explicit user and
/// group lists plus flags for "owned by me" and "accessible to me".
#[derive(Clone)]
pub struct Filter {
    users: Vec<String>,
    groups: Vec<String>,
    owner: bool,
    access: bool,
}

impl Filter {
    pub fn new(users: Vec<String>, groups: Vec<String>, owner: bool, access: bool) -> Self {
        Self {
            users,
            groups,
            owner,
            access,
        }
    }

    /// True when the filter selects nothing at all.
    pub fn empty(&self) -> bool {
        !(self.owner || self.access || !self.users.is_empty() || !self.groups.is_empty())
    }
}
|
||||
|
||||
/// An error from a provider API call: either a decoded JSON error
/// payload, or a plain message for transport/serialization failures.
pub enum ApiErrorResponse<T>
where
    T: JsonError,
{
    Json(T),
    String(String),
}

// Lets `?` convert plain String errors into an ApiErrorResponse.
impl<T> From<String> for ApiErrorResponse<T>
where
    T: JsonError,
{
    fn from(s: String) -> ApiErrorResponse<T> {
        ApiErrorResponse::String(s)
    }
}

/// A JSON error payload that can be flattened into a plain message.
pub trait JsonError {
    // Consumes self; deliberately not `std::fmt::Display`.
    fn to_string(self) -> String;
}
|
||||
|
||||
pub trait Provider {
|
||||
type Project: serde::de::DeserializeOwned + Project;
|
||||
type Error: serde::de::DeserializeOwned + JsonError;
|
||||
|
||||
fn new(
|
||||
filter: Filter,
|
||||
secret_token: auth::AuthToken,
|
||||
api_url_override: Option<String>,
|
||||
) -> Result<Self, String>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
fn filter(&self) -> &Filter;
|
||||
fn secret_token(&self) -> &auth::AuthToken;
|
||||
fn auth_header_key() -> &'static str;
|
||||
|
||||
fn get_user_projects(
|
||||
&self,
|
||||
user: &str,
|
||||
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
fn get_group_projects(
|
||||
&self,
|
||||
group: &str,
|
||||
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
fn get_own_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
|
||||
self.get_user_projects(&self.get_current_user()?)
|
||||
}
|
||||
|
||||
fn get_accessible_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
fn get_current_user(&self) -> Result<String, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
///
|
||||
/// Calls the API at specific uri and expects a successful response of Vec<T> back, or an error
|
||||
/// response U
|
||||
///
|
||||
/// Handles paging with "link" HTTP headers properly and reads all pages to
|
||||
/// the end.
|
||||
fn call_list(
|
||||
&self,
|
||||
uri: &str,
|
||||
accept_header: Option<&str>,
|
||||
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
|
||||
let mut results = vec![];
|
||||
|
||||
let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;
|
||||
|
||||
let request = isahc::Request::builder()
|
||||
.uri(uri)
|
||||
.method("GET")
|
||||
.header("accept", accept_header.unwrap_or("application/json"))
|
||||
.header(
|
||||
"authorization",
|
||||
format!(
|
||||
"{} {}",
|
||||
Self::auth_header_key(),
|
||||
&self.secret_token().access()
|
||||
),
|
||||
)
|
||||
.body(())
|
||||
.map_err(|error| error.to_string())?;
|
||||
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let r: Self::Error = response
|
||||
.json()
|
||||
.map_err(|error| format!("Failed deserializing error response: {}", error))?;
|
||||
return Err(ApiErrorResponse::Json(r));
|
||||
}
|
||||
|
||||
let result: Vec<Self::Project> = response
|
||||
.json()
|
||||
.map_err(|error| format!("Failed deserializing response: {}", error))?;
|
||||
|
||||
results.extend(result);
|
||||
|
||||
if let Some(link_header) = response.headers().get("link") {
|
||||
let link_header = link_header.to_str().map_err(|error| error.to_string())?;
|
||||
|
||||
let link_header =
|
||||
parse_link_header::parse(link_header).map_err(|error| error.to_string())?;
|
||||
|
||||
let next_page = link_header.get(&Some(String::from("next")));
|
||||
|
||||
if let Some(page) = next_page {
|
||||
let following_repos = self.call_list(&page.raw_uri, accept_header)?;
|
||||
results.extend(following_repos);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
fn get_repos(
|
||||
&self,
|
||||
worktree_setup: bool,
|
||||
force_ssh: bool,
|
||||
remote_name: Option<String>,
|
||||
) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> {
|
||||
let mut repos = vec![];
|
||||
|
||||
if self.filter().owner {
|
||||
repos.extend(self.get_own_projects().map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?);
|
||||
}
|
||||
|
||||
if self.filter().access {
|
||||
let accessible_projects =
|
||||
self.get_accessible_projects()
|
||||
.map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?;
|
||||
|
||||
for accessible_project in accessible_projects {
|
||||
let mut already_present = false;
|
||||
for repo in &repos {
|
||||
if repo.name() == accessible_project.name()
|
||||
&& repo.namespace() == accessible_project.namespace()
|
||||
{
|
||||
already_present = true;
|
||||
}
|
||||
}
|
||||
if !already_present {
|
||||
repos.push(accessible_project);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for user in &self.filter().users {
|
||||
let user_projects = self.get_user_projects(user).map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?;
|
||||
|
||||
for user_project in user_projects {
|
||||
let mut already_present = false;
|
||||
for repo in &repos {
|
||||
if repo.name() == user_project.name()
|
||||
&& repo.namespace() == user_project.namespace()
|
||||
{
|
||||
already_present = true;
|
||||
}
|
||||
}
|
||||
if !already_present {
|
||||
repos.push(user_project);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for group in &self.filter().groups {
|
||||
let group_projects = self.get_group_projects(group).map_err(|error| {
|
||||
format!(
|
||||
"group \"{}\": {}",
|
||||
group,
|
||||
match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
}
|
||||
)
|
||||
})?;
|
||||
for group_project in group_projects {
|
||||
let mut already_present = false;
|
||||
for repo in &repos {
|
||||
if repo.name() == group_project.name()
|
||||
&& repo.namespace() == group_project.namespace()
|
||||
{
|
||||
already_present = true;
|
||||
}
|
||||
}
|
||||
|
||||
if !already_present {
|
||||
repos.push(group_project);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new();
|
||||
|
||||
let remote_name = remote_name.unwrap_or_else(|| DEFAULT_REMOTE_NAME.to_string());
|
||||
|
||||
for repo in repos {
|
||||
let namespace = repo.namespace();
|
||||
|
||||
let mut repo = repo.into_repo_config(&remote_name, worktree_setup, force_ssh);
|
||||
|
||||
// Namespace is already part of the hashmap key. I'm not too happy
|
||||
// about the data exchange format here.
|
||||
repo.remove_namespace();
|
||||
|
||||
ret.entry(namespace).or_insert(vec![]).push(repo);
|
||||
}
|
||||
|
||||
Ok(ret)
|
||||
}
|
||||
}
|
||||
|
||||
/// Performs a single authenticated GET against `uri` and deserializes
/// the JSON body into `T` on success, or into the provider error shape
/// `U` on a non-2xx response. No pagination handling (see `call_list`).
fn call<T, U>(
    uri: &str,
    auth_header_key: &str,
    secret_token: &auth::AuthToken,
    accept_header: Option<&str>,
) -> Result<T, ApiErrorResponse<U>>
where
    T: serde::de::DeserializeOwned,
    U: serde::de::DeserializeOwned + JsonError,
{
    let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;

    let request = isahc::Request::builder()
        .uri(uri)
        .header("accept", accept_header.unwrap_or("application/json"))
        .header(
            "authorization",
            format!("{} {}", &auth_header_key, &secret_token.access()),
        )
        .body(())
        .map_err(|error| ApiErrorResponse::String(error.to_string()))?;

    let mut response = client
        .send(request)
        .map_err(|error| ApiErrorResponse::String(error.to_string()))?;

    let success = response.status().is_success();

    // Non-2xx: decode the provider's JSON error payload.
    if !success {
        let response: U = response
            .json()
            .map_err(|error| format!("Failed deserializing error response: {}", error))?;

        return Err(ApiErrorResponse::Json(response));
    }

    let response: T = response
        .json()
        .map_err(|error| format!("Failed deserializing response: {}", error))?;

    Ok(response)
}
|
||||
1817
src/repo.rs
1817
src/repo.rs
File diff suppressed because it is too large
Load Diff
317
src/table.rs
Normal file
317
src/table.rs
Normal file
@@ -0,0 +1,317 @@
|
||||
use super::config;
|
||||
use super::path;
|
||||
use super::repo;
|
||||
|
||||
use comfy_table::{Cell, Table};
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
// Configures the repo-status table: rounded UTF-8 borders and the
// six-column header.
fn add_table_header(table: &mut Table) {
    table
        .load_preset(comfy_table::presets::UTF8_FULL)
        .apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS)
        .set_header(vec![
            Cell::new("Repo"),
            Cell::new("Worktree"),
            Cell::new("Status"),
            Cell::new("Branches"),
            Cell::new("HEAD"),
            Cell::new("Remotes"),
        ]);
}
|
||||
|
||||
/// Appends one row describing `repo_handle` to the status table.
///
/// For worktree-setup repos the change/HEAD columns are left blank
/// (status is per-worktree in that case, not per-repo).
fn add_repo_status(
    table: &mut Table,
    repo_name: &str,
    repo_handle: &repo::RepoHandle,
    is_worktree: bool,
) -> Result<(), String> {
    let repo_status = repo_handle.status(is_worktree)?;

    table.add_row(vec![
        repo_name,
        // Check mark marks worktree-setup repositories.
        match is_worktree {
            true => "\u{2714}",
            false => "",
        },
        // Change summary: one line per kind of change, or a check mark
        // when the tree is clean.
        &match is_worktree {
            true => String::from(""),
            false => match repo_status.changes {
                Some(changes) => {
                    let mut out = Vec::new();
                    if changes.files_new > 0 {
                        out.push(format!("New: {}\n", changes.files_new))
                    }
                    if changes.files_modified > 0 {
                        out.push(format!("Modified: {}\n", changes.files_modified))
                    }
                    if changes.files_deleted > 0 {
                        out.push(format!("Deleted: {}\n", changes.files_deleted))
                    }
                    out.into_iter().collect::<String>().trim().to_string()
                }
                None => String::from("\u{2714}"),
            },
        },
        // One line per branch: "branch: name <remote>" plus a tracking
        // marker (check/ahead/behind/diverged), or "<!local>" when the
        // branch has no upstream.
        repo_status
            .branches
            .iter()
            .map(|(branch_name, remote_branch)| {
                format!(
                    "branch: {}{}\n",
                    &branch_name,
                    &match remote_branch {
                        None => String::from(" <!local>"),
                        Some((remote_branch_name, remote_tracking_status)) => {
                            format!(
                                " <{}>{}",
                                remote_branch_name,
                                &match remote_tracking_status {
                                    repo::RemoteTrackingStatus::UpToDate =>
                                        String::from(" \u{2714}"),
                                    repo::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
                                    repo::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
                                    repo::RemoteTrackingStatus::Diverged(d1, d2) =>
                                        format!(" [+{}/-{}]", &d1, &d2),
                                }
                            )
                        }
                    }
                )
            })
            .collect::<String>()
            .trim(),
        // HEAD column; blank for worktree repos, "Empty" for unborn HEAD.
        &match is_worktree {
            true => String::from(""),
            false => match repo_status.head {
                Some(head) => head,
                None => String::from("Empty"),
            },
        },
        // One remote per line.
        repo_status
            .remotes
            .iter()
            .map(|r| format!("{}\n", r))
            .collect::<String>()
            .trim(),
    ]);

    Ok(())
}
|
||||
|
||||
// Don't return table, return a type that implements Display(?)
/// Builds a status table for every worktree of `repo` under `directory`.
///
/// Per-worktree problems (missing directory, unopenable repo) are
/// collected into the returned error list instead of aborting, so one
/// broken worktree does not hide the others.
pub fn get_worktree_status_table(
    repo: &repo::RepoHandle,
    directory: &Path,
) -> Result<(impl std::fmt::Display, Vec<String>), String> {
    let worktrees = repo.get_worktrees()?;
    let mut table = Table::new();

    let mut errors = Vec::new();

    add_worktree_table_header(&mut table);
    for worktree in &worktrees {
        let worktree_dir = &directory.join(worktree.name());
        if worktree_dir.exists() {
            let repo = match repo::RepoHandle::open(worktree_dir, false) {
                Ok(repo) => repo,
                Err(error) => {
                    errors.push(format!(
                        "Failed opening repo of worktree {}: {}",
                        &worktree.name(),
                        &error
                    ));
                    continue;
                }
            };
            if let Err(error) = add_worktree_status(&mut table, worktree, &repo) {
                errors.push(error);
            }
        } else {
            errors.push(format!(
                "Worktree {} does not have a directory",
                &worktree.name()
            ));
        }
    }
    // Also report directories that look like worktrees but are not
    // registered as such.
    for worktree in repo::RepoHandle::find_unmanaged_worktrees(repo, directory)? {
        errors.push(format!(
            "Found {}, which is not a valid worktree directory!",
            &worktree
        ));
    }
    Ok((table, errors))
}
|
||||
|
||||
/// Builds one status table per configured tree.
///
/// Missing or unopenable repositories are reported in the returned
/// error list (with a "Run sync?" hint) rather than aborting the
/// whole status run.
pub fn get_status_table(config: config::Config) -> Result<(Vec<Table>, Vec<String>), String> {
    let mut errors = Vec::new();
    let mut tables = Vec::new();
    for tree in config.trees()? {
        let repos = tree.repos.unwrap_or_default();

        let root_path = path::expand_path(Path::new(&tree.root));

        let mut table = Table::new();
        add_table_header(&mut table);

        for repo in &repos {
            let repo_path = root_path.join(&repo.name);

            if !repo_path.exists() {
                errors.push(format!(
                    "{}: Repository does not exist. Run sync?",
                    &repo.name
                ));
                continue;
            }

            let repo_handle = repo::RepoHandle::open(&repo_path, repo.worktree_setup);

            let repo_handle = match repo_handle {
                Ok(repo) => repo,
                Err(error) => {
                    // Distinguish "no repo here" from genuine open failures.
                    if error.kind == repo::RepoErrorKind::NotFound {
                        errors.push(format!(
                            "{}: No git repository found. Run sync?",
                            &repo.name
                        ));
                    } else {
                        errors.push(format!(
                            "{}: Opening repository failed: {}",
                            &repo.name, error
                        ));
                    }
                    continue;
                }
            };

            if let Err(err) =
                add_repo_status(&mut table, &repo.name, &repo_handle, repo.worktree_setup)
            {
                errors.push(format!("{}: Couldn't add repo status: {}", &repo.name, err));
            }
        }

        tables.push(table);
    }

    Ok((tables, errors))
}
|
||||
|
||||
fn add_worktree_table_header(table: &mut Table) {
|
||||
table
|
||||
.load_preset(comfy_table::presets::UTF8_FULL)
|
||||
.apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS)
|
||||
.set_header(vec![
|
||||
Cell::new("Worktree"),
|
||||
Cell::new("Status"),
|
||||
Cell::new("Branch"),
|
||||
Cell::new("Remote branch"),
|
||||
]);
|
||||
}
|
||||
|
||||
/// Appends one row for `worktree` to `table`: name, change summary, local
/// branch name, and the remote tracking branch with ahead/behind counts.
fn add_worktree_status(
    table: &mut Table,
    worktree: &repo::Worktree,
    repo: &repo::RepoHandle,
) -> Result<(), String> {
    let repo_status = repo.status(false)?;

    let local_branch = repo
        .head_branch()
        .map_err(|error| format!("Failed getting head branch: {}", error))?;

    // Remote column: "<branch> [+ahead/-behind]" when an upstream is
    // configured; empty otherwise. An upstream lookup error is treated as
    // "no upstream configured".
    let upstream_output = match local_branch.upstream() {
        Ok(remote_branch) => {
            let remote_branch_name = remote_branch
                .name()
                .map_err(|error| format!("Failed getting name of remote branch: {}", error))?;

            let (ahead, behind) = repo
                .graph_ahead_behind(&local_branch, &remote_branch)
                .map_err(|error| format!("Failed computing branch deviation: {}", error))?;

            format!(
                "{}{}\n",
                &remote_branch_name,
                // Only show the deviation suffix when there is any.
                &match (ahead, behind) {
                    (0, 0) => String::from(""),
                    (d, 0) => format!(" [+{}]", &d),
                    (0, d) => format!(" [-{}]", &d),
                    (d1, d2) => format!(" [+{}/-{}]", &d1, &d2),
                },
            )
        }
        Err(_) => String::from(""),
    };

    table.add_row(vec![
        worktree.name(),
        // Status column: per-kind change counts, or a check mark (U+2714)
        // when the worktree has no changes at all.
        &match repo_status.changes {
            Some(changes) => {
                let mut out = Vec::new();
                if changes.files_new > 0 {
                    out.push(format!("New: {}\n", changes.files_new))
                }
                if changes.files_modified > 0 {
                    out.push(format!("Modified: {}\n", changes.files_modified))
                }
                if changes.files_deleted > 0 {
                    out.push(format!("Deleted: {}\n", changes.files_deleted))
                }
                out.into_iter().collect::<String>().trim().to_string()
            }
            None => String::from("\u{2714}"),
        },
        &local_branch
            .name()
            .map_err(|error| format!("Failed getting name of branch: {}", error))?,
        &upstream_output,
    ]);

    Ok(())
}
|
||||
|
||||
pub fn show_single_repo_status(
|
||||
path: &Path,
|
||||
) -> Result<(impl std::fmt::Display, Vec<String>), String> {
|
||||
let mut table = Table::new();
|
||||
let mut warnings = Vec::new();
|
||||
|
||||
let is_worktree = repo::RepoHandle::detect_worktree(path);
|
||||
add_table_header(&mut table);
|
||||
|
||||
let repo_handle = repo::RepoHandle::open(path, is_worktree);
|
||||
|
||||
if let Err(error) = repo_handle {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
return Err(String::from("Directory is not a git directory"));
|
||||
} else {
|
||||
return Err(format!("Opening repository failed: {}", error));
|
||||
}
|
||||
};
|
||||
|
||||
let repo_name = match path.file_name() {
|
||||
None => {
|
||||
warnings.push(format!(
|
||||
"Cannot detect repo name for path {}. Are you working in /?",
|
||||
&path.display()
|
||||
));
|
||||
String::from("unknown")
|
||||
}
|
||||
Some(file_name) => match file_name.to_str() {
|
||||
None => {
|
||||
warnings.push(format!(
|
||||
"Name of repo directory {} is not valid UTF-8",
|
||||
&path.display()
|
||||
));
|
||||
String::from("invalid")
|
||||
}
|
||||
Some(name) => name.to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
add_repo_status(&mut table, &repo_name, &repo_handle.unwrap(), is_worktree)?;
|
||||
|
||||
Ok((table, warnings))
|
||||
}
|
||||
300
src/tree.rs
Normal file
300
src/tree.rs
Normal file
@@ -0,0 +1,300 @@
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use super::config;
|
||||
use super::output::*;
|
||||
use super::path;
|
||||
use super::repo;
|
||||
use super::worktree;
|
||||
|
||||
/// A tree of repositories rooted at a single filesystem directory.
pub struct Tree {
    // Root directory as written in the configuration; expanded via
    // `path::expand_path` before use.
    pub root: String,
    // Repositories managed under `root`.
    pub repos: Vec<repo::Repo>,
}
|
||||
|
||||
pub fn find_unmanaged_repos(
|
||||
root_path: &Path,
|
||||
managed_repos: &[repo::Repo],
|
||||
) -> Result<Vec<PathBuf>, String> {
|
||||
let mut unmanaged_repos = Vec::new();
|
||||
|
||||
for repo_path in find_repo_paths(root_path)? {
|
||||
if !managed_repos
|
||||
.iter()
|
||||
.any(|r| Path::new(root_path).join(r.fullname()) == repo_path)
|
||||
{
|
||||
unmanaged_repos.push(repo_path);
|
||||
}
|
||||
}
|
||||
Ok(unmanaged_repos)
|
||||
}
|
||||
|
||||
/// Syncs every tree in `config`: clones/initializes missing repositories,
/// reconciles remotes, and warns about unmanaged repositories found on disk.
///
/// Returns `Ok(true)` when everything succeeded and `Ok(false)` when at
/// least one repository failed; individual failures are printed rather than
/// returned.
pub fn sync_trees(config: config::Config, init_worktree: bool) -> Result<bool, String> {
    let mut failures = false;

    let mut unmanaged_repos_absolute_paths = vec![];
    let mut managed_repos_absolute_paths = vec![];

    let trees = config.trees()?;

    for tree in trees {
        let repos: Vec<repo::Repo> = tree
            .repos
            .unwrap_or_default()
            .into_iter()
            .map(|repo| repo.into_repo())
            .collect();

        let root_path = path::expand_path(Path::new(&tree.root));

        for repo in &repos {
            managed_repos_absolute_paths.push(root_path.join(repo.fullname()));
            match sync_repo(&root_path, repo, init_worktree) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        // Accumulate unmanaged repos across all trees, deduplicated.
        match find_unmanaged_repos(&root_path, &repos) {
            Ok(repos) => {
                for path in repos.into_iter() {
                    if !unmanaged_repos_absolute_paths.contains(&path) {
                        unmanaged_repos_absolute_paths.push(path);
                    }
                }
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    // A path can be unmanaged relative to one tree yet managed by another;
    // only warn when no tree at all manages it.
    for unmanaged_repo_absolute_path in &unmanaged_repos_absolute_paths {
        if managed_repos_absolute_paths
            .iter()
            .any(|managed_repo_absolute_path| {
                managed_repo_absolute_path == unmanaged_repo_absolute_path
            })
        {
            continue;
        }
        print_warning(&format!(
            "Found unmanaged repository: \"{}\"",
            path::path_as_string(unmanaged_repo_absolute_path)
        ));
    }

    Ok(!failures)
}
|
||||
|
||||
/// Finds repositories recursively, returning their path
|
||||
pub fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
|
||||
let mut repos = Vec::new();
|
||||
|
||||
let git_dir = path.join(".git");
|
||||
let git_worktree = path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY);
|
||||
|
||||
if git_dir.exists() || git_worktree.exists() {
|
||||
repos.push(path.to_path_buf());
|
||||
} else {
|
||||
match fs::read_dir(path) {
|
||||
Ok(contents) => {
|
||||
for content in contents {
|
||||
match content {
|
||||
Ok(entry) => {
|
||||
let path = entry.path();
|
||||
if path.is_symlink() {
|
||||
continue;
|
||||
}
|
||||
if path.is_dir() {
|
||||
match find_repo_paths(&path) {
|
||||
Ok(ref mut r) => repos.append(r),
|
||||
Err(error) => return Err(error),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("Error accessing directory: {}", e));
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!(
|
||||
"Failed to open \"{}\": {}",
|
||||
&path.display(),
|
||||
match e.kind() {
|
||||
std::io::ErrorKind::NotADirectory =>
|
||||
String::from("directory expected, but path is not a directory"),
|
||||
std::io::ErrorKind::NotFound => String::from("not found"),
|
||||
_ => format!("{:?}", e.kind()),
|
||||
}
|
||||
));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(repos)
|
||||
}
|
||||
|
||||
fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result<(), String> {
|
||||
let repo_path = root_path.join(&repo.fullname());
|
||||
let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);
|
||||
|
||||
let mut newly_created = false;
|
||||
|
||||
// Syncing a repository can have a few different flows, depending on the repository
|
||||
// that is to be cloned and the local directory:
|
||||
//
|
||||
// * If the local directory already exists, we have to make sure that it matches the
|
||||
// worktree configuration, as there is no way to convert. If the sync is supposed
|
||||
// to be worktree-aware, but the local directory is not, we abort. Note that we could
|
||||
// also automatically convert here. In any case, the other direction (converting a
|
||||
// worktree repository to non-worktree) cannot work, as we'd have to throw away the
|
||||
// worktrees.
|
||||
//
|
||||
// * If the local directory does not yet exist, we have to actually do something ;). If
|
||||
// no remote is specified, we just initialize a new repository (git init) and are done.
|
||||
//
|
||||
// If there are (potentially multiple) remotes configured, we have to clone. We assume
|
||||
// that the first remote is the canonical one that we do the first clone from. After
|
||||
// cloning, we just add the other remotes as usual (as if they were added to the config
|
||||
// afterwards)
|
||||
//
|
||||
// Branch handling:
|
||||
//
|
||||
// Handling the branches on checkout is a bit magic. For minimum surprises, we just set
|
||||
// up local tracking branches for all remote branches.
|
||||
if repo_path.exists()
|
||||
&& repo_path
|
||||
.read_dir()
|
||||
.map_err(|error| error.to_string())?
|
||||
.next()
|
||||
.is_some()
|
||||
{
|
||||
if repo.worktree_setup && !actual_git_directory.exists() {
|
||||
return Err(String::from(
|
||||
"Repo already exists, but is not using a worktree setup",
|
||||
));
|
||||
};
|
||||
} else if matches!(&repo.remotes, None) || repo.remotes.as_ref().unwrap().is_empty() {
|
||||
print_repo_action(
|
||||
&repo.name,
|
||||
"Repository does not have remotes configured, initializing new",
|
||||
);
|
||||
match repo::RepoHandle::init(&repo_path, repo.worktree_setup) {
|
||||
Ok(r) => {
|
||||
print_repo_success(&repo.name, "Repository created");
|
||||
Some(r)
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("Repository failed during init: {}", e));
|
||||
}
|
||||
};
|
||||
} else {
|
||||
let first = repo.remotes.as_ref().unwrap().first().unwrap();
|
||||
|
||||
match repo::clone_repo(first, &repo_path, repo.worktree_setup) {
|
||||
Ok(_) => {
|
||||
print_repo_success(&repo.name, "Repository successfully cloned");
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("Repository failed during clone: {}", e));
|
||||
}
|
||||
};
|
||||
|
||||
newly_created = true;
|
||||
}
|
||||
|
||||
let repo_handle = match repo::RepoHandle::open(&repo_path, repo.worktree_setup) {
|
||||
Ok(repo) => repo,
|
||||
Err(error) => {
|
||||
if !repo.worktree_setup && repo::RepoHandle::open(&repo_path, true).is_ok() {
|
||||
return Err(String::from(
|
||||
"Repo already exists, but is using a worktree setup",
|
||||
));
|
||||
} else {
|
||||
return Err(format!("Opening repository failed: {}", error));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if newly_created && repo.worktree_setup && init_worktree {
|
||||
match repo_handle.default_branch() {
|
||||
Ok(branch) => {
|
||||
worktree::add_worktree(&repo_path, &branch.name()?, None, false)?;
|
||||
}
|
||||
Err(_error) => print_repo_error(
|
||||
&repo.name,
|
||||
"Could not determine default branch, skipping worktree initializtion",
|
||||
),
|
||||
}
|
||||
}
|
||||
if let Some(remotes) = &repo.remotes {
|
||||
let current_remotes: Vec<String> = repo_handle
|
||||
.remotes()
|
||||
.map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;
|
||||
|
||||
for remote in remotes {
|
||||
let current_remote = repo_handle.find_remote(&remote.name)?;
|
||||
|
||||
match current_remote {
|
||||
Some(current_remote) => {
|
||||
let current_url = current_remote.url();
|
||||
|
||||
if remote.url != current_url {
|
||||
print_repo_action(
|
||||
&repo.name,
|
||||
&format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
|
||||
);
|
||||
if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
|
||||
return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
|
||||
};
|
||||
}
|
||||
}
|
||||
None => {
|
||||
print_repo_action(
|
||||
&repo.name,
|
||||
&format!(
|
||||
"Setting up new remote \"{}\" to \"{}\"",
|
||||
&remote.name, &remote.url
|
||||
),
|
||||
);
|
||||
if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
|
||||
return Err(format!(
|
||||
"Repository failed during setting the remotes: {}",
|
||||
e
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for current_remote in ¤t_remotes {
|
||||
if !remotes.iter().any(|r| &r.name == current_remote) {
|
||||
print_repo_action(
|
||||
&repo.name,
|
||||
&format!("Deleting remote \"{}\"", ¤t_remote,),
|
||||
);
|
||||
if let Err(e) = repo_handle.remote_delete(current_remote) {
|
||||
return Err(format!(
|
||||
"Repository failed during deleting remote \"{}\": {}",
|
||||
¤t_remote, e
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
|
||||
match is_worktree {
|
||||
false => path.to_path_buf(),
|
||||
true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
|
||||
}
|
||||
}
|
||||
780
src/worktree.rs
Normal file
780
src/worktree.rs
Normal file
@@ -0,0 +1,780 @@
|
||||
//! This handles worktrees for repositories. Some considerations to take care
|
||||
//! of:
|
||||
//!
|
||||
//! * Which branch to check out / create
|
||||
//! * Which commit to check out
|
||||
//! * Whether to track a remote branch, and which
|
||||
//!
|
||||
//! There are some general rules. The main goal is to do the least surprising thing
|
||||
//! in each situation, and to never change existing setups (e.g. tracking,
|
||||
//! branch states) except when explicitly told to. In 99% of all cases, the
|
||||
//! workflow will be quite straightforward.
|
||||
//!
|
||||
//! * The name of the worktree (and therefore the path) is **always** the same
|
||||
//! as the name of the branch.
|
||||
//! * Never modify existing local branches
|
||||
//! * Only modify tracking branches for existing local branches if explicitly
|
||||
//! requested
|
||||
//! * By default, do not do remote operations. This means that we do not do any
|
||||
//! tracking setup (but of course, the local branch can already have a
|
||||
//! tracking branch set up, which will just be left alone)
|
||||
//! * Be quite lax with finding a remote tracking branch (as using an existing
|
||||
//! branch is most likely preferred to creating a new branch)
|
||||
//!
|
||||
//! There are a few different options that can be given:
|
||||
//!
|
||||
//! * Explicit track (`--track`) and explicit no-track (`--no-track`)
|
||||
//! * A configuration may specify to enable tracking a remote branch by default
|
||||
//! * A configuration may specify a prefix for remote branches
|
||||
//!
|
||||
//! # How to handle the local branch?
|
||||
//!
|
||||
//! That one is easy: If a branch with the desired name already exists, all is
|
||||
//! well. If not, we create a new one.
|
||||
//!
|
||||
//! # Which commit should be checked out?
|
||||
//!
|
||||
//! The most important rule: If the local branch already existed, just leave it
|
||||
//! as it is. Only if a new branch is created do we need to answer the question
|
||||
//! which commit to set it to. Generally, we set the branch to whatever the
|
||||
//! "default" branch of the repository is (something like "main" or "master").
|
||||
//! But there are a few cases where we can use remote branches to make the
|
||||
//! result less surprising.
|
||||
//!
|
||||
//! First, if tracking is explicitly disabled, we still try to guess! But we
|
||||
//! *do* ignore `--track`, as this is how it's done everywhere else.
|
||||
//!
|
||||
//! As an example: If `origin/foobar` exists and we run `grm worktree add foobar
|
||||
//! --no-track`, we create a new worktree called `foobar` that's on the same
|
||||
//! state as `origin/foobar` (but we will not set up tracking, see below).
|
||||
//!
|
||||
//! If tracking is explicitly requested to a certain state, we use that remote
|
||||
//! branch. If it exists, easy. If not, no more guessing!
|
||||
//!
|
||||
//! Now, it's important to select the correct remote. In the easiest case, there
|
||||
//! is only one remote, so we just use that one. If there is more than one
|
||||
//! remote, we check whether there is a default remote configured via
|
||||
//! `track.default_remote`. If yes, we use that one. If not, we have to do the
|
||||
//! selection process below *for each of them*. If only one of them returns
|
||||
//! some branch to track, we use that one. If more than one remote returns
|
||||
//! information, we only use it if it's identical for each. Otherwise we bail,
|
||||
//! as there is no point in guessing.
|
||||
//!
|
||||
//! The commit selection process looks like this:
|
||||
//!
|
||||
//! * If a prefix is specified in the configuration, we look for
|
||||
//! `{remote}/{prefix}/{worktree_name}`
|
||||
//!
|
||||
//! * We look for `{remote}/{worktree_name}` (yes, this means that even when a
|
||||
//! prefix is configured, we use a branch *without* a prefix if one with
|
||||
//! prefix does not exist)
|
||||
//!
|
||||
//! Note that we may select different branches for different remotes when
|
||||
//! prefixes is used. If remote1 has a branch with a prefix and remote2 only has
|
||||
//! a branch *without* a prefix, we select them both when a prefix is used. This
|
||||
//! could lead to the following situation:
|
||||
//!
|
||||
//! * There is `origin/prefix/foobar` and `remote2/foobar`, with different
|
||||
//! states
|
||||
//! * You set `track.default_prefix = "prefix"` (and no default remote!)
|
||||
//! * You run `grm worktree add `prefix/foobar`
|
||||
//! * Instead of just picking `origin/prefix/foobar`, grm will complain because
|
||||
//! it also selected `remote2/foobar`.
|
||||
//!
|
||||
//! This is just emergent behavior of the logic above. Fixing it would require
|
||||
//! additional logic for that edge case. I assume that it's just so rare to get
|
||||
//! that behavior that it's acceptable for now.
|
||||
//!
|
||||
//! Now we either have a commit, we aborted, or we do not have commit. In the
|
||||
//! last case, as stated above, we check out the "default" branch.
|
||||
//!
|
||||
//! # The remote tracking branch
|
||||
//!
|
||||
//! First, the only remote operations we do is branch creation! It's
|
||||
//! unfortunately not possible to defer remote branch creation until the first
|
||||
//! `git push`, which would be ideal. The remote tracking branch has to already
|
||||
//! exist, so we have to do the equivalent of `git push --set-upstream` during
|
||||
//! worktree creation.
|
||||
//!
|
||||
//! Whether (and which) remote branch to track works like this:
|
||||
//!
|
||||
//! * If `--no-track` is given, we never track a remote branch, except when
|
||||
//! branch already has a tracking branch. So we'd be done already!
|
||||
//!
|
||||
//! * If `--track` is given, we always track this branch, regardless of anything
|
||||
//! else. If the branch exists, cool, otherwise we create it.
|
||||
//!
|
||||
//! If neither is given, we only set up tracking if requested in the
|
||||
//! configuration file (`track.default = true`)
|
||||
//!
|
||||
//! The rest of the process is similar to the commit selection above. The only
|
||||
//! difference is the remote selection. If there is only one, we use it, as
|
||||
//! before. Otherwise, we try to use `default_remote` from the configuration, if
|
||||
//! available. If not, we do not set up a remote tracking branch. It works like
|
||||
//! this:
|
||||
//!
|
||||
//! * If a prefix is specified in the configuration, we use
|
||||
//! `{remote}/{prefix}/{worktree_name}`
|
||||
//!
|
||||
//! * If no prefix is specified in the configuration, we use
|
||||
//! `{remote}/{worktree_name}`
|
||||
//!
|
||||
//! Now that we have a remote, we use the same process as above:
|
||||
//!
|
||||
//! * If a prefix is specified in the configuration, we use for
|
||||
//! `{remote}/{prefix}/{worktree_name}`
|
||||
//! * We use for `{remote}/{worktree_name}`
|
||||
//!
|
||||
//! ---
|
||||
//!
|
||||
//! All this means that in some weird situation, you may end up with the state
|
||||
//! of a remote branch while not actually tracking that branch. This can only
|
||||
//! happen in repositories with more than one remote. Imagine the following:
|
||||
//!
|
||||
//! The repository has two remotes (`remote1` and `remote2`) which have the
|
||||
//! exact same remote state. But there is no `default_remote` in the
|
||||
//! configuration (or no configuration at all). There is a remote branch
|
||||
//! `foobar`. As both `remote1/foobar` and `remote2/foobar` are the same, the new
|
||||
//! worktree will use that as the state of the new branch. But as `grm` cannot
|
||||
//! tell which remote branch to track, it will not set up remote tracking. This
|
||||
//! behavior may be a bit confusing, but first, there is no good way to resolve
|
||||
//! this, and second, the situation should be really rare (when having multiple
|
||||
//! remotes, you would generally have a `default_remote` configured).
|
||||
//!
|
||||
//! # Implementation
|
||||
//!
|
||||
//! To reduce the chance of bugs, the implementation uses the [typestate
|
||||
//! pattern](http://cliffle.com/blog/rust-typestate/). Here are the states we
|
||||
//! are moving through linearly:
|
||||
//!
|
||||
//! * Init
|
||||
//! * A local branch name is set
|
||||
//! * A local commit to set the new branch to is selected
|
||||
//! * A remote tracking branch is selected
|
||||
//! * The new branch is created with all the required settings
|
||||
//!
|
||||
//! Don't worry about the lifetime stuff: There is only one single lifetime, as
|
||||
//! everything (branches, commits) is derived from the single repo::Repo
|
||||
//! instance
|
||||
//!
|
||||
//! # Testing
|
||||
//!
|
||||
//! There are two types of input to the tests:
|
||||
//!
|
||||
//! 1) The parameters passed to `grm`, either via command line or via
|
||||
//! configuration file
|
||||
//! 2) The circumstances in the repository and remotes
|
||||
//!
|
||||
//! ## Parameters
|
||||
//!
|
||||
//! * The name of the worktree
|
||||
//! * Whether it contains slashes or not
|
||||
//! * Whether it is invalid
|
||||
//! * `--track` and `--no-track`
|
||||
//! * Whether there is a configuration file and what it contains
|
||||
//! * Whether `track.default` is enabled or disabled
|
||||
//! * Whether `track.default_remote_prefix` is there or missing
|
||||
//! * Whether `track.default_remote` is there or missing
|
||||
//! * Whether that remote exists or not
|
||||
//!
|
||||
//! ## Situations
|
||||
//!
|
||||
//! ### The local branch
|
||||
//!
|
||||
//! * Whether the branch already exists
|
||||
//! * Whether the branch has a remote tracking branch and whether it differs
|
||||
//! from the desired tracking branch (i.e. `--track` or config)
|
||||
//!
|
||||
//! ### Remotes
|
||||
//!
|
||||
//! * How many remotes there are, if any
|
||||
//! * If more than two remotes exist, whether their desired tracking branch
|
||||
//! differs
|
||||
//!
|
||||
//! ### The remote tracking branch branch
|
||||
//!
|
||||
//! * Whether a remote branch with the same name as the worktree exists
|
||||
//! * Whether a remote branch with the same name as the worktree plus prefix
|
||||
//! exists
|
||||
//!
|
||||
//! ## Outcomes
|
||||
//!
|
||||
//! We have to check the following afterwards:
|
||||
//!
|
||||
//! * Does the worktree exist in the correct location?
|
||||
//! * Does the local branch have the same name as the worktree?
|
||||
//! * Does the local branch have the correct commit?
|
||||
//! * Does the local branch track the correct remote branch?
|
||||
//! * Does that remote branch also exist?
|
||||
use std::cell::RefCell;
|
||||
use std::path::Path;
|
||||
|
||||
// use super::output::*;
|
||||
use super::repo;
|
||||
|
||||
/// Directory inside a worktree-setup repository that holds the main git data.
pub const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Worktree names must be usable as relative paths: no leading, trailing
    // or doubled slashes and no whitespace.
    #[test]
    fn invalid_worktree_names() {
        assert!(add_worktree(Path::new("/tmp/"), "/leadingslash", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "trailingslash/", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "//", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "test//test", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "test test", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "test\ttest", None, false).is_err());
    }
}
|
||||
|
||||
/// Typestate: start of the worktree-creation flow; nothing decided yet.
struct Init;
|
||||
|
||||
/// Typestate: the local branch name (= worktree name) has been fixed.
struct WithLocalBranchName<'a> {
    // Name of the local branch; identical to the worktree name.
    local_branch_name: String,
    /// Outer option: Is there a computed value?
    /// Inner option: Is there actually a branch?
    ///
    /// None => No computed value yet
    /// Some(None) => No branch
    /// Some(Some(_)) => Branch
    // RefCell: the lookup is computed lazily from `&self` methods.
    local_branch: RefCell<Option<Option<repo::Branch<'a>>>>,
}
|
||||
|
||||
/// Typestate: the commit target for the local branch has been selected.
struct WithLocalTargetSelected<'a> {
    local_branch_name: String,
    // The already-existing local branch, if any.
    local_branch: Option<repo::Branch<'a>>,
    // Commit for a newly created branch; must be `Some` whenever
    // `local_branch` is `None` (see the TECHDEBT note in `create`).
    target_commit: Option<Box<repo::Commit<'a>>>,
}
|
||||
|
||||
/// Typestate: the remote tracking branch (if any) has been chosen.
struct WithRemoteTrackingBranch<'a> {
    local_branch_name: String,
    local_branch: Option<repo::Branch<'a>>,
    target_commit: Option<Box<repo::Commit<'a>>>,
    // `(remote_name, remote_branch_name)` to track, if any.
    remote_tracking_branch: Option<(String, String)>,
    // Optional prefix prepended to the remote branch name.
    prefix: Option<String>,
}
|
||||
|
||||
/// A worktree being created, parameterized over its typestate `S`.
struct Worktree<'a, S: WorktreeState> {
    repo: &'a repo::RepoHandle,
    // State-specific data; grows as the flow progresses.
    extra: S,
}
|
||||
|
||||
impl<'a> WithLocalBranchName<'a> {
    /// Creates the state with the given branch name. The branch lookup is
    /// deferred: the cache starts out empty (`RefCell::new(None)`).
    fn new(name: String) -> Self {
        Self {
            local_branch_name: name,
            local_branch: RefCell::new(None),
        }
    }
}
|
||||
|
||||
/// Marker trait for the typestates a [`Worktree`] moves through.
trait WorktreeState {}

impl WorktreeState for Init {}
impl<'a> WorktreeState for WithLocalBranchName<'a> {}
impl<'a> WorktreeState for WithLocalTargetSelected<'a> {}
impl<'a> WorktreeState for WithRemoteTrackingBranch<'a> {}
|
||||
|
||||
impl<'a> Worktree<'a, Init> {
    /// Starts the worktree-creation flow for `repo`.
    fn new(repo: &'a repo::RepoHandle) -> Self {
        Self {
            repo,
            extra: Init {},
        }
    }

    /// Transition: fix the name of the local branch (and thus the worktree).
    fn set_local_branch_name(self, name: &str) -> Worktree<'a, WithLocalBranchName<'a>> {
        Worktree::<WithLocalBranchName> {
            repo: self.repo,
            extra: WithLocalBranchName::new(name.to_string()),
        }
    }
}
|
||||
|
||||
impl<'a, 'b> Worktree<'a, WithLocalBranchName<'b>>
|
||||
where
|
||||
'a: 'b,
|
||||
{
|
||||
fn check_local_branch(&self) {
|
||||
let mut branchref = self.extra.local_branch.borrow_mut();
|
||||
if branchref.is_none() {
|
||||
let branch = self.repo.find_local_branch(&self.extra.local_branch_name);
|
||||
*branchref = Some(if let Ok(branch) = branch {
|
||||
Some(branch)
|
||||
} else {
|
||||
None
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn local_branch_already_exists(&self) -> bool {
|
||||
if let Some(branch) = &*self.extra.local_branch.borrow() {
|
||||
return branch.is_some();
|
||||
}
|
||||
self.check_local_branch();
|
||||
// As we just called `check_local_branch`, we can be sure that
|
||||
// `self.extra.local_branch` is set to some `Some` value
|
||||
(*self.extra.local_branch.borrow())
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.is_some()
|
||||
}
|
||||
|
||||
fn select_commit(
|
||||
self,
|
||||
commit: Option<Box<repo::Commit<'b>>>,
|
||||
) -> Worktree<'a, WithLocalTargetSelected<'b>> {
|
||||
self.check_local_branch();
|
||||
|
||||
Worktree::<'a, WithLocalTargetSelected> {
|
||||
repo: self.repo,
|
||||
extra: WithLocalTargetSelected::<'b> {
|
||||
local_branch_name: self.extra.local_branch_name,
|
||||
// As we just called `check_local_branch`, we can be sure that
|
||||
// `self.extra.local_branch` is set to some `Some` value
|
||||
local_branch: self.extra.local_branch.into_inner().unwrap(),
|
||||
target_commit: commit,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Worktree<'a, WithLocalTargetSelected<'a>> {
    /// Transition: record which remote branch to track (if any) together
    /// with an optional prefix for the remote branch name.
    fn set_remote_tracking_branch(
        self,
        branch: Option<(&str, &str)>,
        prefix: Option<&str>,
    ) -> Worktree<'a, WithRemoteTrackingBranch<'a>> {
        Worktree::<WithRemoteTrackingBranch> {
            repo: self.repo,
            extra: WithRemoteTrackingBranch {
                local_branch_name: self.extra.local_branch_name,
                local_branch: self.extra.local_branch,
                target_commit: self.extra.target_commit,
                // Store owned copies of (remote name, branch name).
                remote_tracking_branch: branch.map(|(s1, s2)| (s1.to_string(), s2.to_string())),
                prefix: prefix.map(|prefix| prefix.to_string()),
            },
        }
    }
}
|
||||
|
||||
impl<'a> Worktree<'a, WithRemoteTrackingBranch<'a>> {
|
||||
fn create(self, directory: &Path) -> Result<Option<Vec<String>>, String> {
|
||||
let mut warnings: Vec<String> = vec![];
|
||||
|
||||
let mut branch = if let Some(branch) = self.extra.local_branch {
|
||||
branch
|
||||
} else {
|
||||
self.repo.create_branch(
|
||||
&self.extra.local_branch_name,
|
||||
// TECHDEBT
|
||||
// We must not call this with `Some()` without a valid target.
|
||||
// I'm sure this can be improved, just not sure how.
|
||||
&self.extra.target_commit.unwrap(),
|
||||
)?
|
||||
};
|
||||
|
||||
if let Some((remote_name, remote_branch_name)) = self.extra.remote_tracking_branch {
|
||||
let remote_branch_with_prefix = if let Some(ref prefix) = self.extra.prefix {
|
||||
if let Ok(remote_branch) = self
|
||||
.repo
|
||||
.find_remote_branch(&remote_name, &format!("{prefix}/{remote_branch_name}"))
|
||||
{
|
||||
Some(remote_branch)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let remote_branch_without_prefix = if let Ok(remote_branch) = self
|
||||
.repo
|
||||
.find_remote_branch(&remote_name, &remote_branch_name)
|
||||
{
|
||||
Some(remote_branch)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let remote_branch = if let Some(ref _prefix) = self.extra.prefix {
|
||||
remote_branch_with_prefix
|
||||
} else {
|
||||
remote_branch_without_prefix
|
||||
};
|
||||
|
||||
match remote_branch {
|
||||
Some(remote_branch) => {
|
||||
if branch.commit()?.id().hex_string()
|
||||
!= remote_branch.commit()?.id().hex_string()
|
||||
{
|
||||
warnings.push(format!("The local branch \"{}\" and the remote branch \"{}/{}\" differ. Make sure to push/pull afterwards!", &self.extra.local_branch_name, &remote_name, &remote_branch_name));
|
||||
}
|
||||
|
||||
branch.set_upstream(&remote_name, &remote_branch.basename()?)?;
|
||||
}
|
||||
None => {
|
||||
let mut remote = match self.repo.find_remote(&remote_name)? {
|
||||
Some(remote) => remote,
|
||||
None => return Err(format!("Remote \"{remote_name}\" not found")),
|
||||
};
|
||||
|
||||
if !remote.is_pushable()? {
|
||||
return Err(format!(
|
||||
"Cannot push to non-pushable remote \"{remote_name}\""
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(prefix) = self.extra.prefix {
|
||||
remote.push(
|
||||
&self.extra.local_branch_name,
|
||||
&format!("{}/{}", prefix, remote_branch_name),
|
||||
self.repo,
|
||||
)?;
|
||||
|
||||
branch.set_upstream(
|
||||
&remote_name,
|
||||
&format!("{}/{}", prefix, remote_branch_name),
|
||||
)?;
|
||||
} else {
|
||||
remote.push(
|
||||
&self.extra.local_branch_name,
|
||||
&remote_branch_name,
|
||||
self.repo,
|
||||
)?;
|
||||
|
||||
branch.set_upstream(&remote_name, &remote_branch_name)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We have to create subdirectories first, otherwise adding the worktree
|
||||
// will fail
|
||||
if self.extra.local_branch_name.contains('/') {
|
||||
let path = Path::new(&self.extra.local_branch_name);
|
||||
if let Some(base) = path.parent() {
|
||||
// This is a workaround of a bug in libgit2 (?)
|
||||
//
|
||||
// When *not* doing this, we will receive an error from the `Repository::worktree()`
|
||||
// like this:
|
||||
//
|
||||
// > failed to make directory '/{repo}/.git-main-working-tree/worktrees/dir/test
|
||||
//
|
||||
// This is a discrepancy between the behavior of libgit2 and the
|
||||
// git CLI when creating worktrees with slashes:
|
||||
//
|
||||
// The git CLI will create the worktree's configuration directory
|
||||
// inside {git_dir}/worktrees/{last_path_component}. Look at this:
|
||||
//
|
||||
// ```
|
||||
// $ git worktree add 1/2/3 -b 1/2/3
|
||||
// $ ls .git/worktrees
|
||||
// 3
|
||||
// ```
|
||||
//
|
||||
// Interesting: When adding a worktree with a different name but the
|
||||
// same final path component, git starts adding a counter suffix to
|
||||
// the worktree directories:
|
||||
//
|
||||
// ```
|
||||
// $ git worktree add 1/3/3 -b 1/3/3
|
||||
// $ git worktree add 1/4/3 -b 1/4/3
|
||||
// $ ls .git/worktrees
|
||||
// 3
|
||||
// 31
|
||||
// 32
|
||||
// ```
|
||||
//
|
||||
// I *guess* that the mapping back from the worktree directory under .git to the actual
|
||||
// worktree directory is done via the `gitdir` file inside `.git/worktrees/{worktree}.
|
||||
// This means that the actual directory would not matter. You can verify this by
|
||||
// just renaming it:
|
||||
//
|
||||
// ```
|
||||
// $ mv .git/worktrees/3 .git/worktrees/foobar
|
||||
// $ git worktree list
|
||||
// /tmp/ fcc8a2a7 [master]
|
||||
// /tmp/1/2/3 fcc8a2a7 [1/2/3]
|
||||
// /tmp/1/3/3 fcc8a2a7 [1/3/3]
|
||||
// /tmp/1/4/3 fcc8a2a7 [1/4/3]
|
||||
// ```
|
||||
//
|
||||
// => Still works
|
||||
//
|
||||
// Anyway, libgit2 does not do this: It tries to create the worktree
|
||||
// directory inside .git with the exact name of the worktree, including
|
||||
// any slashes. It should be this code:
|
||||
//
|
||||
// https://github.com/libgit2/libgit2/blob/f98dd5438f8d7bfd557b612fdf1605b1c3fb8eaf/src/libgit2/worktree.c#L346
|
||||
//
|
||||
// As a workaround, we can create the base directory manually for now.
|
||||
//
|
||||
// Tracking upstream issue: https://github.com/libgit2/libgit2/issues/6327
|
||||
std::fs::create_dir_all(
|
||||
directory
|
||||
.join(GIT_MAIN_WORKTREE_DIRECTORY)
|
||||
.join("worktrees")
|
||||
.join(base),
|
||||
)
|
||||
.map_err(|error| error.to_string())?;
|
||||
std::fs::create_dir_all(base).map_err(|error| error.to_string())?;
|
||||
}
|
||||
}
|
||||
|
||||
self.repo.new_worktree(
|
||||
&self.extra.local_branch_name,
|
||||
&directory.join(&self.extra.local_branch_name),
|
||||
&branch,
|
||||
)?;
|
||||
|
||||
Ok(if warnings.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(warnings)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Validates a worktree (branch) name.
///
/// A valid name is non-empty, must never start or end with a slash, cannot
/// contain two consecutive slashes, and cannot contain whitespace.
///
/// # Errors
///
/// Returns a human-readable message describing the first violated rule.
fn validate_worktree_name(name: &str) -> Result<(), String> {
    // An empty name would otherwise pass every check below, yet it can never
    // name a branch or a checkout directory.
    if name.is_empty() {
        return Err(String::from("Invalid worktree name: it cannot be empty"));
    }

    if name.starts_with('/') || name.ends_with('/') {
        return Err(format!(
            "Invalid worktree name: {name}. It cannot start or end with a slash"
        ));
    }

    if name.contains("//") {
        return Err(format!(
            "Invalid worktree name: {name}. It cannot contain two consecutive slashes"
        ));
    }

    if name.contains(char::is_whitespace) {
        return Err(format!(
            "Invalid worktree name: {name}. It cannot contain whitespace"
        ));
    }

    Ok(())
}
|
||||
|
||||
// TECHDEBT
|
||||
//
|
||||
// Instead of opening the repo & reading configuration inside the function, it
|
||||
// should be done by the caller and given as a parameter
|
||||
pub fn add_worktree(
|
||||
directory: &Path,
|
||||
name: &str,
|
||||
track: Option<(&str, &str)>,
|
||||
no_track: bool,
|
||||
) -> Result<Option<Vec<String>>, String> {
|
||||
let mut warnings: Vec<String> = vec![];
|
||||
|
||||
validate_worktree_name(name)?;
|
||||
|
||||
let repo = repo::RepoHandle::open(directory, true).map_err(|error| match error.kind {
|
||||
repo::RepoErrorKind::NotFound => {
|
||||
String::from("Current directory does not contain a worktree setup")
|
||||
}
|
||||
_ => format!("Error opening repo: {}", error),
|
||||
})?;
|
||||
|
||||
let remotes = &repo.remotes()?;
|
||||
|
||||
let config = repo::read_worktree_root_config(directory)?;
|
||||
|
||||
if repo.find_worktree(name).is_ok() {
|
||||
return Err(format!("Worktree {} already exists", &name));
|
||||
}
|
||||
|
||||
let track_config = config.and_then(|config| config.track);
|
||||
let prefix = track_config
|
||||
.as_ref()
|
||||
.and_then(|track| track.default_remote_prefix.as_ref());
|
||||
let enable_tracking = track_config.as_ref().map_or(false, |track| track.default);
|
||||
let default_remote = track_config
|
||||
.as_ref()
|
||||
.map(|track| track.default_remote.clone());
|
||||
|
||||
// Note that we have to define all variables that borrow from `repo`
|
||||
// *first*, otherwise we'll receive "borrowed value does not live long
|
||||
// enough" errors. This is due to the `repo` reference inside `Worktree` that is
|
||||
// passed through each state type.
|
||||
//
|
||||
// The `commit` variable will be dropped at the end of the scope, together with all
|
||||
// worktree variables. It will be done in the opposite direction of delcaration (FILO).
|
||||
//
|
||||
// So if we define `commit` *after* the respective worktrees, it will be dropped first while
|
||||
// still being borrowed by `Worktree`.
|
||||
let default_branch_head = repo.default_branch()?.commit_owned()?;
|
||||
|
||||
let worktree = Worktree::<Init>::new(&repo).set_local_branch_name(name);
|
||||
|
||||
let get_remote_head = |remote_name: &str,
|
||||
remote_branch_name: &str|
|
||||
-> Result<Option<Box<repo::Commit>>, String> {
|
||||
if let Ok(remote_branch) = repo.find_remote_branch(remote_name, remote_branch_name) {
|
||||
Ok(Some(Box::new(remote_branch.commit_owned()?)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
};
|
||||
|
||||
let worktree = if worktree.local_branch_already_exists() {
|
||||
worktree.select_commit(None)
|
||||
} else if let Some((remote_name, remote_branch_name)) = if no_track { None } else { track } {
|
||||
if let Ok(remote_branch) = repo.find_remote_branch(remote_name, remote_branch_name) {
|
||||
worktree.select_commit(Some(Box::new(remote_branch.commit_owned()?)))
|
||||
} else {
|
||||
worktree.select_commit(Some(Box::new(default_branch_head)))
|
||||
}
|
||||
} else {
|
||||
match remotes.len() {
|
||||
0 => worktree.select_commit(Some(Box::new(default_branch_head))),
|
||||
1 => {
|
||||
let remote_name = &remotes[0];
|
||||
let commit: Option<Box<repo::Commit>> = ({
|
||||
if let Some(prefix) = prefix {
|
||||
get_remote_head(remote_name, &format!("{prefix}/{name}"))?
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.or(get_remote_head(remote_name, name)?)
|
||||
.or_else(|| Some(Box::new(default_branch_head)));
|
||||
|
||||
worktree.select_commit(commit)
|
||||
}
|
||||
_ => {
|
||||
let commit = if let Some(ref default_remote) = default_remote {
|
||||
if let Some(ref prefix) = prefix {
|
||||
if let Ok(remote_branch) = repo
|
||||
.find_remote_branch(default_remote, &format!("{prefix}/{name}"))
|
||||
{
|
||||
Some(Box::new(remote_branch.commit_owned()?))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
.or({
|
||||
if let Ok(remote_branch) =
|
||||
repo.find_remote_branch(default_remote, name)
|
||||
{
|
||||
Some(Box::new(remote_branch.commit_owned()?))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}.or({
|
||||
let mut commits = vec![];
|
||||
for remote_name in remotes.iter() {
|
||||
let remote_head: Option<Box<repo::Commit>> = ({
|
||||
if let Some(ref prefix) = prefix {
|
||||
if let Ok(remote_branch) = repo.find_remote_branch(
|
||||
remote_name,
|
||||
&format!("{prefix}/{name}"),
|
||||
) {
|
||||
Some(Box::new(remote_branch.commit_owned()?))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.or({
|
||||
if let Ok(remote_branch) =
|
||||
repo.find_remote_branch(remote_name, name)
|
||||
{
|
||||
Some(Box::new(remote_branch.commit_owned()?))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.or(None);
|
||||
commits.push(remote_head);
|
||||
}
|
||||
|
||||
let mut commits = commits
|
||||
.into_iter()
|
||||
.flatten()
|
||||
// have to collect first because the `flatten()` return
|
||||
// typedoes not implement `windows()`
|
||||
.collect::<Vec<Box<repo::Commit>>>();
|
||||
// `flatten()` takes care of `None` values here. If all
|
||||
// remotes return None for the branch, we do *not* abort, we
|
||||
// continue!
|
||||
if commits.is_empty() {
|
||||
Some(Box::new(default_branch_head))
|
||||
} else if commits.len() == 1 {
|
||||
Some(commits.swap_remove(0))
|
||||
} else if commits.windows(2).any(|window| {
|
||||
let c1 = &window[0];
|
||||
let c2 = &window[1];
|
||||
(*c1).id().hex_string() != (*c2).id().hex_string()
|
||||
}) {
|
||||
warnings.push(
|
||||
// TODO this should also include the branch
|
||||
// name. BUT: the branch name may be different
|
||||
// between the remotes. Let's just leave it
|
||||
// until I get around to fix that inconsistency
|
||||
// (see module-level doc about), which might be
|
||||
// never, as it's such a rare edge case.
|
||||
"Branch exists on multiple remotes, but they deviate. Selecting default branch instead".to_string()
|
||||
);
|
||||
Some(Box::new(default_branch_head))
|
||||
} else {
|
||||
Some(commits.swap_remove(0))
|
||||
}
|
||||
});
|
||||
worktree.select_commit(commit)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let worktree = if no_track {
|
||||
worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str()))
|
||||
} else if let Some((remote_name, remote_branch_name)) = track {
|
||||
worktree.set_remote_tracking_branch(
|
||||
Some((remote_name, remote_branch_name)),
|
||||
None, // Always disable prefixing when explicitly given --track
|
||||
)
|
||||
} else if !enable_tracking {
|
||||
worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str()))
|
||||
} else {
|
||||
match remotes.len() {
|
||||
0 => worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str())),
|
||||
1 => worktree
|
||||
.set_remote_tracking_branch(Some((&remotes[0], name)), prefix.map(|s| s.as_str())),
|
||||
_ => {
|
||||
if let Some(default_remote) = default_remote {
|
||||
worktree.set_remote_tracking_branch(
|
||||
Some((&default_remote, name)),
|
||||
prefix.map(|s| s.as_str()),
|
||||
)
|
||||
} else {
|
||||
worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str()))
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
worktree.create(directory)?;
|
||||
|
||||
Ok(if warnings.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(warnings)
|
||||
})
|
||||
}
|
||||
@@ -8,13 +8,13 @@ use helpers::*;
|
||||
fn open_empty_repo() {
|
||||
let tmpdir = init_tmpdir();
|
||||
assert!(matches!(
|
||||
open_repo(tmpdir.path(), true),
|
||||
RepoHandle::open(tmpdir.path(), true),
|
||||
Err(RepoError {
|
||||
kind: RepoErrorKind::NotFound
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
open_repo(tmpdir.path(), false),
|
||||
RepoHandle::open(tmpdir.path(), false),
|
||||
Err(RepoError {
|
||||
kind: RepoErrorKind::NotFound
|
||||
})
|
||||
@@ -25,7 +25,7 @@ fn open_empty_repo() {
|
||||
#[test]
|
||||
fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let tmpdir = init_tmpdir();
|
||||
let repo = init_repo(tmpdir.path(), false)?;
|
||||
let repo = RepoHandle::init(tmpdir.path(), false)?;
|
||||
assert!(!repo.is_bare());
|
||||
assert!(repo.is_empty()?);
|
||||
cleanup_tmpdir(tmpdir);
|
||||
@@ -35,7 +35,7 @@ fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
|
||||
#[test]
|
||||
fn create_repo_with_worktree() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let tmpdir = init_tmpdir();
|
||||
let repo = init_repo(tmpdir.path(), true)?;
|
||||
let repo = RepoHandle::init(tmpdir.path(), true)?;
|
||||
assert!(repo.is_bare());
|
||||
assert!(repo.is_empty()?);
|
||||
cleanup_tmpdir(tmpdir);
|
||||
|
||||
Reference in New Issue
Block a user