153 Commits

Author SHA1 Message Date
0746be904a Merge branch 'develop' 2022-07-21 20:05:57 +02:00
53c2ee404c Release v0.7.6 2022-07-21 20:05:57 +02:00
bd694c3f7d just: Add pushall target for easier releases 2022-07-21 20:05:22 +02:00
95e9fcbffe Cargo.lock: Updating pin-project v1.0.10 -> v1.0.11 2022-07-21 19:49:53 +02:00
98665a3231 Cargo.lock: Updating openssl-sys v0.9.74 -> v0.9.75 2022-07-21 19:49:48 +02:00
7a51ad135f Cargo.lock: Updating rustversion v1.0.7 -> v1.0.8 2022-07-21 19:49:46 +02:00
e386935bc7 Cargo.lock: Updating bytes v1.1.0 -> v1.2.0 2022-07-21 19:49:44 +02:00
c62562e6f0 dependencies: Update serde_yaml to 0.8.26 2022-07-21 19:49:43 +02:00
00e37996b7 dependencies: Update regex to 1.6.0 2022-07-21 19:49:43 +02:00
a7e2c61984 dependencies: Update clap to 3.2.14 2022-07-21 19:49:43 +02:00
58919b2d58 dependencies: Update serde to 1.0.140 2022-07-21 19:49:42 +02:00
dd36eb886f Merge pull request #42 from hakoerber/dependabot/cargo/openssl-src-111.22.01.1.1q
build(deps): bump openssl-src from 111.21.0+1.1.1p to 111.22.0+1.1.1q
2022-07-07 08:01:05 +02:00
dependabot[bot]
d2e01db0ae build(deps): bump openssl-src from 111.21.0+1.1.1p to 111.22.0+1.1.1q
Bumps [openssl-src](https://github.com/alexcrichton/openssl-src-rs) from 111.21.0+1.1.1p to 111.22.0+1.1.1q.
- [Release notes](https://github.com/alexcrichton/openssl-src-rs/releases)
- [Commits](https://github.com/alexcrichton/openssl-src-rs/commits)

---
updated-dependencies:
- dependency-name: openssl-src
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-07 05:55:19 +00:00
bfd7b01ea4 Use en_US for spelling 2022-06-30 20:33:51 +02:00
da7a499da0 Merge branch 'develop' 2022-06-30 20:26:24 +02:00
64965c32dd Release v0.7.5 2022-06-30 20:26:24 +02:00
3207bdfdfb Add wait helper to Justfile 2022-06-30 20:02:36 +02:00
d8dd604174 Use safer method to remove empty directory 2022-06-30 19:59:46 +02:00
7ca9459675 Update release script to not run "just check" 2022-06-30 19:58:04 +02:00
989b0cdcce e2e: Refactor worktree delete removal tests 2022-06-30 19:56:29 +02:00
64d8397092 Remove debug output 2022-06-30 19:56:22 +02:00
a1b054a672 e2e: Fix method name 2022-06-30 19:56:22 +02:00
193c96c5aa e2e: Check for stdout on "worktree delete" 2022-06-30 19:56:22 +02:00
ee973432be Update documentation 2022-06-30 19:34:07 +02:00
38d0252101 dependencies: Update clap to 3.2.8 2022-06-30 19:17:19 +02:00
280048264e Cargo.lock: Updating smallvec v1.8.1 -> v1.9.0 2022-06-30 19:08:03 +02:00
129111273d Add Justfile target for release 2022-06-30 19:08:03 +02:00
d62a19d741 Do not update dependencies on each release 2022-06-30 19:08:03 +02:00
e34a6243c0 Add pretection against accidential 1.0 release 2022-06-30 19:08:03 +02:00
4464bb607b Fix usage output of release.sh 2022-06-30 19:08:03 +02:00
48fa888f9b Print each unmanaged repo only once 2022-06-30 19:08:03 +02:00
b4eafd0b41 Merge branch 'develop' 2022-06-29 23:58:31 +02:00
fa83063c61 Release v0.7.4 2022-06-29 23:58:31 +02:00
7d8fbb844e Properly handle deletion of nested worktrees 2022-06-29 23:40:23 +02:00
494c6ecb3e Cargo.lock: Updating linked-hash-map v0.5.4 -> v0.5.6 2022-06-29 23:36:50 +02:00
91a37cb12d Cargo.lock: Updating smallvec v1.8.0 -> v1.8.1 2022-06-29 23:36:46 +02:00
4e21a3daad dependencies: Update serde_json to 1.0.82 2022-06-29 23:34:55 +02:00
0e9c8d0c01 dependencies: Update clap to 3.2.7 2022-06-29 23:34:55 +02:00
512de5e187 e2e: Reduce number of tests by removing redundant ones 2022-06-29 22:47:04 +02:00
f027191896 Update worktree handling
That's a big one, see the module-level comment for details.
2022-06-23 19:21:05 +02:00
ee44fa40fd Add method to get owned commit of branch 2022-06-23 19:21:05 +02:00
e78dcf471a Print warning when giving --track and --no-track 2022-06-23 19:21:05 +02:00
056480f65a e2e: Update test for worktree adding 2022-06-23 19:21:05 +02:00
3eabc0e8f8 e2e: Update test for invalid remote name 2022-06-23 19:21:05 +02:00
d7ab3c4d6b e2e: Remove unnecessary output 2022-06-23 19:21:05 +02:00
09ce9f043e e2e: Add test case for invalid tracks 2022-06-23 19:21:05 +02:00
eac22148c5 e2e: Move invalid subdirectory test 2022-06-23 19:21:05 +02:00
92ec2e1a2d e2e: Test worktree names with whitespace 2022-06-23 19:21:05 +02:00
88961e1c6b e2e: Add caching to git repositories
It's very expensive to create new repositories from scratch. To avoid
this, a new repo & remotes are only created if necessary (depending on a
cache key given on request). If not created, they are simply copied from
a stored, clean repository / remote.
2022-06-23 19:21:05 +02:00
8c384741b3 e2e: Fix warning about default branch name 2022-06-23 19:00:22 +02:00
2053512559 e2e: Print stdout/stderr on error 2022-06-23 18:58:13 +02:00
ad7ef9277e e2e: Use pipefail for test scripts 2022-06-23 18:57:58 +02:00
95da48b5e6 e2e: Don't install recommended packages in docker 2022-06-23 18:56:35 +02:00
664cfb8965 e2e: Exit on first test error 2022-06-23 18:55:39 +02:00
ba4240720c Use static binary for e2e tests 2022-06-23 18:55:19 +02:00
ec04618a73 Use release builds for e2e tests 2022-06-23 18:54:49 +02:00
6dc298146a Cargo.lock: Updating openssl-src v111.20.0+1.1.1o -> v111.21.0+1.1.1p 2022-06-23 18:47:40 +02:00
09606cfc27 Cargo.lock: Updating crossbeam-utils v0.8.9 -> v0.8.10 2022-06-23 18:47:32 +02:00
465f877d6a Cargo.lock: Updating mio v0.8.3 -> v0.8.4 2022-06-23 18:47:26 +02:00
763e014b44 dependencies: Update clap to 3.2.6 2022-06-23 18:47:21 +02:00
474e0b60f9 Cargo.lock: Updating crossbeam-utils v0.8.8 -> v0.8.9 2022-06-17 02:25:41 +02:00
10af4d7448 Cargo.lock: Updating strum_macros v0.24.1 -> v0.24.0 2022-06-17 02:25:39 +02:00
94bfe971b3 Add FUNDING.yml 2022-06-17 02:24:15 +02:00
b77c442f56 Forbid unsafe code 2022-06-17 02:24:15 +02:00
a3f9c9fda1 e2e: Remove redundant test 2022-06-17 02:24:15 +02:00
2a0a591194 e2e: Add test for invalid worktree names 2022-06-17 02:24:15 +02:00
23526ae62b e2e: Update tests for worktree subdirectory handling 2022-06-17 02:24:15 +02:00
addff12c17 Run e2e tests again dynamically linked dev binary
This makes the build much faster.
2022-06-17 01:50:01 +02:00
c56765ce26 Match branches with worktrees always, even with slashes 2022-06-17 01:50:01 +02:00
d18c49982e Merge branch 'develop' 2022-06-16 00:55:13 +02:00
58db521b5b Release v0.7.3 2022-06-16 00:55:13 +02:00
c21fb5813b just: Remove redunant commands from check target 2022-06-16 00:39:57 +02:00
33a5a1a262 Add short doc snipper about "just check" 2022-06-16 00:39:43 +02:00
df8e69bce2 Enable autoformatting for shell scripts 2022-06-16 00:39:31 +02:00
58fdcfba9f Enable linting for shell scripts 2022-06-16 00:32:16 +02:00
27ef86c1b4 forge: Use "origin" as the default remote name
Close #33
2022-06-15 20:49:15 +02:00
9fc34e6989 just: Add clean target 2022-06-15 20:39:54 +02:00
4b79b6dd1d just: Update targets for static builds 2022-06-15 20:39:54 +02:00
d0cbc2f985 forge: Add option to specify remote name
Close #32
2022-06-15 20:39:54 +02:00
d53e28668b Cargo.lock: Updating http v0.2.7 -> v0.2.8 2022-06-15 20:39:54 +02:00
0b8896d11d Cargo.lock: Updating getrandom v0.2.6 -> v0.2.7 2022-06-15 20:39:54 +02:00
8c0c3ad169 dependencies: Update clap to 3.2.5 2022-06-15 20:39:54 +02:00
aebed5639d Add Max to contributors 2022-06-14 09:37:51 +02:00
4514de9ff5 Add release script 2022-06-14 00:35:03 +02:00
31b9757ef3 Merge branch 'develop' 2022-06-14 00:32:08 +02:00
defb3d1b7d Release v0.7.2 2022-06-14 00:32:08 +02:00
e6b654e990 Cargo.lock: Updating libz-sys v1.1.6 -> v1.1.8 2022-06-14 00:15:15 +02:00
29ddc647e3 dependencies: Update comfy-table to 6.0.0 2022-06-14 00:15:15 +02:00
67c3e40108 just: Update check target to be pre-commit ready 2022-06-14 00:15:15 +02:00
7363ed48b4 Add clippy suggestions 2022-06-14 00:15:15 +02:00
96943c1483 Use new cargo fmt 2022-06-14 00:15:15 +02:00
9f7195282f Enable output in rust unit tests 2022-06-14 00:15:15 +02:00
30480fb568 Update handling of branches on worktree setup 2022-06-14 00:15:15 +02:00
c3aaea3332 Quote branch name on output 2022-06-14 00:15:15 +02:00
fad6f71876 Improve default branch guessing 2022-06-14 00:15:15 +02:00
73158e3d47 Print ok-ish stuff to stdout 2022-06-14 00:15:15 +02:00
6f4ae88260 Add some comments about repo syncing 2022-06-14 00:15:15 +02:00
a8f8803a92 Do not fail on empty clone target 2022-06-14 00:15:15 +02:00
581a513ebd Initialize local branches on clone 2022-06-14 00:15:15 +02:00
f1e212ead9 Add function to get all remote branches 2022-06-14 00:15:15 +02:00
bc3001a4e6 Add function to get basename of branch 2022-06-14 00:15:15 +02:00
c4fd1d0452 Refactor default_branch() for readability 2022-06-14 00:15:15 +02:00
1a65a163a1 Use opaque type for auth token
So we cannot accidentially output it, as it does not implement
`Display`.
2022-06-14 00:15:15 +02:00
4f68a563c6 providers: Use references for field access 2022-06-14 00:15:15 +02:00
e04e8ceeeb Use opaque type for auth token
So we cannot accidentially output it, as it does not implement
`Display`.
2022-06-14 00:15:15 +02:00
Max Volk
b2542b341e Reword some of the documentation and spelling fixes 2022-06-14 00:15:15 +02:00
d402c1f8ce Remove accidentially added file 2022-05-28 22:06:52 +02:00
e75aead3a8 Release v0.7.1 2022-05-27 23:37:54 +02:00
dca2b3c9b4 Justfile: Add build targets 2022-05-27 23:37:54 +02:00
a71711978e Make sure we do not expose secrets in output
This is using the RFC-8959 URI scheme to detect secrets. Thanks
hackernews for the idea ;)
2022-05-27 23:37:54 +02:00
90d188e01e Back to pure docker for testing 2022-05-27 23:37:54 +02:00
2e6166e807 Link binary statically with musl 2022-05-27 23:37:54 +02:00
8aaaa55d45 gitlab: Add alternate error field in JSON response 2022-05-27 23:37:54 +02:00
df39bb3076 gitlab: Fix detection of private repositories 2022-05-27 23:37:54 +02:00
bc3d4e1c49 Properly escape URL parameters 2022-05-27 23:37:54 +02:00
32eb4676ee Restructure into smaller modules 2022-05-27 23:37:54 +02:00
5d7480f7a4 Merge branch 'develop' 2022-05-26 19:13:02 +02:00
2d34ba1bd7 Fix forge documentation 2022-05-26 19:11:19 +02:00
5b78c3ba9e Release v0.7.0 2022-05-26 19:09:27 +02:00
95cffc5f0e dependencies: Update isahc to 1.7.2 2022-05-26 19:07:17 +02:00
4841920c64 dependencies: Update serde_json to 1.0.81 2022-05-26 19:07:16 +02:00
c439595d92 Justfile: Add target to lint 2022-05-26 18:57:31 +02:00
62c1e430b2 Derive Eq when deriving PartialEq
There is a clippy lint for this.
2022-05-26 18:57:31 +02:00
1212917fae Add unit tests for Repo::fullname() 2022-05-26 18:57:31 +02:00
f41b9b1684 Add pycache to gitignore 2022-05-26 18:57:31 +02:00
b17f4d68ef Fix handling of unmanaged repositories
Before, there were warnings in case of nested trees.
2022-05-26 18:57:31 +02:00
b8c552fb62 Give repos a namespace to allow subdirectories 2022-05-26 18:57:31 +02:00
f2d2482476 e2e: Add tests for subdirectory checkouts 2022-05-26 18:57:31 +02:00
6ef759a14e Separate config structs from internal structs 2022-05-26 18:57:31 +02:00
10e02c20a1 e2e: Add tests for nested repository checkouts 2022-05-26 18:57:31 +02:00
433dc090e0 Prefix shell commands with dollar sign 2022-05-26 18:57:31 +02:00
35e7c34d11 Do not panic when finding unmanaged worktrees fails 2022-05-26 18:57:31 +02:00
50a0f4d766 Fail properly when default branch cannot be detected 2022-05-26 18:57:31 +02:00
1db3eadd4c Fix formatting 2022-05-26 18:57:14 +02:00
af45b13612 Justfile: Add target for formatting 2022-05-26 18:53:12 +02:00
c994c90247 Justfile: Remove Cargo.lock check 2022-05-26 18:53:12 +02:00
3e8aad2221 Format cargo update script with black 2022-05-26 18:53:12 +02:00
127dd0535e Normalize paths when printing configuration 2022-05-26 18:53:12 +02:00
664d44eddc Only initialize worktrees for actually cloned repos 2022-05-26 18:53:12 +02:00
ad206297d8 e2e: Test sync twice to verify no changes 2022-05-26 18:53:12 +02:00
f2f1d5bcaf Fix worktree initialization 2022-05-26 18:53:12 +02:00
881a33dc96 e2e: Add tests for worktree initialization 2022-05-26 18:53:12 +02:00
38c66cad62 Add git forge integration 2022-05-26 17:55:07 +02:00
7ad51ccb47 Cargo.lock: Updating ryu v1.0.9 -> v1.0.10 2022-05-26 17:26:28 +02:00
dd65f2cd81 Cargo.lock: Updating once_cell v1.10.0 -> v1.12.0 2022-05-26 17:26:28 +02:00
f01568a695 Cargo.lock: Updating mio v0.8.2 -> v0.8.3 2022-05-26 17:26:28 +02:00
be085e9b0f dependencies: Update regex to 1.5.6 2022-05-26 17:26:28 +02:00
3557dd2686 dependencies: Update clap to 3.1.18 2022-05-26 17:26:28 +02:00
908094f48b dependencies: Update git2 to 0.14.4 2022-05-26 17:26:28 +02:00
c3c1c98913 Run cargo fmt 2022-05-10 18:26:06 +02:00
e940ab69fb Accept clippy suggestions 2022-05-10 18:25:45 +02:00
1cf4e85014 Fix non-worktree directory detection for status 2022-05-10 18:24:29 +02:00
14c95f2704 Fix worktree creation handling 2022-05-10 17:54:03 +02:00
75 changed files with 8634 additions and 1904 deletions

1
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1 @@
github: hakoerber

View File

@@ -1,49 +1,3 @@
# Contributing
GRM is still in very early development. I started GRM mainly to scratch my own
itches (and am heavily dogfooding it). If you have a new use case for GRM, go
for it!
The branching strategy is a simplified
[git-flow](https://nvie.com/posts/a-successful-git-branching-model/).
* `master` is the "production" branch. Each commit is a new release.
* `develop` is the branch where new stuff is coming in.
* feature branches branch off of `develop` and merge back into it.
So to contribute, just fork the repo and create a pull request against
`develop`. If you plan bigger changes, please consider opening an issue first,
so we can discuss it.
If you want, add yourself to the `CONTRIBUTORS` file in your pull request.
## Code formatting
For Rust, just use `cargo fmt`. For Python, use
[black](https://github.com/psf/black). I'd rather not spend any effort in
configuring the formatters (not possible for black anyway).
## Tooling
GRM uses [`just`](https://github.com/casey/just) as a command runner. See
[here](https://github.com/casey/just#installation) for installation
instructions (it's most likely just a simple `cargo install just`).
## Testing
There are two distinct test suites: one for unit tests (`just test-unit`) and
integration tests (`just test-integration`) that are part of the Rust crate, and
a separate e2e test suite in Python (`just test-e2e`).
To run all tests, run `just test`.
When contributing, consider whether it makes sense to add tests to prevent
regressions in the future. When fixing bugs, it makes sense to add tests that
expose the wrong behavior beforehand.
## Documentation
The documentation lives in `docs` and uses
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
features here!
Check out [the developer
documentation](https://hakoerber.github.io/git-repo-manager/developing.html) if
you want to contribute!

View File

@@ -1 +1,2 @@
nonnominandus
Maximilian Volk

600
Cargo.lock generated
View File

@@ -11,6 +11,17 @@ dependencies = [
"memchr",
]
[[package]]
name = "async-channel"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319"
dependencies = [
"concurrent-queue",
"event-listener",
"futures-core",
]
[[package]]
name = "atty"
version = "0.2.14"
@@ -34,6 +45,24 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bytes"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0b3de4a0c5e67e16066a0715723abd91edc2f9001d09c46e1dca929351e130e"
[[package]]
name = "cache-padded"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c"
[[package]]
name = "castaway"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6"
[[package]]
name = "cc"
version = "1.0.73"
@@ -51,16 +80,16 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "3.1.17"
version = "3.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47582c09be7c8b32c0ab3a6181825ababb713fde6fff20fc573a3870dd45c6a0"
checksum = "54635806b078b7925d6e36810b1755f2a4b5b4d57560432c1ecf60bcbe10602b"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"clap_lex",
"indexmap",
"lazy_static",
"once_cell",
"strsim",
"termcolor",
"textwrap",
@@ -68,11 +97,11 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "3.1.7"
version = "3.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3aab4734e083b809aaf5794e14e756d1c798d2c69c7f7de7a09a2f5214993c1"
checksum = "759bf187376e1afa7b85b959e6a664a3e7a95203415dba952ad19139e798f902"
dependencies = [
"heck 0.4.0",
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
@@ -81,18 +110,18 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.2.0"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213"
checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
dependencies = [
"os_str_bytes",
]
[[package]]
name = "comfy-table"
version = "5.0.1"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b103d85ca6e209388771bfb7aa6b68a7aeec4afbf6f0a0264bfbf50360e5212e"
checksum = "121d8a5b0346092c18a4b2fd6f620d7a06f0eb7ac0a45860939a0884bc579c56"
dependencies = [
"crossterm",
"strum",
@@ -100,6 +129,15 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "concurrent-queue"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3"
dependencies = [
"cache-padded",
]
[[package]]
name = "console"
version = "0.15.0"
@@ -115,6 +153,16 @@ dependencies = [
"winapi",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "crossterm"
version = "0.23.2"
@@ -140,6 +188,37 @@ dependencies = [
"winapi",
]
[[package]]
name = "curl"
version = "0.4.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d855aeef205b43f65a5001e0997d81f8efca7badad4fad7d897aa7f0d0651f"
dependencies = [
"curl-sys",
"libc",
"openssl-probe",
"openssl-sys",
"schannel",
"socket2",
"winapi",
]
[[package]]
name = "curl-sys"
version = "0.4.55+curl-7.83.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23734ec77368ec583c2e61dd3f0b0e5c98b93abe6d2a004ca06b91dd7e3e2762"
dependencies = [
"cc",
"libc",
"libnghttp2-sys",
"libz-sys",
"openssl-sys",
"pkg-config",
"vcpkg",
"winapi",
]
[[package]]
name = "dirs-next"
version = "2.0.0"
@@ -167,6 +246,36 @@ version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "encoding_rs"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b"
dependencies = [
"cfg-if",
]
[[package]]
name = "event-listener"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71"
[[package]]
name = "fastrand"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
dependencies = [
"instant",
]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
version = "1.0.1"
@@ -184,37 +293,68 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
[[package]]
name = "getrandom"
version = "0.2.6"
name = "futures-core"
version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3"
[[package]]
name = "futures-io"
version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b"
[[package]]
name = "futures-lite"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48"
dependencies = [
"fastrand",
"futures-core",
"futures-io",
"memchr",
"parking",
"pin-project-lite",
"waker-fn",
]
[[package]]
name = "getrandom"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6"
dependencies = [
"cfg-if",
"libc",
"wasi 0.10.2+wasi-snapshot-preview1",
"wasi",
]
[[package]]
name = "git-repo-manager"
version = "0.6.2"
version = "0.7.6"
dependencies = [
"clap",
"comfy-table",
"console",
"git2",
"isahc",
"parse_link_header",
"regex",
"serde",
"serde_json",
"serde_yaml",
"shellexpand",
"tempdir",
"toml",
"url-escape",
]
[[package]]
name = "git2"
version = "0.14.3"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e77a14ffc6ba4ad5188d6cf428894c4fcfda725326b37558f35bb677e712cec"
checksum = "d0155506aab710a86160ddb504a480d2964d7ab5b9e62419be69e0032bc5931c"
dependencies = [
"bitflags",
"libc",
@@ -227,18 +367,9 @@ dependencies = [
[[package]]
name = "hashbrown"
version = "0.11.2"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
"unicode-segmentation",
]
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "heck"
@@ -255,6 +386,17 @@ dependencies = [
"libc",
]
[[package]]
name = "http"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "idna"
version = "0.2.3"
@@ -268,14 +410,58 @@ dependencies = [
[[package]]
name = "indexmap"
version = "1.8.1"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "instant"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
]
[[package]]
name = "isahc"
version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9"
dependencies = [
"async-channel",
"castaway",
"crossbeam-utils",
"curl",
"curl-sys",
"encoding_rs",
"event-listener",
"futures-lite",
"http",
"log",
"mime",
"once_cell",
"polling",
"serde",
"serde_json",
"slab",
"sluice",
"tracing",
"tracing-futures",
"url",
"waker-fn",
]
[[package]]
name = "itoa"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
[[package]]
name = "jobserver"
version = "0.1.24"
@@ -293,15 +479,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.125"
version = "0.2.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
[[package]]
name = "libgit2-sys"
version = "0.13.3+1.4.2"
version = "0.13.4+1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c24d36c3ac9b9996a2418d6bf428cc0bc5d1a814a84303fc60986088c5ed60de"
checksum = "d0fa6563431ede25f5cc7f6d803c6afbc1c5d3ad3d4925d12c882bf2b526f5d1"
dependencies = [
"cc",
"libc",
@@ -311,6 +497,16 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "libnghttp2-sys"
version = "0.1.7+1.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "libssh2-sys"
version = "0.2.23"
@@ -327,9 +523,9 @@ dependencies = [
[[package]]
name = "libz-sys"
version = "1.1.6"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e7e15d7610cce1d9752e137625f14e61a28cd45929b6e12e47b50fe154ee2e"
checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
dependencies = [
"cc",
"libc",
@@ -339,9 +535,9 @@ dependencies = [
[[package]]
name = "linked-hash-map"
version = "0.5.4"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "lock_api"
@@ -375,42 +571,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "mio"
version = "0.8.2"
name = "mime"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52da4364ffb0e4fe33a9841a98a3f3014fb964045ce4f7a45a398243c8d6b0c9"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
[[package]]
name = "mio"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
dependencies = [
"libc",
"log",
"miow",
"ntapi",
"wasi 0.11.0+wasi-snapshot-preview1",
"winapi",
]
[[package]]
name = "miow"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21"
dependencies = [
"winapi",
]
[[package]]
name = "ntapi"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f"
dependencies = [
"winapi",
"wasi",
"windows-sys",
]
[[package]]
name = "once_cell"
version = "1.10.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
[[package]]
name = "openssl-probe"
@@ -419,29 +601,45 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
version = "0.9.73"
name = "openssl-src"
version = "111.22.0+1.1.1q"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5fd19fb3e0a8191c1e34935718976a3e70c112ab9a24af6d7cadccd9d90bc0"
checksum = "8f31f0d509d1c1ae9cada2f9539ff8f37933831fd5098879e482aa687d659853"
dependencies = [
"cc",
]
[[package]]
name = "openssl-sys"
version = "0.9.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f"
dependencies = [
"autocfg",
"cc",
"libc",
"openssl-src",
"pkg-config",
"vcpkg",
]
[[package]]
name = "os_str_bytes"
version = "6.0.0"
version = "6.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
checksum = "648001efe5d5c0102d8cea768e348da85d90af8ba91f0bea908f157951493cd4"
[[package]]
name = "parking"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
[[package]]
name = "parking_lot"
version = "0.12.0"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
"parking_lot_core",
@@ -460,18 +658,68 @@ dependencies = [
"windows-sys",
]
[[package]]
name = "parse_link_header"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40728c9c01de984c45f49385ab054fdc31cd3322658a6934347887e72cb48df9"
dependencies = [
"http",
"lazy_static",
"regex",
]
[[package]]
name = "percent-encoding"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "pin-project"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-lite"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
[[package]]
name = "pkg-config"
version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
[[package]]
name = "polling"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259"
dependencies = [
"cfg-if",
"libc",
"log",
"wepoll-ffi",
"winapi",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@@ -498,18 +746,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.38"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9027b48e9d4c9175fa2218adf3557f91c1137021739951d4932f5f8268ac48aa"
checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
dependencies = [
"unicode-xid",
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.18"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
dependencies = [
"proc-macro2",
]
@@ -573,9 +821,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.5.5"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
dependencies = [
"aho-corasick",
"memchr",
@@ -584,9 +832,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.6.25"
version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
[[package]]
name = "remove_dir_all"
@@ -599,15 +847,25 @@ dependencies = [
[[package]]
name = "rustversion"
version = "1.0.6"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
checksum = "24c8ad4f0c00e1eb5bc7614d236a7f1300e3dbd76b68cac8e06fb00b015ad8d8"
[[package]]
name = "ryu"
version = "1.0.9"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
[[package]]
name = "schannel"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2"
dependencies = [
"lazy_static",
"windows-sys",
]
[[package]]
name = "scopeguard"
@@ -617,18 +875,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
version = "1.0.137"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
checksum = "fc855a42c7967b7c369eb5860f7164ef1f6f81c20c7cc1141f2a604e18723b03"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.137"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
checksum = "6f2122636b9fe3b81f1cb25099fcf2d3f542cdb1d45940d56c713158884a05da"
dependencies = [
"proc-macro2",
"quote",
@@ -636,10 +894,21 @@ dependencies = [
]
[[package]]
name = "serde_yaml"
version = "0.8.24"
name = "serde_json"
version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "707d15895415db6628332b737c838b88c598522e4dc70647e59b72312924aebc"
checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_yaml"
version = "0.8.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b"
dependencies = [
"indexmap",
"ryu",
@@ -658,9 +927,9 @@ dependencies = [
[[package]]
name = "signal-hook"
version = "0.3.13"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "647c97df271007dcea485bb74ffdb57f2e683f1306c854f468a0c244badabf2d"
checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d"
dependencies = [
"libc",
"signal-hook-registry",
@@ -687,10 +956,40 @@ dependencies = [
]
[[package]]
name = "smallvec"
version = "1.8.0"
name = "slab"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef"
dependencies = [
"autocfg",
]
[[package]]
name = "sluice"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5"
dependencies = [
"async-channel",
"futures-core",
"futures-io",
]
[[package]]
name = "smallvec"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "socket2"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "strsim"
@@ -700,17 +999,17 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strum"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cae14b91c7d11c9a851d3fbc80a963198998c2a64eec840477fa92d8ce9b70bb"
checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f"
[[package]]
name = "strum_macros"
version = "0.23.1"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb0dc7ee9c15cea6199cde9a127fa16a4c5819af85395457ad72d68edc85a38"
checksum = "4faebde00e8ff94316c01800f9054fd2ba77d30d9e922541913051d1d978918b"
dependencies = [
"heck 0.3.3",
"heck",
"proc-macro2",
"quote",
"rustversion",
@@ -719,13 +1018,13 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.92"
version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ff7c592601f11445996a06f8ad0c27f094a58857c2f89e97974ab9235b92c52"
checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
"unicode-ident",
]
[[package]]
@@ -807,6 +1106,49 @@ dependencies = [
"serde",
]
[[package]]
name = "tracing"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160"
dependencies = [
"cfg-if",
"log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tracing-core"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7"
dependencies = [
"once_cell",
]
[[package]]
name = "tracing-futures"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2"
dependencies = [
"pin-project",
"tracing",
]
[[package]]
name = "unicode-bidi"
version = "0.3.8"
@@ -814,32 +1156,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
[[package]]
name = "unicode-normalization"
version = "0.1.19"
name = "unicode-ident"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"
checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7"
[[package]]
name = "unicode-normalization"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
[[package]]
name = "unicode-width"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
[[package]]
name = "unicode-xid"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
[[package]]
name = "url"
version = "2.2.2"
@@ -852,6 +1188,15 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "url-escape"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44e0ce4d1246d075ca5abec4b41d33e87a6054d08e2366b63205665e950db218"
dependencies = [
"percent-encoding",
]
[[package]]
name = "vcpkg"
version = "0.2.15"
@@ -865,10 +1210,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.10.2+wasi-snapshot-preview1"
name = "waker-fn"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
[[package]]
name = "wasi"
@@ -876,6 +1221,15 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wepoll-ffi"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb"
dependencies = [
"cc",
]
[[package]]
name = "winapi"
version = "0.3.9"

View File

@@ -1,7 +1,8 @@
[package]
name = "git-repo-manager"
version = "0.6.2"
version = "0.7.6"
edition = "2021"
authors = [
"Hannes Körber <hannes@hkoerber.de>",
]
@@ -26,6 +27,9 @@ rust-version = "1.57"
license = "GPL-3.0-only"
[profile.e2e-tests]
inherits = "dev"
[lib]
name = "grm"
path = "src/lib.rs"
@@ -40,30 +44,52 @@ path = "src/grm/main.rs"
version = "=0.5.9"
[dependencies.serde]
version = "=1.0.137"
version = "=1.0.140"
features = ["derive"]
[dependencies.git2]
version = "=0.14.3"
version = "=0.14.4"
[dependencies.shellexpand]
version = "=2.1.0"
[dependencies.clap]
version = "=3.1.17"
version = "=3.2.14"
features = ["derive", "cargo"]
[dependencies.console]
version = "=0.15.0"
[dependencies.regex]
version = "=1.5.5"
version = "=1.6.0"
[dependencies.comfy-table]
version = "=5.0.1"
version = "=6.0.0"
[dependencies.serde_yaml]
version = "=0.8.24"
version = "=0.8.26"
[dependencies.serde_json]
version = "=1.0.82"
[dependencies.isahc]
version = "=1.7.2"
default-features = false
features = ["json", "http2", "text-decoding"]
[dependencies.parse_link_header]
version = "=0.3.2"
[dependencies.url-escape]
version = "=0.1.1"
[dev-dependencies.tempdir]
version = "=0.3.7"
[features]
static-build = [
"git2/vendored-openssl",
"git2/vendored-libgit2",
"isahc/static-curl",
"isahc/static-ssl",
]

View File

@@ -1,41 +1,85 @@
check: check-cargo-lock check-pip-requirements test
cargo check
cargo fmt --check
cargo clippy --no-deps -- -Dwarnings
set positional-arguments
check-cargo-lock:
cargo update --locked
static_target := "x86_64-unknown-linux-musl"
check: fmt-check lint test
cargo check
clean:
cargo clean
git clean -f -d -X
fmt:
cargo fmt
git ls-files | grep '\.py$' | xargs black
git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --write
fmt-check:
cargo fmt --check
git ls-files | grep '\.py$' | xargs black --check
git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --diff
lint:
cargo clippy --no-deps -- -Dwarnings
git ls-files | grep '\.sh$' | xargs -L 1 shellcheck --norc
lint-fix:
cargo clippy --no-deps --fix
release:
build-release:
cargo build --release
build-release-static:
cargo build --release --target {{static_target}} --features=static-build
pushall:
for r in $(git remote) ; do \
for branch in develop master ; do \
git push $r $branch ; \
done ; \
done
release-patch:
./release.sh patch
test-binary:
env \
GITHUB_API_BASEURL=http://rest:5000/github \
GITLAB_API_BASEURL=http://rest:5000/gitlab \
cargo build --profile e2e-tests --target {{static_target}} --features=static-build
install:
cargo install --path .
install-static:
cargo install --target {{static_target}} --features=static-build --path .
build:
cargo build
build-static:
cargo build --target {{static_target}} --features=static-build
test: test-unit test-integration test-e2e
test-unit:
cargo test --lib --bins
test-unit +tests="":
cargo test --lib --bins -- --show-output {{tests}}
test-integration:
cargo test --test "*"
e2e-venv:
test-e2e +tests=".": test-binary
cd ./e2e_tests \
&& python3 -m venv venv \
&& . ./venv/bin/activate \
&& pip --disable-pip-version-check install -r ./requirements.txt >/dev/null
&& docker-compose rm --stop -f \
&& docker-compose build \
&& docker-compose run \
--rm \
-v $PWD/../target/x86_64-unknown-linux-musl/e2e-tests/grm:/grm \
pytest \
"GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest --exitfirst -p no:cacheprovider --color=yes "$@"" \
&& docker-compose rm --stop -f
test-e2e +tests=".": e2e-venv release
cd ./e2e_tests \
&& . ./venv/bin/activate \
&& TMPDIR=/dev/shm python -m pytest --color=yes {{tests}}
update-dependencies: update-cargo-dependencies update-pip-requirements
update-dependencies: update-cargo-dependencies
update-cargo-dependencies:
@cd ./depcheck \
@@ -44,14 +88,5 @@ update-cargo-dependencies:
&& pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \
&& ./update-cargo-dependencies.py
update-pip-requirements: e2e-venv
@cd ./e2e_tests \
&& ./update_requirementstxt.sh
check-pip-requirements: e2e-venv
@cd ./e2e_tests \
&& . ./venv/bin/activate \
&& pip list --outdated | grep -q '.' && exit 1 || exit 0
clean:
cargo clean
wait:
read -p "[ENTER] to continue "

View File

@@ -62,7 +62,10 @@ for tier in ["dependencies", "dev-dependencies"]:
for version_entry in open(info_file, "r").readlines():
version = semver.VersionInfo.parse(json.loads(version_entry)["vers"])
if latest_version is None or version > latest_version:
if current_version.prerelease is None and version.prerelease is not None:
if (
current_version.prerelease is None
and version.prerelease is not None
):
# skip prereleases, except when we are on a prerelease already
print(f"{name}: Skipping prerelease version {version}")
continue
@@ -91,7 +94,15 @@ for tier in ["dependencies", "dev-dependencies"]:
try:
cmd = subprocess.run(
["cargo", "update", "-Z", "no-index-update", "--aggressive", "--package", name],
[
"cargo",
"update",
"-Z",
"no-index-update",
"--aggressive",
"--package",
name,
],
check=True,
capture_output=True,
text=True,
@@ -103,9 +114,16 @@ for tier in ["dependencies", "dev-dependencies"]:
message = f"dependencies: Update {name} to {latest_version}"
subprocess.run(
["git", "commit", "--message", message, "../Cargo.toml", "../Cargo.lock"],
[
"git",
"commit",
"--message",
message,
"../Cargo.toml",
"../Cargo.lock",
],
check=True,
capture_output=True
capture_output=True,
)
@@ -114,11 +132,19 @@ for tier in ["dependencies", "dev-dependencies"]:
while True:
with open("../Cargo.lock", "r") as f:
cargo_lock = tomlkit.parse(f.read())
for package in cargo_lock['package']:
for package in cargo_lock["package"]:
spec = f"{package['name']}:{package['version']}"
try:
cmd = subprocess.run(
["cargo", "update", "-Z", "no-index-update", "--aggressive", "--package", spec],
[
"cargo",
"update",
"-Z",
"no-index-update",
"--aggressive",
"--package",
spec,
],
check=True,
capture_output=True,
text=True,
@@ -134,7 +160,7 @@ while True:
cmd = subprocess.run(
["git", "commit", "--message", message, "../Cargo.lock"],
check=True,
capture_output=True
capture_output=True,
)
break
else:

View File

@@ -7,3 +7,8 @@ title = "Git Repo Manager"
[output.html]
mathjax-support = true
# [output.linkcheck]
# follow-web-links = true
# traverse-parent-directories = false
# warning-policy = "error"

View File

@@ -1,8 +1,20 @@
# Summary
- [Overview](./overview.md)
- [Getting started](./getting_started.md)
- [Repository trees](./repos.md)
[Overview](./overview.md)
- [Installation](./installation.md)
- [Tutorial](./tutorial.md)
- [Managing Repositories](./repos.md)
- [Local Configuration](./local_configuration.md)
- [Forge Integrations](./forge_integration.md)
- [Git Worktrees](./worktrees.md)
- [Working with Worktrees](./worktree_working.md)
- [Worktrees and Remotes](./worktree_remotes.md)
- [Behavior Details](./worktree_behavior.md)
- [FAQ](./faq.md)
- [Contributing](./contributing.md)
- [Developer Documentation](./developing.md)
- [Testing](./testing.md)
- [Dependency updates](./dependency_updates.md)
- [Releases](./releases.md)
- [Formatting & Style](./formatting_and_style.md)
- [The Docs Themselves](./documentation.md)

View File

@@ -1 +0,0 @@
../../CONTRIBUTING.md

View File

@@ -0,0 +1,10 @@
# Dependency updates
Rust has the same problem as the node ecosystem, just a few orders of magnitude smaller:
Dependency sprawl. GRM has a dozen direct dependencies, but over 150 transitive
ones.
To keep them up to date, there is a script:
`depcheck/update-cargo-dependencies.py`. It updates direct dependencies to the
latest stable version and updates transitive dependencies where possible. To run
it, use `just update-dependencies`, which will create commits for each update.

69
docs/src/developing.md Normal file
View File

@@ -0,0 +1,69 @@
# Overview
GRM is still in very early development. I started GRM mainly to scratch my own
itches (and am heavily dogfooding it). If you have a new use case for GRM, go
for it!
## Contributing
To contribute, just fork the repo and create a pull request against `develop`.
If you plan bigger changes, please consider opening an issue first, so we can
discuss it.
If you want, add yourself to the `CONTRIBUTORS` file in your pull request.
## Branching strategy
The branching strategy is a simplified
[git-flow](https://nvie.com/posts/a-successful-git-branching-model/).
* `master` is the "production" branch. Each commit is a new release.
* `develop` is the branch where new stuff is coming in.
* feature branches branch off of `develop` and merge back into it.
Feature branches are not required, there are also changes happening directly on
`develop`.
## Required tooling
You will need the following tools:
* Rust (obviously) (easiest via `rustup`), with the nightly toolchain
* Python3
* [`just`](https://github.com/casey/just), a command runner like `make`. See
[here](https://github.com/casey/just#installation) for installation
instructions (it's most likely just a simple `cargo install just`).
* Docker & docker-compose for the e2e tests
* `black` and `shfmt` for formatting.
* `shellcheck` for shell script linting
* `mdbook` for the documentation
Here are the tools:
| Distribution | Command |
| ------------- | --------------------------------------------------------------------------------------------------- |
| Arch Linux | `pacman -S --needed python3 rustup just docker docker-compose python-black shfmt shellcheck mdbook` |
| Ubuntu/Debian | `apt-get install --no-install-recommends python3 docker.io docker-compose black shellcheck` |
Note that you will have to install `just` and `mdbook` manually on Ubuntu (e.g.
via `cargo install just mdbook` if your rust build environment is set up
correctly). Same for `shfmt`, which may just be a `go install
mvdan.cc/sh/v3/cmd/shfmt@latest`, depending on your go build environment.
For details about rustup and the toolchains, see [the installation
section](./installation.md).
## FAQ
### Why nightly?
For now, GRM requires the nightly toolchain for two reasons:
* [`io_error_more`](https://github.com/rust-lang/rust/issues/86442) to get
better error messages on IO errors
* [`const_option_ext`](https://github.com/rust-lang/rust/issues/91930) to have
static variables read from the environment that fall back to hard coded
defaults
Honestly, both of those are not really necessary or can be handled without
nightly. It's just that I'm using nightly anyway.

11
docs/src/documentation.md Normal file
View File

@@ -0,0 +1,11 @@
# Documentation
The documentation lives in the `docs` folder and uses
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
features here!
Using [GitHub actions](https://github.com/features/actions), the documentation
on `master` is automatically published to [the project
homepage](https://hakoerber.github.io/git-repo-manager/) via GitHub pages. See
`.github/workflows/gh-pages.yml` for the configuration of GitHub Actions.

View File

@@ -1,10 +1,3 @@
# FAQ
## Why is the nightly toolchain required?
Building GRM currently requires nightly features due to the usage of
[`std::path::Path::is_symlink()`](https://doc.rust-lang.org/std/fs/struct.FileType.html#method.is_symlink).
See the [tracking issue](https://github.com/rust-lang/rust/issues/85748).
`is_symlink()` is actually available in rustc 1.57, so it will be on stable in
the near future. This would mean that GRM can be built using the stable toolchain!
Currently empty, as there are no questions that are asked frequently :D

View File

@@ -0,0 +1,209 @@
# Forge Integrations
In addition to managing repositories locally, `grm` also integrates with source
code hosting platforms. Right now, the following platforms are supported:
* [GitHub](https://github.com/)
* [GitLab](https://gitlab.com/)
Imagine you are just starting out with `grm` and want to clone all your
repositories from GitHub. This is as simple as:
```bash
$ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects
```
You will end up with your projects cloned into
`~/projects/{your_github_username}/`
## Authentication
The only currently supported authentication option is using a personal access
token.
### GitHub
See the GitHub documentation for personal access tokens:
[Link](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token).
The only required permission is the "repo" scope.
### GitLab
See the GitLab documentation for personal access tokens:
[Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html).
The required scopes are a bit weird. Actually, the following should suffice:
* `read_user` to get user information (required to get the current
authenticated user name for the `--owner` filter).
* A scope that allows reading private repositories. (`read_repository` is just
for *cloning* private repos). This unfortunately does not exist.
So currently, you'll need to select the `read_api` scope.
## Filters
By default, `grm` will sync **nothing**. This is quite boring, so you have to
tell the command what repositories to include. They are all inclusive (i.e. act
as a logical OR), so you can easily chain many filters to clone a bunch of
repositories. It's quite simple:
* `--user <USER>` syncs all repositories of that remote user
* `--group <GROUP>` syncs all repositories of that remote group/organization
* `--owner` syncs all repositories of the user that is used for authentication.
This is effectively a shortcut for `--user $YOUR_USER`
* `--access` syncs all repositories that the current user has access to
Easiest to see in an example:
```bash
$ grm repos sync remote --provider github --user torvalds --owner --group zalando [...]
```
This would sync all of Torvalds' repositories, all of my own repositories and
all (public) repositories in the "zalando" group.
## Strategies
There are generally three ways how you can use `grm` with forges:
### Ad-hoc cloning
This is the easiest, there are no local files involved. You just run the
command, `grm` clones the repos, that's it. If you run the command again, `grm`
will figure out the differences between local and remote repositories and
resolve them locally.
### Create a file
This is effectively `grm repos find local`, but using the forge instead of the
local file system. You will end up with a normal repository file that you can
commit to git. To update the list of repositories, just run the command again
and commit the new file.
### Define options in a file
This is a hybrid approach: You define filtering options in a file that you can
commit to source control. Effectively, you are persisting the options you gave
to `grm` on the command line with the ad-hoc approach. Similarly, `grm` will
figure out differences between local and remote and resolve them.
A file would look like this:
```toml
provider = "github"
token_command = "cat ~/.github_token"
root = "~/projects"
[filters]
owner = true
groups = [
"zalando"
]
```
The options in the file map to the command line options of the `grm repos sync
remote` command.
You'd then run the `grm repos sync` command the same way as with a list of
repositories in a configuration:
```bash
$ grm repos sync --config example.config.toml
```
You can even use that file to generate a repository list that you can feed into
`grm repos sync`:
```bash
$ grm repos find config --config example.config.toml > repos.toml
$ grm repos sync config --config repos.toml
```
## Using with self-hosted GitLab
By default, `grm` uses the default GitLab API endpoint
([https://gitlab.com](https://gitlab.com)). You can override the endpoint by
specifying the `--api-url` parameter. Like this:
```bash
$ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...]
```
## The cloning protocol
By default, `grm` will use HTTPS for public repositories and SSH otherwise. This
can be overridden with the `--force-ssh` switch.
## About the token command
To ensure maximum flexibility, `grm` has a single way to get the token it uses
to authenticate: Specify a command that returns the token via stdout. This
easily integrates with password managers like
[`pass`](https://www.passwordstore.org/).
Of course, you are also free to specify something like `echo mytoken` as the
command, as long as you are OK with the security implications (like having the
token in clear text in your shell history). It may be better to have the token
in a file instead and read it: `cat ~/.gitlab_token`.
Generally, use whatever you want. The command just has to return successfully
and return the token as the first line of stdout.
## Examples
Maybe you just want to locally clone all repos from your GitHub user?
```bash
$ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token"
```
This will clone all repositories into
`~/github_projects/{your_github_username}`.
If instead you want to clone **all** repositories you have access to (e.g. via
organizations or other users' private repos you have access to), just change the
filter a little bit:
```bash
$ grm repos sync remote --provider github --access --root ~/github_projects --token-command "pass show github_grm_access_token"
```
## Limitations
### GitHub
Unfortunately, GitHub does not have a nice API endpoint to get **private**
repositories for a certain user
([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user)
only returns public repositories).
Therefore, using `--user {user}` will only show public repositories for GitHub.
Note that this does not apply to `--access`: If you have access to another
user's private repository, it will be listed.
## Adding integrations
Adding a new integration involves writing some Rust code. Most of the logic is
generic, so you will not have to reinvent the wheel. Generally, you will need to
gather the following information:
* A list of repositories for a single user
* A list of repositories for a group (or any similar concept if applicable)
* A list of repositories for the user that the API token belongs to
* The username of the currently authenticated user
Authentication currently only works via a bearer token passed via the
`Authorization` HTTP header.
Each repo has to have the following properties:
* A name (which also acts as the identifier for diff between local and remote
repositories)
* An SSH URL to push to
* An HTTPS URL to clone and fetch from
* A flag that marks the repository as private
If you plan to implement another forge, please first open an issue so we can go
through the required setup. I'm happy to help!

View File

@@ -0,0 +1,45 @@
# Formatting & Style
## Code formatting
I'm allergic to discussions about formatting. I'd rather make the computer do it
for me.
For Rust, just use `cargo fmt`. For Python, use
[black](https://github.com/psf/black). I'd rather not spend any effort in
configuring the formatters (not possible for black anyway). For shell scripts,
use [`shfmt`](https://github.com/mvdan/sh).
To autoformat all code, use `just fmt`
## Style
Honestly, no idea about style. I'm still learning Rust, so I'm trying to find a
good style. Just try to keep it consistent when you add code.
## Linting
You can use `just lint` to run all lints.
### Rust
Clippy is the guard that prevents shitty code from getting into the code base.
When running `just check`, any clippy suggestions will make the command fail.
So make clippy happy! The easiest way:
* Commit your changes (so clippy can make changes safely).
* Run `cargo clippy --fix` to do the easy changes automatically.
* Run `cargo clippy` and take a look at the messages.
Until now, I had no need to override or silence any clippy suggestions.
### Shell
`shellcheck` lints all shell scripts. As they change very rarely, this is not
too important.
## Unsafe code
Any `unsafe` code is forbidden for now globally via `#![forbid(unsafe_code)]`.
I cannot think of any reason GRM may need `unsafe`. If it comes up, it needs to
be discussed.

View File

@@ -1,22 +0,0 @@
# Quickstart
## Installation
Building GRM currently requires the nightly Rust toolchain. The easiest way
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
Make sure that the nightly toolchain is installed:
```
$ rustup toolchain install nightly
```
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch master
```
If you're brave, you can also run the development build:
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
```

57
docs/src/installation.md Normal file
View File

@@ -0,0 +1,57 @@
# Installation
## Installation
Building GRM currently requires the nightly Rust toolchain. The easiest way is
using [`rustup`](https://rustup.rs/). Make sure that rustup is properly
installed.
Make sure that the nightly toolchain is installed:
```
$ rustup toolchain install nightly
```
Then, install the build dependencies:
| Distribution | Command |
| ------------- | ------------------------------------------------------------------------------ |
| Arch Linux | `pacman -S --needed gcc openssl pkg-config` |
| Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` |
Then, it's a simple command to install the latest stable version:
```bash
$ cargo +nightly install git-repo-manager
```
If you're brave, you can also run the development build:
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
```
## Static build
Note that by default, you will get a dynamically linked executable.
Alternatively, you can also build a statically linked binary. For this, you will
need `musl` and a few other build dependencies installed:
| Distribution | Command |
| ------------- | --------------------------------------------------------------------------- |
| Arch Linux | `pacman -S --needed gcc musl perl make` |
| Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` |
(`perl` and `make` are required for the OpenSSL build script)
Then, add the musl target via `rustup`:
```
$ rustup +nightly target add x86_64-unknown-linux-musl
```
Then, use a modified build command to get a statically linked binary:
```
$ cargo +nightly install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build
```

View File

@@ -0,0 +1,83 @@
# Local Configuration
When managing multiple git repositories with GRM, you'll generally have a
configuration file containing information about all the repos you have. GRM then
makes sure that your repositories match that configuration. If they don't exist
yet, it will clone them. It will also make sure that all remotes are configured
properly.
Let's try it out:
## Get the example configuration
```bash
$ curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml
```
Then, you're ready to run the first sync. This will clone all configured
repositories and set up the remotes.
```bash
$ grm repos sync config --config example.config.toml
[] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: OK
[] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git"
[] dotfiles: Repository successfully cloned
[] dotfiles: OK
```
If you run it again, it will report no changes:
```
$ grm repos sync config -c example.config.toml
[✔] git-repo-manager: OK
[✔] dotfiles: OK
```
### Generate your own configuration
Now, if you already have a few repositories, it would be quite laborious to
write a configuration from scratch. Luckily, GRM has a way to generate a
configuration from an existing file tree:
```bash
$ grm repos find local ~/your/project/root > config.toml
```
This will detect all repositories and remotes and write them to `config.toml`.
### Show the state of your projects
```bash
$ grm repos status --config example.config.toml
╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡
│ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │
│ ┆ ┆ ┆ <origin/master> ✔ ┆ ┆ origin │
├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯
```
You can also use `status` without `--config` to check the repository you're
currently in:
```
$ cd ~/example-projects/dotfiles
$ grm repos status
╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
```
## YAML
By default, the repo configuration uses TOML. If you prefer YAML, just give it a
YAML file instead (file ending does not matter, `grm` will figure out the
format). For generating a configuration, pass `--format yaml` to `grm repo
find` which generates a YAML configuration instead of a TOML configuration.

View File

@@ -1,8 +1,8 @@
# Overview
Welcome! This is the documentation for [Git Repo
Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a
tool that helps you manage git repositories.
Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a tool
that helps you manage git repositories in a declarative way.
GRM helps you manage git repositories in a declarative way. Configure your
repositories in a TOML or YAML file, GRM does the rest. Take a look at [the
@@ -12,12 +12,12 @@ to get a feel for the way you configure your repositories. See the [repository
tree chapter](./repos.md) for details.
GRM also provides some tooling to work with single git repositories using
`git-worktree`. See [the worktree chapter](./worktree.md) for more details.
`git-worktree`. See [the worktree chapter](./worktrees.md) for more details.
## Why use GRM?
If you're working with a lot of git repositories, GRM can help you to manage them
in an easy way:
If you're working with a lot of git repositories, GRM can help you to manage
them in an easy way:
* You want to easily clone many repositories to a new machine.
* You want to change remotes for multiple repositories (e.g. because your GitLab

27
docs/src/releases.md Normal file
View File

@@ -0,0 +1,27 @@
# Releases
To make a release, make sure you are on a clean `develop` branch, sync your
remotes and then run `./release (major|minor|patch)`. It will handle a
git-flow-y release, meaning that it will perform a merge from `develop` to
`master`, create a git tag, sync all remotes and run `cargo publish`.
Make sure to run `just check` before releasing to make sure that nothing is
broken.
As GRM is still `v0.x`, there is not much consideration for backwards
compatibility. Generally, update the patch version for small stuff and the minor
version for bigger / backwards incompatible changes.
Generally, it's good to regularly release a new patch release with [updated
dependencies](./dependency_updates.md). As `./release.sh patch` is exposed as a
Justfile target (`release-patch`), it's possible to do both in one step:
```bash
$ just update-dependencies check release-patch
```
## Release notes
There are currently no release notes. Things are changing quite quickly and
there is simply no need for a record of changes (except the git history of
course).

View File

@@ -1,82 +1,13 @@
# Managing tree of git repositories
# Managing Repositories
When managing multiple git repositories with GRM, you'll generally have a
configuration file containing information about all the repos you have. GRM then
makes sure that you repositories match that config. If they don't exist yet, it
will clone them. It will also make sure that all remotes are configured properly.
GRM helps you manage a bunch of git repositories easily. There are generally two
ways to go about that:
Let's try it out:
You can either manage a list of repositories in a TOML or YAML file, and use GRM
to sync the configuration with the state of the repository.
## Get the example configuration
Or, you can pull repository information from a forge (e.g. GitHub, GitLab) and
clone the repositories.
```bash
$ curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml
```
Then, you're ready to run the first sync. This will clone all configured repositories
and set up the remotes.
```bash
$ grm repos sync --config example.config.toml
[] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: OK
[] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git"
[] dotfiles: Repository successfully cloned
[] dotfiles: OK
```
If you run it again, it will report no changes:
```
$ grm repos sync --config example.config.toml
[✔] git-repo-manager: OK
[✔] dotfiles: OK
```
### Generate your own configuration
Now, if you already have a few repositories, it would be quite laborious to write
a configuration from scratch. Luckily, GRM has a way to generate a configuration
from an existing file tree:
```bash
$ grm repos find ~/your/project/root > config.toml
```
This will detect all repositories and remotes and write them to `config.toml`.
### Show the state of your projects
```bash
$ grm repos status --config example.config.toml
╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡
│ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │
│ ┆ ┆ ┆ <origin/master> ✔ ┆ ┆ origin │
├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯
```
You can also use `status` without `--config` to check the repository you're currently
in:
```
$ cd ~/example-projects/dotfiles
$ grm repos status
╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
```
## YAML
By default, the repo configuration uses TOML. If you prefer YAML, just give it
a YAML file instead (file ending does not matter, `grm` will figure out the format
itself). For generating a configuration, pass `--format yaml` to `grm repo find`
to generate YAML instead of TOML.
There are also hybrid modes where you pull information from a forge and create a
configuration file that you can use later.

124
docs/src/testing.md Normal file
View File

@@ -0,0 +1,124 @@
# Testing
There are two distinct test suites: One for unit tests (`just test-unit`) and
integration tests (`just test-integration`) that are part of the Rust crate, and
a separate e2e test suite in Python (`just test-e2e`).
To run all tests, run `just test`.
When contributing, consider whether it makes sense to add tests which could
prevent regressions in the future. When fixing bugs, it makes sense to add tests
that expose the wrong behavior beforehand.
The unit and integration tests are very small and only test a few self-contained
functions (like validation of certain input).
## E2E tests
The main focus of the testing setup lies on the e2e tests. Each user-facing
behavior *should* have a corresponding e2e test. These are the most important
tests, as they test functionality the user will use in the end.
The test suite is written in python and uses
[pytest](https://docs.pytest.org/en/stable/). There are helper functions that
set up temporary git repositories and remotes in a `tmpfs`.
Effectively, each test works like this:
* Set up some prerequisites (e.g. different git repositories or configuration
files)
* Run `grm`
* Check that everything is according to expected behavior (e.g. that `grm` had
certain output and exit code, that the target repositories have certain
branches, heads and remotes, ...)
As there are many different scenarios, the tests make heavy use of the
[`@pytest.mark.parametrize`](https://docs.pytest.org/en/stable/how-to/parametrize.html#pytest-mark-parametrize)
decorator to get all permutations of input parameters (e.g. whether a
configuration exists, what a config value is set to, how the repository looks
like, ...)
Whenever you write a new test, think about the different circumstances that can
happen. What are the failure modes? What affects the behavior? Parametrize each
of these behaviors.
### Optimization
Note: You will most likely not need to read this.
Each test parameter will exponentially increase the number of tests that will be
run. As a general rule, comprehensiveness is more important than test suite
runtime (so if in doubt, better to add another parameter to catch every edge
case). But try to keep the total runtime sane. Currently, the whole `just e2e`
target runs ~8'000 tests and takes around 5 minutes on my machine, excluding
binary and docker build time. I'd say that keeping it under 10 minutes is a good
idea.
To optimize tests, look out for two patterns: Dependency and Orthogonality
#### Dependency
If a parameter depends on another one, it makes little sense to handle them
independently. Example: You have a parameter that specifies whether a
configuration is used, and another parameter that sets a certain value in that
configuration file. It might look something like this:
```python
@pytest.mark.parametrize("use_config", [True, False])
@pytest.mark.parametrize("use_value", ["0", "1"])
def test(...):
```
This leads to 4 tests being instantiated. But there is little point in setting a
configuration value when no config is used, so the combinations `(False, "0")`
and `(False, "1")` are redundant. To remedy this, spell out the optimized
permutation manually:
```python
@pytest.mark.parametrize("config", ((True, "0"), (True, "1"), (False, None)))
def test(...):
(use_config, use_value) = config
```
This cuts down the number of tests by 25%. If you have more dependent parameters
(e.g. additional configuration values), this gets even better. Generally, this
will cut down the number of tests to
\\[ \frac{1}{o \cdot c} + \frac{1}{(o \cdot c) ^ {(n + 1)}} \\]
with \\( o \\) being the number of values of a parent parameter a parameter is
dependent on, \\( c \\) being the cardinality of the test input (so you can
assume \\( o = 1 \\) and \\( c = 2 \\) for boolean parameters), and \\( n \\)
being the number of parameters that are optimized, i.e. folded into their
dependent parameter.
As an example: Folding down two boolean parameters into one dependent parent
boolean parameter will cut down the number of tests to 62.5%!
#### Orthogonality
If different test parameters are independent of each other, there is little
point in testing their combinations. Instead, split them up into different test
functions. For boolean parameters, this will cut the number of tests in half.
So instead of this:
```python
@pytest.mark.parametrize("param1", [True, False])
@pytest.mark.parametrize("param2", [True, False])
def test(...):
```
Rather do this:
```python
@pytest.mark.parametrize("param1", [True, False])
def test_param1(...):
@pytest.mark.parametrize("param2", [True, False])
def test_param2(...):
```
The tests are running in Docker via docker-compose. This is mainly needed to
test networking functionality like GitLab integration, with the GitLab API being
mocked by a simple flask container.

183
docs/src/tutorial.md Normal file
View File

@@ -0,0 +1,183 @@
# Tutorial
Here, you'll find a quick overview over the most common functionality of GRM.
## Managing existing repositories
Let's say you have your git repositories at `~/code`. To start managing them via
GRM, first create a configuration:
```bash
grm repos find local ~/code --format yaml > ~/code/config.yml
```
The result may look something like this:
```yaml
---
trees:
- root: ~/code
repos:
- name: git-repo-manager
worktree_setup: true
remotes:
- name: origin
url: "https://github.com/hakoerber/git-repo-manager.git"
type: https
```
To apply the configuration and check whether all repositories are in sync, run
the following:
```bash
$ grm repos sync config --config ~/code/config.yml
[] git-repo-manager: OK
```
Well, obviously there are no changes. To check how changes would be applied,
let's change the name of the remote (currently `origin`):
```bash
$ sed -i 's/name: origin/name: github/' ~/code/config.yml
$ grm repos sync config --config ~/code/config.yml
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: Deleting remote "origin"
[] git-repo-manager: OK
```
GRM replaced the `origin` remote with `github`.
The configuration (`~/code/config.yml` in this example) would usually be
something you'd track in git or synchronize between machines via some other
means. Then, on every machine, all your repositories are a single `grm repos
sync` away!
## Getting repositories from a forge
Let's say you have a bunch of repositories on GitHub and you'd like to clone
them all to your local machine.
To authenticate, you'll need to get a personal access token, as described in
[the forge documentation](./forge_integration.md#github). Let's assume you put
your token into `~/.github_token` (please don't if you're doing this "for
real"!)
Let's first see what kind of repos we can find:
```bash
$ grm repos find remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml
---
trees: []
$
```
Ummm, ok? No repos? This is because you have to *tell* GRM what to look for (if
you don't, GRM will just relax, as it's lazy).
There are different filters (see [the forge
documentation](./forge_integration.md#filters) for more info). In our case,
we'll just use the `--owner` filter to get all repos that belong to us:
```bash
$ grm repos find remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml
---
trees:
- root: ~/code/github.com
repos:
- name: git-repo-manager
worktree_setup: false
remotes:
- name: origin
url: "https://github.com/hakoerber/git-repo-manager.git"
type: https
```
Nice! The format is the same as we got from `grm repos find local` above. So if
we wanted, we could save this file and use it with `grm repos sync config` as
above. But there is an even easier way: We can directly clone the repositories!
```bash
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/
[] Cloning into "~/code/github.com/git-repo-manager" from "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned
[] git-repo-manager: OK
```
Nice! Just to make sure, let's run the same command again:
```bash
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/
[] git-repo-manager: OK
```
GRM saw that the repository is already there and did nothing (remember, it's
lazy).
## Using worktrees
Worktrees are something that make it easier to work with multiple branches at
the same time in a repository. Let's say we wanted to hack on the codebase of
GRM:
```bash
$ cd ~/code/github.com/git-repo-manager
$ ls
.gitignore
Cargo.toml
...
```
Well, this is just a normal git repository. But let's try worktrees! First, we
have to convert the existing repository to use the special worktree setup. For
all worktree operations, we will use `grm worktree` (or `grm wt` for short):
```bash
$ grm wt convert
[] Conversion done
$ ls
$
```
So, the code is gone? Not really, there is just no active worktree right now. So
let's add one for `master`:
```bash
$ grm wt add master --track origin/master
[] Worktree master created
$ ls
master
$ (cd ./master && git status)
On branch master
nothing to commit, working tree clean
```
Now, a single worktree is kind of pointless (if we only have one, we could also
just use the normal setup, without worktrees). So let's add another one for
`develop`:
```bash
$ grm wt add develop --track origin/develop
[] Worktree develop created
$ ls
develop
master
$ (cd ./develop && git status)
On branch develop
nothing to commit, working tree clean
```
What's the point? The cool thing is that we can now start working in the
`develop` worktree, without affecting the `master` worktree at all. If you're
working on `develop` and want to quickly see what a certain file looks like in
`master`, just look inside `./master`, it's all there!
This becomes especially interesting when you have many feature branches and are
working on multiple features at the same time.
There are a lot of options that influence how worktrees are handled. Maybe you
want to automatically track `origin/master` when you add a worktree called
`master`? Maybe you want your feature branches to have a prefix, so when you're
working on the `feature1` worktree, the remote branch will be
`origin/awesomefeatures/feature1`? Check out [the chapter on
worktrees](./worktrees.md) for all the things that are possible.

View File

@@ -0,0 +1,32 @@
# Behavior Details
When working with worktrees and GRM, there is a lot going on under the hood.
Each time you create a new worktree, GRM has to figure out what commit to set
your new branch to and how to configure any potential remote branches.
To state again, the most important guideline is the following:
**The branch inside the worktree is always the same as the directory name of the
worktree.**
The second set of guidelines relates to the commit to check out, and the remote
branches to use:
* When a branch already exists, you will get a worktree for that branch
* Existing local branches are never changed
* Only do remote operations if specifically requested (via configuration file or
command line parameters)
* When you specify `--track`, you will get that exact branch as the tracking
branch
* When you specify `--no-track`, you will get no tracking branch
Apart from that, GRM tries to do The Right Thing<sup>TM</sup>. It should be as
little surprising as possible.
In 99% of the cases, you will not have to care about the details, as the normal
workflows are covered by the rules above. In case you want to know the exact
behavior "specification", take a look at the [module documentation for
`grm::worktree`](https://docs.rs/git-repo-manager/latest/grm/worktree/index.html).
If you think existing behavior is super-duper confusing and you have a better
idea, do not hesitate to open a GitHub issue to discuss this!

View File

@@ -0,0 +1,75 @@
# Worktrees and Remotes
To fetch all remote references from all remotes in a worktree setup, you can use
the following command:
```
$ grm wt fetch
[✔] Fetched from all remotes
```
This is equivalent to running `git fetch --all` in any of the worktrees.
Often, you may want to pull all remote changes into your worktrees. For this,
use the `git pull` equivalent:
```
$ grm wt pull
[✔] master: Done
[✔] my-cool-branch: Done
```
This will refuse when there are local changes, or if the branch cannot be fast
forwarded. If you want to rebase your local branches, use the `--rebase` switch:
```
$ grm wt pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
As noted, this will fail if there are any local changes in your worktree. If you
want to stash these changes automatically before the pull (and unstash them
afterwards), use the `--stash` option.
This will rebase your changes onto the upstream branch. This is mainly helpful
for persistent branches that change on the remote side.
There is a similar rebase feature that rebases onto the **default** branch
instead:
```
$ grm wt rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
This is super helpful for feature branches. If you want to incorporate changes
made on the remote branches, use `grm wt rebase` and all your branches will be
up to date. If you want to also update to remote tracking branches in one go,
use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
on non-fast-forwards:
```
$ grm wt rebase --pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
"So, what's the difference between `pull --rebase` and `rebase --pull`? Why the
hell is there a `--rebase` flag in the `rebase` command?"
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree to
their remote branch, if possible. `rebase` rebases onto the **default** branch
instead. The switches to `rebase` are just convenience, so you do not have to
run two commands.
* `rebase --pull` is the same as `pull` && `rebase`
* `rebase --pull --rebase` is the same as `pull --rebase` && `rebase`
I understand that the UX is not the most intuitive. If you can think of an
improvement, please let me know (e.g. via a GitHub issue)!
As with `pull`, `rebase` will also refuse to run when there are changes in your
worktree. And you can also use the `--stash` option to stash/unstash changes
automatically.

View File

@@ -0,0 +1,173 @@
# Working with Worktrees
## Creating a new worktree
To actually work, you'll first have to create a new worktree checkout. All
worktree-related commands are available as subcommands of `grm worktree` (or
`grm wt` for short):
```
$ grm wt add mybranch
[✔] Worktree mybranch created
```
You'll see that there is now a directory called `mybranch` that contains a
checkout of your repository, using the branch `mybranch`
```bash
$ cd ./mybranch && git status
On branch mybranch
nothing to commit, working tree clean
```
You can work in this repository as usual. Make changes, commit them, revert
them, whatever you're up to :)
Just note that you *should* not change the branch inside the worktree directory.
There is nothing preventing you from doing so, but you will notice that you'll
run into problems when trying to remove a worktree (more on that later). It may
also lead to confusing behavior, as there can be no two worktrees that have the
same branch checked out. So if you decide to use the worktree setup, go all in,
let `grm` manage your branches and bury `git branch` (and `git checkout -b`).
You will notice that there is no tracking branch set up for the new branch. You
can of course set up one manually after creating the worktree, but there is an
easier way, using the `--track` flag during creation. Let's create another
worktree. Go back to the root of the repository, and run:
```bash
$ grm wt add mybranch2 --track origin/mybranch2
[] Worktree mybranch2 created
```
You'll see that this branch is now tracking `mybranch2` on the `origin` remote:
```bash
$ cd ./mybranch2 && git status
On branch mybranch2
Your branch is up to date with 'origin/mybranch2'.
nothing to commit, working tree clean
```
The behavior of `--track` differs depending on the existence of the remote
branch:
* If the remote branch already exists, `grm` uses it as the base of the new
local branch.
* If the remote branch does not exist (as in our example), `grm` will create a
new remote tracking branch, using the default branch (either `main` or
`master`) as the base
Often, you'll have a workflow that uses tracking branches by default. It would
be quite tedious to add `--track` every single time. Luckily, the `grm.toml`
file supports defaults for the tracking behavior. See this for an example:
```toml
[track]
default = true
default_remote = "origin"
```
This will set up a tracking branch on `origin` that has the same name as the
local branch.
Sometimes, you might want to have a certain prefix for all your tracking
branches. Maybe to prevent collisions with other contributors. You can simply
set `default_remote_prefix` in `grm.toml`:
```toml
[track]
default = true
default_remote = "origin"
default_remote_prefix = "myname"
```
When using branch `my-feature-branch`, the remote tracking branch would be
`origin/myname/my-feature-branch` in this case.
Note that `--track` overrides any configuration in `grm.toml`. If you want to
disable tracking, use `--no-track`.
## Showing the status of your worktrees
There is a handy little command that will show you an overview of all
worktrees in a repository, including their status (i.e. changed files). Just run
the following in the root of your repository:
```
$ grm wt status
╭───────────┬────────┬──────────┬──────────────────╮
│ Worktree ┆ Status ┆ Branch ┆ Remote branch │
╞═══════════╪════════╪══════════╪══════════════════╡
│ mybranch ┆ ✔ ┆ mybranch ┆ │
│ mybranch2 ┆ ✔      ┆ mybranch2 ┆ origin/mybranch2 │
╰───────────┴────────┴──────────┴──────────────────╯
```
The "Status" column would show any uncommitted changes (new / modified / deleted
files) and the "Remote branch" would show differences to the remote branch (e.g.
if there are new pushes to the remote branch that are not yet incorporated into
your local branch).
## Deleting worktrees
If you're done with your worktrees, use `grm wt delete` to delete them. Let's
start with `mybranch2`:
```
$ grm wt delete mybranch2
[✔] Worktree mybranch2 deleted
```
Easy. On to `mybranch`:
```
$ grm wt delete mybranch
[!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
```
Hmmm. `grm` tells you:
"Hey, there is no remote branch that you could have pushed your changes to. I'd
rather not delete work that you cannot recover."
Note that `grm` is very cautious here. As your repository will not be deleted,
you could still recover the commits via
[`git-reflog`](https://git-scm.com/docs/git-reflog). But better safe than
sorry! Note that you'd get a similar error message if your worktree had any
uncommitted files, for the same reason. Now you can either commit & push your
changes, or your tell `grm` that you know what you're doing:
```
$ grm wt delete mybranch --force
[✔] Worktree mybranch deleted
```
If you just want to delete all worktrees that do not contain any changes, you
can also use the following:
```
$ grm wt clean
```
Note that this will not delete the default branch of the repository. It can of
course still be deleted with `grm wt delete` if necessary.
### Converting an existing repository
It is possible to convert an existing directory to a worktree setup, using `grm
wt convert`. This command has to be run in the root of the repository you want
to convert:
```
$ grm wt convert
[✔] Conversion successful
```
This command will refuse to run if you have any changes in your repository.
Commit them and try again!
Afterwards, the directory is empty, as there are no worktrees checked out yet.
Now you can use the usual commands to set up worktrees.

View File

@@ -1,58 +1,60 @@
# Git Worktrees
## Why?
The default workflow when using git is having your repository in a single
directory. Then, you can check out a certain reference (usually a branch),
which will update the files in the directory to match the state of that
reference. Most of the time, this is exactly what you need and works perfectly.
But especially when you're working with branches a lot, you may notice that
there is a lot of work required to make everything run smoothly.
The default workflow when using git is having your repository in a single directory.
Then, you can check out a certain reference (usually a branch), which will update
the files in the directory to match the state of that reference. Most of the time,
this is exactly what you need and works perfectly. But especially when you're using
with branches a lot, you may notice that there is a lot of work required to make
everything run smootly.
Maybe you experienced the following: You're working on a feature branch. Then,
for some reason, you have to change branches (maybe to investigate some issue).
But you get the following:
Maybe you have experienced the following: You're working on a feature branch.
Then, for some reason, you have to change branches (maybe to investigate some
issue). But you get the following:
```
error: Your local changes to the following files would be overwritten by checkout
```
Now you can create a temporary commit or stash your changes. In any case, you have
some mental overhead before you can work on something else. Especially with stashes,
you'll have to remember to do a `git stash pop` before resuming your work (I
cannot count the number of times where is "rediscovered" some code hidden in some
old stash I forgot about.
Now you can create a temporary commit or stash your changes. In any case, you
have some mental overhead before you can work on something else. Especially with
stashes, you'll have to remember to do a `git stash pop` before resuming your
work (I cannot count the number of times where I "rediscovered" some code hidden
in some old stash I forgot about). Also, conflicts on a `git stash pop` are just
horrible.
And even worse: If you're currently in the process of resolving merge conflicts or an
interactive rebase, there is just no way to "pause" this work to check out a
different branch.
And even worse: If you're currently in the process of resolving merge conflicts
or an interactive rebase, there is just no way to "pause" this work to check out
a different branch.
Sometimes, it's crucial to have an unchanging state of your repository until some
long-running process finishes. I'm thinking of Ansible and Terraform runs. I'd
rather not change to a different branch while ansible or Terraform are running as
I have no idea how those tools would behave (and I'm not too eager to find out).
Sometimes, it's crucial to have an unchanging state of your repository until
some long-running process finishes. I'm thinking of Ansible and Terraform runs.
I'd rather not change to a different branch while ansible or Terraform are
running as I have no idea how those tools would behave (and I'm not too eager to
find out).
In any case, Git Worktrees are here for the rescue:
## What are git worktrees?
[Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have multiple
independent checkouts of your repository on different directories. You can have
multiple directories that correspond to different references in your repository.
Each worktree has it's independent working tree (duh) and index, so there is no
to run into conflicts. Changing to a different branch is just a `cd` away (if
the worktree is already set up).
[Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have
multiple independent checkouts of your repository on different directories. You
can have multiple directories that correspond to different references in your
repository. Each worktree has it's independent working tree (duh) and index, so
there is no way to run into conflicts. Changing to a different branch is just a
`cd` away (if the worktree is already set up).
## Worktrees in GRM
GRM exposes an opinionated way to use worktrees in your repositories. Opinionated,
because there is a single invariant that makes reasoning about your worktree
setup quite easy:
GRM exposes an opinionated way to use worktrees in your repositories.
Opinionated, because there is a single invariant that makes reasoning about your
worktree setup quite easy:
**The branch inside the worktree is always the same as the directory name of the worktree.**
**The branch inside the worktree is always the same as the directory name of the
worktree.**
In other words: If you're checking out branch `mybranch` into a new worktree, the
worktree directory will be named `mybranch`.
In other words: If you're checking out branch `mybranch` into a new worktree,
the worktree directory will be named `mybranch`.
GRM can be used with both "normal" and worktree-enabled repositories. But note
that a single repository can be either the former or the latter. You'll have to
@@ -67,303 +69,27 @@ name = "git-repo-manager"
worktree_setup = true
```
Now, when you run a `grm sync`, you'll notice that the directory of the repository
is empty! Well, not totally, there is a hidden directory called `.git-main-working-tree`.
This is where the repository actually "lives" (it's a bare checkout).
Now, when you run a `grm sync`, you'll notice that the directory of the
repository is empty! Well, not totally, there is a hidden directory called
`.git-main-working-tree`. This is where the repository actually "lives" (it's a
bare checkout).
Note that there are few specific things you can configure for a certain
workspace. This is all done in an optional `grm.toml` file right in the root
of the worktree. More on that later.
workspace. This is all done in an optional `grm.toml` file right in the root of
the worktree. More on that later.
### Creating a new worktree
To actually work, you'll first have to create a new worktree checkout. All
worktree-related commands are available as subcommands of `grm worktree` (or
`grm wt` for short):
## Manual access
```
$ grm wt add mybranch
[✔] Worktree mybranch created
```
You'll see that there is now a directory called `mybranch` that contains a checkout
of your repository, using the branch `mybranch`
```bash
$ cd ./mybranch && git status
On branch mybranch
nothing to commit, working tree clean
```
You can work in this repository as usual. Make changes, commit them, revert them,
whatever you're up to :)
Just note that you *should* not change the branch inside the worktree
directory. There is nothing preventing you from doing so, but you will notice
that you'll run into problems when trying to remove a worktree (more on that
later). It may also lead to confusing behaviour, as there can be no two
worktrees that have the same branch checked out. So if you decide to use the
worktree setup, go all in, let `grm` manage your branches and bury `git branch`
(and `git checkout -b`).
You will notice that there is no tracking branch set up for the new branch. You
can of course set up one manually after creating the worktree, but there is an
easier way, using the `--track` flag during creation. Let's create another
worktree. Go back to the root of the repository, and run:
```bash
$ grm wt add mybranch2 --track origin/mybranch2
[✔] Worktree mybranch2 created
```
You'll see that this branch is now tracking `mybranch2` on the `origin` remote:
```bash
$ cd ./mybranch2 && git status
On branch mybranch2
Your branch is up to date with 'origin/mybranch2'.
nothing to commit, working tree clean
```
The behaviour of `--track` differs depending on the existence of the remote branch:
* If the remote branch already exists, `grm` uses it as the base of the new
local branch.
* If the remote branch does not exist (as in our example), `grm` will create a
new remote tracking branch, using the default branch (either `main` or `master`)
as the base
Often, you'll have a workflow that uses tracking branches by default. It would
be quite tedious to add `--track` every single time. Luckily, the `grm.toml` file
supports defaults for the tracking behaviour. See this for an example:
```toml
[track]
default = true
default_remote = "origin"
```
This will set up a tracking branch on `origin` that has the same name as the local
branch.
Sometimes, you might want to have a certain prefix for all your tracking branches.
Maybe to prevent collisions with other contributors. You can simply set
`default_remote_prefix` in `grm.toml`:
```toml
[track]
default = true
default_remote = "origin"
default_remote_prefix = "myname"
```
When using branch `my-feature-branch`, the remote tracking branch would be
`origin/myname/my-feature-branch` in this case.
Note that `--track` overrides any configuration in `grm.toml`. If you want to
disable tracking, use `--no-track`.
### Showing the status of your worktrees
There is a handy little command that will show you an overview of all worktrees
in a repository, including their status (i.e. changed files). Just run the following
in the root of your repository:
```
$ grm wt status
╭───────────┬────────┬──────────┬──────────────────╮
│ Worktree ┆ Status ┆ Branch ┆ Remote branch │
╞═══════════╪════════╪══════════╪══════════════════╡
│ mybranch ┆ ✔ ┆ mybranch ┆ │
│ mybranch2 ┆ ✔ ┆ mybranch ┆ origin/mybranch2 │
╰───────────┴────────┴──────────┴──────────────────╯
```
The "Status" column would show any uncommitted changes (new / modified / deleted
files) and the "Remote branch" would show differences to the remote branch (e.g.
if there are new pushes to the remote branch that are not yet incorporated into
your local branch).
### Deleting worktrees
If you're done with your worktrees, use `grm wt delete` to delete them. Let's
start with `mybranch2`:
```
$ grm wt delete mybranch2
[✔] Worktree mybranch2 deleted
```
Easy. On to `mybranch`:
```
$ grm wt delete mybranch
[!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
```
Hmmm. `grm` tells you:
"Hey, there is no remote branch that you could have pushed
your changes to. I'd rather not delete work that you cannot recover."
Note that `grm` is very cautious here. As your repository will not be deleted,
you could still recover the commits via [`git-reflog`](https://git-scm.com/docs/git-reflog).
But better safe than sorry! Note that you'd get a similar error message if your
worktree had any uncommitted files, for the same reason. Now you can either
commit & push your changes, or your tell `grm` that you know what you're doing:
```
$ grm wt delete mybranch --force
[✔] Worktree mybranch deleted
```
If you just want to delete all worktrees that do not contain any changes, you
can also use the following:
```
$ grm wt clean
```
Note that this will not delete the default branch of the repository. It can of
course still be deleted with `grm wt delete` if necessary.
### Persistent branches
You most likely have a few branches that are "special", that you don't want to
clean up and that are the usual target for feature branches to merge into. GRM
calls them "persistent branches" and treats them a bit differently:
* Their worktrees will never be deleted by `grm wt clean`
* If the branches in other worktrees are merged into them, they will be cleaned
up, even though they may not be in line with their upstream. Same goes for
`grm wt delete`, which will not require a `--force` flag. Note that of
course, actual changes in the worktree will still block an automatic cleanup!
* As soon as you enable persistent branches, non-persistent branches will only
ever be cleaned up when merged into a persistent branch.
To elaborate: This is mostly relevant for a feature-branch workflow. Whenever a
feature branch is merged, it can usually be thrown away. As merging is usually
done on some remote code management platform (GitHub, GitLab, ...), this means
that you usually keep a branch around until it is merged into one of the "main"
branches (`master`, `main`, `develop`, ...)
Enable persistent branches by setting the following in the `grm.toml` in the
worktree root:
```toml
persistent_branches = [
"master",
"develop",
]
```
Note that setting persistent branches will disable any detection of "default"
branches. The first entry will be considered your repositories' default branch.
### Converting an existing repository
It is possible to convert an existing directory to a worktree setup, using `grm
wt convert`. This command has to be run in the root of the repository you want
to convert:
```
grm wt convert
[✔] Conversion successful
```
This command will refuse to run if you have any changes in your repository.
Commit them and try again!
Afterwards, the directory is empty, as there are no worktrees checked out yet.
Now you can use the usual commands to set up worktrees.
### Working with remotes
To fetch all remote references from all remotes in a worktree setup, you can
use the following command:
```
grm wt fetch
[✔] Fetched from all remotes
```
This is equivalent to running `git fetch --all` in any of the worktrees.
Often, you may want to pull all remote changes into your worktrees. For this,
use the `git pull` equivalent:
```
grm wt pull
[✔] master: Done
[✔] my-cool-branch: Done
```
This will refuse when there are local changes, or if the branch cannot be fast
forwarded. If you want to rebase your local branches, use the `--rebase` switch:
```
grm wt pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
As noted, this will fail if there are any local changes in your worktree. If you
want to stash these changes automatically before the pull (and unstash them
afterwards), use the `--stash` option.
This will rebase your changes onto the upstream branch. This is mainly helpful
for persistent branches that change on the remote side.
There is a similar rebase feature that rebases onto the **default** branch instead:
```
grm wt rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
This is super helpful for feature branches. If you want to incorporate changes
made on the remote branches, use `grm wt rebase` and all your branches will
be up to date. If you want to also update to remote tracking branches in one go,
use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
on non-fast-forwards:
```
grm wt rebase --pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
"So, what's the difference between `pull --rebase` and `rebase --pull`? Why the
hell is there a `--rebase` flag in the `rebase` command?"
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree
to its remote branch, if possible. `rebase` rebases onto the **default** branch
instead. The switches to `rebase` are just convenience, so you do not have to
run two commands.
* `rebase --pull` is the same as `pull` && `rebase`
* `rebase --pull --rebase` is the same as `pull --rebase` && `rebase`
I understand that the UX is not the most intuitive. If you can think of an
improvement, please let me know (e.g. via a GitHub issue)!
As with `pull`, `rebase` will also refuse to run when there are changes in your
worktree. And you can also use the `--stash` option to stash/unstash changes
automatically.
### Manual access
GRM isn't doing any magic, it's just git under the hood. If you need to have access
to the underlying git repository, you can always do this:
GRM isn't doing any magic, it's just git under the hood. If you need to have
access to the underlying git repository, you can always do this:
```
$ git --git-dir ./.git-main-working-tree [...]
```
This should never be required (whenever you have to do this, you can consider
this a bug in GRM and open an [issue](https://github.com/hakoerber/git-repo-manager/issues/new)),
but it may help in a pinch.
this a bug in GRM and open an
[issue](https://github.com/hakoerber/git-repo-manager/issues/new)), but it may
help in a pinch.

View File

@@ -1,2 +1 @@
/venv/
/__pycache__/

14
e2e_tests/conftest.py Normal file
View File

@@ -0,0 +1,14 @@
import os
from helpers import *
def pytest_configure(config):
    """Pytest hook: pin the git author/committer identity via environment
    variables so commits created during the end-to-end tests are
    deterministic and independent of the local git configuration."""
    identity = {
        "GIT_AUTHOR_NAME": "Example user",
        "GIT_AUTHOR_EMAIL": "user@example.com",
        "GIT_COMMITTER_NAME": "Example user",
        "GIT_COMMITTER_EMAIL": "user@example.com",
    }
    for variable, value in identity.items():
        os.environ[variable] = value
def pytest_unconfigure(config):
    """Pytest hook: nothing to clean up; present for symmetry with
    pytest_configure."""
    pass

View File

@@ -0,0 +1,32 @@
# Compose setup for the end-to-end tests: a pytest runner container plus a
# mock REST API ("rest") that the tests talk to.
version: "3.7"
services:
  pytest:
    build: ./docker
    volumes:
      # Test sources are mounted read-only; all scratch data goes to the
      # tmpfs mounted at /tmp (selected via TMPDIR below).
      - type: bind
        source: ./
        target: /tests
        read_only: true
      - type: tmpfs
        target: /tmp
    environment:
      TMPDIR: /tmp
    depends_on:
      - rest
    # Placeholder command; the real test command is supplied at run time.
    command:
      - "true"
    networks:
      main:
  rest:
    build: ./docker-rest/
    expose:
      - "5000"
    networks:
      main:
        aliases:
          # Second DNS name so tests can reach the same service under an
          # alternate hostname.
          - alternate-rest
networks:
  main:

View File

@@ -0,0 +1,19 @@
# Image for the mock REST server used by the e2e tests: Debian with Flask
# and Jinja2 from the distro packages, serving the app copied from ./flask.
FROM docker.io/debian:11.3
WORKDIR /app
# Tell "flask run" which module to load.
ENV FLASK_APP=app.py
RUN apt-get update \
    && apt-get install -y \
    dumb-init \
    python3-flask \
    python3-jinja2 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
EXPOSE 5000
COPY flask .
# dumb-init as PID 1 for proper signal forwarding to the Flask process.
CMD ["/usr/bin/dumb-init", "--", "flask", "run", "--port", "5000", "--host", "0.0.0.0"]

View File

@@ -0,0 +1,7 @@
from flask import Flask

# Central Flask application object; route modules attach their handlers to it.
app = Flask(__name__)
# Treat "/path" and "/path/" as the same route.
app.url_map.strict_slashes = False

# Imported for their side effect of registering routes on `app` via
# @app.route decorators.
import github
import gitlab

View File

@@ -0,0 +1,103 @@
import os.path
from app import app
from flask import Flask, request, abort, jsonify, make_response
import jinja2
def check_headers():
    # Validate the headers every mocked GitHub API request must carry.
    # A wrong Accept header is treated as a test bug (plain 500); wrong
    # credentials mimic GitHub's real 401 "Bad credentials" JSON response.
    if request.headers.get("accept") != "application/vnd.github.v3+json":
        app.logger.error("Invalid accept header")
        abort(500)
    auth_header = request.headers.get("authorization")
    if auth_header != "token secret-token:myauthtoken":
        app.logger.error("Invalid authorization header: %s", auth_header)
        abort(
            make_response(
                jsonify(
                    {
                        "message": "Bad credentials",
                        "documentation_url": "https://docs.example.com/rest",
                    }
                ),
                401,
            )
        )
def add_pagination(response, page, last_page):
    # Attach a GitHub-style "link" header to `response`, advertising the
    # "next" (when not on the last page) and "last" page URLs while
    # preserving all other query parameters of the current request.
    host = request.headers["host"]
    link_header = ""

    def args(page):
        # Rebuild the query string with the page parameter replaced.
        args = request.args.copy()
        args["page"] = page
        return "&".join([f"{k}={v}" for k, v in args.items()])

    if page < last_page:
        link_header += (
            f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", '
        )
    link_header += (
        f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"'
    )
    response.headers["link"] = link_header
def read_project_files(namespaces=[]):
    """Return one page of the mocked repository listing.

    The page number comes from the ``page`` query parameter (default 1).
    The body is rendered from the Jinja2 template
    ``./github_api_page_<page>.json.j2`` with the namespace assigned to
    that page; an empty JSON list is returned when no template exists for
    the requested page.

    NOTE(review): ``namespaces`` is indexed with ``page - 1``, so callers
    must pass at least as many entries as there are template pages.
    """
    # The mock data set always spans four pages (see the *.json.j2 files).
    last_page = 4
    # Fixed: the original chained assignment (`page = username = int(...)`)
    # left behind an unused, misleadingly named `username` local.
    page = int(request.args.get("page", "1"))
    response_file = f"./github_api_page_{page}.json.j2"
    if not os.path.exists(response_file):
        return jsonify([])
    response = make_response(
        jinja2.Template(open(response_file).read()).render(
            namespace=namespaces[page - 1]
        )
    )
    add_pagination(response, page, last_page)
    response.headers["content-type"] = "application/json"
    return response
def single_namespaced_projects(namespace):
    # All four mock result pages belong to the same namespace.
    return read_project_files([namespace] * 4)


def mixed_projects(namespaces):
    # Thin wrapper: one namespace per mock result page.
    return read_project_files(namespaces)
@app.route("/github/users/<string:user>/repos/")
def github_user_repos(user):
    # Mock of "list repositories for a user"; only "myuser1" has data.
    check_headers()
    if user == "myuser1":
        return single_namespaced_projects("myuser1")
    return jsonify([])
@app.route("/github/orgs/<string:group>/repos/")
def github_group_repos(group):
    """Mock of "list organization repositories".

    Requires ``type=all`` in the query string; only "mygroup1" has data.
    """
    check_headers()
    if request.args.get("type") != "all":
        abort(500, "wrong arguments")
    if group != "mygroup1":
        return jsonify([])
    return single_namespaced_projects("mygroup1")
@app.route("/github/user/repos/")
def github_own_repos():
    # Mock of "list repositories for the authenticated user": spreads the
    # four mock pages across two users and two groups.
    check_headers()
    return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"])
@app.route("/github/user/")
def github_user():
    # Mock of "get the authenticated user": serves a canned JSON document.
    check_headers()
    response = make_response(open("./github_api_user.json").read())
    response.headers["content-type"] = "application/json"
    return response

View File

@@ -0,0 +1,228 @@
[
{
"id": 1,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject1",
"full_name": "{{ namespace }}/myproject1",
"private": true,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject1",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject1",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject1/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject1/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject1/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject1/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject1/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject1/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject1/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject1/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject1/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject1/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject1/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject1/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject1/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject1/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject1/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject1.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject1.git",
"clone_url": "https://example.com/{{ namespace }}/myproject1.git",
"svn_url": "https://example.com/{{ namespace }}/myproject1",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
},
{
"id": 2,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject2",
"full_name": "{{ namespace }}/myproject2",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject2",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject2",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject2/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject2/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject2/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject2/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject2/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject2/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject2/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject2/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject2/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject2/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject2/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject2/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject2/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject2/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject2/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject2.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject2.git",
"clone_url": "https://example.com/{{ namespace }}/myproject2.git",
"svn_url": "https://example.com/{{ namespace }}/myproject2",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,115 @@
[
{
"id": 3,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject3",
"full_name": "{{ namespace }}/myproject3",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject3",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject3",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject3/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject3/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject3/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject3/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject3/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject3/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject3/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject3/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject3/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject3/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject3/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject3/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject3/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject3/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject3/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject3.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject3.git",
"clone_url": "https://example.com/{{ namespace }}/myproject3.git",
"svn_url": "https://example.com/{{ namespace }}/myproject3",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,115 @@
[
{
"id": 3,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject4",
"full_name": "{{ namespace }}/myproject4",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject4",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject4",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject4/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject4/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject4/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject4/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject4/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject4/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject4/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject4/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject4/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject4/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject4/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject4/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject4/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject4/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject4/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject4.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject4.git",
"clone_url": "https://example.com/{{ namespace }}/myproject4.git",
"svn_url": "https://example.com/{{ namespace }}/myproject4",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,115 @@
[
{
"id": 3,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject5",
"full_name": "{{ namespace }}/myproject5",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject5",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject5",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject5/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject5/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject5/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject5/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject5/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject5/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject5/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject5/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject5/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject5/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject5/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject5/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject5/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject5/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject5/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject5.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject5.git",
"clone_url": "https://example.com/{{ namespace }}/myproject5.git",
"svn_url": "https://example.com/{{ namespace }}/myproject5",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,46 @@
{
"login": "myuser1",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://example.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.example.com/users/octocat",
"html_url": "https://example.com/octocat",
"followers_url": "https://api.example.com/users/octocat/followers",
"following_url": "https://api.example.com/users/octocat/following{/other_user}",
"gists_url": "https://api.example.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.example.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/octocat/subscriptions",
"organizations_url": "https://api.example.com/users/octocat/orgs",
"repos_url": "https://api.example.com/users/octocat/repos",
"events_url": "https://api.example.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.example.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
"name": "monalisa octocat",
"company": "GitHub",
"blog": "https://example.com/blog",
"location": "San Francisco",
"email": "octocat@example.com",
"hireable": false,
"bio": "There once was...",
"twitter_username": "monatheoctocat",
"public_repos": 2,
"public_gists": 1,
"followers": 20,
"following": 0,
"created_at": "2008-01-14T04:33:35Z",
"updated_at": "2008-01-14T04:33:35Z",
"private_gists": 81,
"total_private_repos": 100,
"owned_private_repos": 100,
"disk_usage": 10000,
"collaborators": 8,
"two_factor_authentication": true,
"plan": {
"name": "Medium",
"space": 400,
"private_repos": 20,
"collaborators": 0
}
}

View File

@@ -0,0 +1,106 @@
import os.path
from app import app
from flask import Flask, request, abort, jsonify, make_response
import jinja2
def check_headers():
    """Validate the Accept and Authorization headers of the request.

    Aborts with a bare 500 on a wrong Accept header, and with a
    GitHub-style 401 JSON body on bad credentials.
    """
    accept = request.headers.get("accept")
    if accept != "application/json":
        app.logger.error("Invalid accept header")
        abort(500)
    token = request.headers.get("authorization")
    if token == "bearer secret-token:myauthtoken":
        return
    app.logger.error("Invalid authorization header: %s", token)
    body = jsonify(
        {
            "message": "Bad credentials",
            "documentation_url": "https://docs.example.com/rest",
        }
    )
    abort(make_response(body, 401))
def add_pagination(response, page, last_page):
    """Attach a GitHub-style ``link`` header with next/last page URLs."""
    host = request.headers["host"]

    def with_page(target):
        # Rebuild the current query string with ``page`` overridden.
        query = request.args.copy()
        query["page"] = target
        return "&".join(f"{k}={v}" for k, v in query.items())

    base = f"{request.scheme}://{host}{request.path}"
    parts = []
    if page < last_page:
        parts.append(f'<{base}?{with_page(page + 1)}>; rel="next", ')
    parts.append(f'<{base}?{with_page(last_page)}>; rel="last"')
    response.headers["link"] = "".join(parts)
def read_project_files(namespaces=None):
    """Render the paginated project-list fixture for the requested page.

    Reads ``./gitlab_api_page_<page>.json`` (a Jinja template), renders
    it with the namespace assigned to that page, and returns it with
    pagination headers.  ``namespaces`` must contain one entry per page.
    Returns an empty JSON list when the fixture file does not exist.
    """
    # Avoid the mutable-default-argument pitfall; [] as a literal default
    # is shared between calls.
    if namespaces is None:
        namespaces = []
    last_page = 4
    # Original assigned an unused ``username`` alias here; dropped.
    page = int(request.args.get("page", "1"))
    response_file = f"./gitlab_api_page_{page}.json"
    if not os.path.exists(response_file):
        return jsonify([])
    # Use a context manager so the fixture file handle is closed.
    with open(response_file) as handle:
        template = jinja2.Template(handle.read())
    response = make_response(template.render(namespace=namespaces[page - 1]))
    add_pagination(response, page, last_page)
    response.headers["content-type"] = "application/json"
    return response
def single_namespaced_projects(namespace):
    """Serve the four fixture pages, all under the same namespace."""
    pages = [namespace] * 4
    return read_project_files(pages)
def mixed_projects(namespaces):
    """Serve the fixture pages, one namespace per page."""
    result = read_project_files(namespaces)
    return result
@app.route("/gitlab/api/v4/users/<string:user>/projects")
def gitlab_user_repos(user):
check_headers()
if user == "myuser1":
return single_namespaced_projects("myuser1")
return jsonify([])
@app.route("/gitlab/api/v4/groups/<string:group>/projects")
def gitlab_group_repos(group):
check_headers()
if not (
request.args.get("include_subgroups") == "true"
and request.args.get("archived") == "false"
):
abort(500, "wrong arguments")
if group == "mygroup1":
return single_namespaced_projects("mygroup1")
return jsonify([])
@app.route("/gitlab/api/v4/projects/")
def gitlab_own_repos():
check_headers()
return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"])
@app.route("/gitlab/api/v4/user/")
def gitlab_user():
check_headers()
response = make_response(open("./gitlab_api_user.json").read())
response.headers["content-type"] = "application/json"
return response

View File

@@ -0,0 +1,236 @@
[
{
"id": 1,
"description": "",
"name": "myproject1",
"name_with_namespace": "{{ namespace }} / myproject1",
"path": "myproject1",
"path_with_namespace": "{{ namespace }}/myproject1",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject1.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject1.git",
"web_url": "https://example.com/{{ namespace }}/myproject1",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject1",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "private",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject1-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
},
{
"id": 2,
"description": "",
"name": "myproject2",
"name_with_namespace": "{{ namespace }} / myproject2",
"path": "myproject2",
"path_with_namespace": "{{ namespace }}/myproject2",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject2.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject2.git",
"web_url": "https://example.com/{{ namespace }}/myproject2",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject2",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject2-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,119 @@
[
{
"id": 3,
"description": "",
"name": "myproject3",
"name_with_namespace": "{{ namespace }} / myproject3",
"path": "myproject3",
"path_with_namespace": "{{ namespace }}/myproject3",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject3.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject3.git",
"web_url": "https://example.com/{{ namespace }}/myproject3",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject3",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject3-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,119 @@
[
{
"id": 4,
"description": "",
"name": "myproject4",
"name_with_namespace": "{{ namespace }} / myproject4",
"path": "myproject4",
"path_with_namespace": "{{ namespace }}/myproject4",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject4.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject4.git",
"web_url": "https://example.com/{{ namespace }}/myproject4",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject4",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject4-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,119 @@
[
{
"id": 5,
"description": "",
"name": "myproject5",
"name_with_namespace": "{{ namespace }} / myproject5",
"path": "myproject5",
"path_with_namespace": "{{ namespace }}/myproject5",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject5.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject5.git",
"web_url": "https://example.com/{{ namespace }}/myproject5",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject5",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject5-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,42 @@
{
"id": 1,
"username": "myuser1",
"name": "My User",
"state": "active",
"avatar_url": "https://example.com/avatar",
"web_url": "https://example.com/myuser1",
"created_at": "2016-12-10T10:09:11.585Z",
"bio": "",
"location": "",
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": "",
"job_title": "",
"pronouns": "",
"bot": false,
"work_information": null,
"followers": 0,
"following": 0,
"is_followed": false,
"local_time": "11:59 PM",
"last_sign_in_at": "2020-03-14T09:13:44.977Z",
"confirmed_at": "2022-05-19T23:48:47.033Z",
"last_activity_on": "2022-05-19",
"email": "myuser1@example.com",
"theme_id": null,
"color_scheme_id": 1,
"projects_limit": 100000,
"current_sign_in_at": "2022-05-19T23:45:49.661Z",
"identities": [],
"can_create_group": true,
"can_create_project": true,
"two_factor_enabled": false,
"external": false,
"private_profile": false,
"commit_email": "myuser1@example.com",
"shared_runners_minutes_limit": 2000,
"extra_shared_runners_minutes_limit": null
}

View File

@@ -0,0 +1,14 @@
FROM docker.io/debian:11.3
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
python3-pytest \
python3-toml \
python3-git \
python3-yaml \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /tests
ENTRYPOINT ["/bin/sh", "-c", "--"]

View File

@@ -5,12 +5,24 @@ import os.path
import subprocess
import tempfile
import hashlib
import shutil
import inspect
import git
binary = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "target/release/grm"
)
binary = os.environ["GRM_BINARY"]
def funcname():
return inspect.stack()[1][3]
def copytree(src, dest):
shutil.copytree(src, dest, dirs_exist_ok=True)
def get_temporary_directory(dir=None):
return tempfile.TemporaryDirectory(dir=dir)
def grm(args, cwd=None, is_invalid=False):
@@ -20,13 +32,19 @@ def grm(args, cwd=None, is_invalid=False):
print(f"grmcmd: {args}")
print(f"stdout:\n{cmd.stdout}")
print(f"stderr:\n{cmd.stderr}")
assert "secret-token:" not in cmd.stdout
assert "secret-token:" not in cmd.stderr
assert "panicked" not in cmd.stderr
return cmd
def shell(script):
script = "set -o errexit\nset -o nounset\n" + script
subprocess.run(["bash"], input=script, text=True, check=True)
script = "set -o errexit\nset -o nounset\nset -o pipefail\n" + script
cmd = subprocess.run(["bash"], input=script, text=True, capture_output=True)
if cmd.returncode != 0:
print(cmd.stdout)
print(cmd.stderr)
cmd.check_returncode()
def checksum_directory(path):
@@ -112,78 +130,204 @@ def checksum_directory(path):
class TempGitRepository:
def __init__(self, dir=None):
self.dir = dir
pass
def __enter__(self):
self.tmpdir = tempfile.TemporaryDirectory(dir=self.dir)
self.remote_1_dir = tempfile.TemporaryDirectory()
self.remote_2_dir = tempfile.TemporaryDirectory()
shell(
f"""
self.tmpdir = get_temporary_directory(self.dir)
self.remote_1 = get_temporary_directory()
self.remote_2 = get_temporary_directory()
cmd = f"""
cd {self.tmpdir.name}
git init
git -c init.defaultBranch=master init
echo test > root-commit
git add root-commit
git commit -m "root-commit"
git remote add origin file://{self.remote_1_dir.name}
git remote add otherremote file://{self.remote_2_dir.name}
git remote add origin file://{self.remote_1.name}
git remote add otherremote file://{self.remote_2.name}
"""
)
shell(cmd)
return self.tmpdir.name
def __exit__(self, exc_type, exc_val, exc_tb):
del self.tmpdir
del self.remote_1_dir
del self.remote_2_dir
pass
class TempGitRemote:
obj = {}
def __init__(self, tmpdir, remoteid=None):
self.tmpdir = tmpdir
self.remoteid = remoteid
@classmethod
def get(cls, cachekey=None, initfunc=None):
if cachekey is None:
tmpdir = get_temporary_directory()
shell(
f"""
cd {tmpdir.name}
git -c init.defaultBranch=master init --bare
"""
)
newobj = cls(tmpdir)
remoteid = None
if initfunc is not None:
remoteid = newobj.init(initfunc)
newobj.remoteid = remoteid
return newobj, remoteid
else:
refresh = False
if cachekey not in cls.obj:
tmpdir = get_temporary_directory()
shell(
f"""
cd {tmpdir.name}
git -c init.defaultBranch=master init --bare
"""
)
newobj = cls(tmpdir)
remoteid = newobj.init(initfunc)
newobj.remoteid = remoteid
cls.obj[cachekey] = newobj
return cls.clone(cls.obj[cachekey])
@classmethod
def clone(cls, source):
new_remote = get_temporary_directory()
copytree(source.tmpdir.name, new_remote.name)
return cls(new_remote, source.remoteid), source.remoteid
def init(self, func):
return func(self.tmpdir.name)
def __enter__(self):
return self.tmpdir
def __exit__(self, exc_type, exc_val, exc_tb):
pass
class TempGitRepositoryWorktree:
def __init__(self):
pass
obj = {}
def __enter__(self):
self.tmpdir = tempfile.TemporaryDirectory()
self.remote_1_dir = tempfile.TemporaryDirectory()
self.remote_2_dir = tempfile.TemporaryDirectory()
def __init__(self, remotes, tmpdir, commit, remote1, remote2, remote1id, remote2id):
self.remotes = remotes
self.tmpdir = tmpdir
self.commit = commit
self.remote1 = remote1
self.remote2 = remote2
self.remote1id = remote1id
self.remote2id = remote2id
@classmethod
def get(cls, cachekey, branch=None, remotes=2, basedir=None, remote_setup=None):
if cachekey not in cls.obj:
tmpdir = get_temporary_directory()
shell(
f"""
cd {self.remote_1_dir.name}
git init --bare
"""
)
shell(
f"""
cd {self.remote_2_dir.name}
git init --bare
"""
)
shell(
f"""
cd {self.tmpdir.name}
git init
cd {tmpdir.name}
git -c init.defaultBranch=master init
echo test > root-commit-in-worktree-1
git add root-commit-in-worktree-1
git commit -m "root-commit-in-worktree-1"
echo test > root-commit-in-worktree-2
git add root-commit-in-worktree-2
git commit -m "root-commit-in-worktree-2"
git remote add origin file://{self.remote_1_dir.name}
git remote add otherremote file://{self.remote_2_dir.name}
git push origin HEAD:master
git ls-files | xargs rm -rf
mv .git .git-main-working-tree
git --git-dir .git-main-working-tree config core.bare true
"""
)
commit = git.Repo(
f"{self.tmpdir.name}/.git-main-working-tree"
).head.commit.hexsha
return (self.tmpdir.name, commit)
repo = git.Repo(f"{tmpdir.name}/.git-main-working-tree")
commit = repo.head.commit.hexsha
if branch is not None:
repo.create_head(branch)
remote1 = None
remote2 = None
remote1id = None
remote2id = None
if remotes >= 1:
cachekeyremote, initfunc = (remote_setup or ((None, None),))[0]
remote1, remote1id = TempGitRemote.get(
cachekey=cachekeyremote, initfunc=initfunc
)
remote1 = remote1
remote1id = remote1id
shell(
f"""
cd {tmpdir.name}
git --git-dir .git-main-working-tree remote add origin file://{remote1.tmpdir.name}
"""
)
repo.remotes.origin.fetch()
repo.remotes.origin.push("master")
if remotes >= 2:
cachekeyremote, initfunc = (remote_setup or (None, (None, None)))[1]
remote2, remote2id = TempGitRemote.get(
cachekey=cachekeyremote, initfunc=initfunc
)
remote2 = remote2
remote2id = remote2id
shell(
f"""
cd {tmpdir.name}
git --git-dir .git-main-working-tree remote add otherremote file://{remote2.tmpdir.name}
"""
)
repo.remotes.otherremote.fetch()
repo.remotes.otherremote.push("master")
cls.obj[cachekey] = cls(
remotes, tmpdir, commit, remote1, remote2, remote1id, remote2id
)
return cls.clone(cls.obj[cachekey], remote_setup=remote_setup)
@classmethod
def clone(cls, source, remote_setup):
newdir = get_temporary_directory()
copytree(source.tmpdir.name, newdir.name)
remote1 = None
remote2 = None
remote1id = None
remote2id = None
repo = git.Repo(os.path.join(newdir.name, ".git-main-working-tree"))
if source.remotes >= 1:
cachekey, initfunc = (remote_setup or ((None, None),))[0]
remote1, remote1id = TempGitRemote.get(cachekey=cachekey, initfunc=initfunc)
if remote1id != source.remote1id:
repo.remotes.origin.fetch()
repo.remotes.origin.push("master")
if source.remotes >= 2:
cachekey, initfunc = (remote_setup or (None, (None, None)))[1]
remote2, remote2id = TempGitRemote.get(cachekey=cachekey, initfunc=initfunc)
if remote2id != source.remote2id:
repo.remotes.otherremote.fetch()
repo.remotes.otherremote.push("master")
return cls(
source.remotes,
newdir,
source.commit,
remote1,
remote2,
remote1id,
remote2id,
)
def __enter__(self):
return (self.tmpdir.name, self.commit)
def __exit__(self, exc_type, exc_val, exc_tb):
del self.tmpdir
del self.remote_1_dir
del self.remote_2_dir
pass
class RepoTree:
@@ -191,7 +335,7 @@ class RepoTree:
pass
def __enter__(self):
self.root = tempfile.TemporaryDirectory()
self.root = get_temporary_directory()
self.config = tempfile.NamedTemporaryFile()
with open(self.config.name, "w") as f:
f.write(
@@ -208,7 +352,7 @@ class RepoTree:
"""
)
cmd = grm(["repos", "sync", "--config", self.config.name])
cmd = grm(["repos", "sync", "config", "--config", self.config.name])
assert cmd.returncode == 0
return (self.root.name, self.config.name, ["test", "test_worktree"])
@@ -222,7 +366,7 @@ class EmptyDir:
pass
def __enter__(self):
self.tmpdir = tempfile.TemporaryDirectory()
self.tmpdir = get_temporary_directory()
return self.tmpdir.name
def __exit__(self, exc_type, exc_val, exc_tb):
@@ -234,7 +378,7 @@ class NonGitDir:
pass
def __enter__(self):
self.tmpdir = tempfile.TemporaryDirectory()
self.tmpdir = get_temporary_directory()
shell(
f"""
cd {self.tmpdir.name}
@@ -254,11 +398,11 @@ class TempGitFileRemote:
pass
def __enter__(self):
self.tmpdir = tempfile.TemporaryDirectory()
self.tmpdir = get_temporary_directory()
shell(
f"""
cd {self.tmpdir.name}
git init
git -c init.defaultBranch=master init
echo test > root-commit-in-remote-1
git add root-commit-in-remote-1
git commit -m "root-commit-in-remote-1"

View File

@@ -1,14 +0,0 @@
attrs==21.4.0
gitdb==4.0.9
GitPython==3.1.27
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.0.8
pytest==7.1.2
PyYAML==6.0
smmap==5.0.0
toml==0.10.2
tomli==2.0.1
typing_extensions==4.2.0

View File

@@ -11,7 +11,7 @@ from helpers import *
def test_repos_find_nonexistent():
with NonExistentPath() as nonexistent_dir:
cmd = grm(["repos", "find", nonexistent_dir])
cmd = grm(["repos", "find", "local", nonexistent_dir])
assert "does not exist" in cmd.stderr.lower()
assert cmd.returncode != 0
assert not os.path.exists(nonexistent_dir)
@@ -19,14 +19,14 @@ def test_repos_find_nonexistent():
def test_repos_find_file():
with tempfile.NamedTemporaryFile() as tmpfile:
cmd = grm(["repos", "find", tmpfile.name])
cmd = grm(["repos", "find", "local", tmpfile.name])
assert "not a directory" in cmd.stderr.lower()
assert cmd.returncode != 0
def test_repos_find_empty():
with tempfile.TemporaryDirectory() as tmpdir:
cmd = grm(["repos", "find", tmpdir])
cmd = grm(["repos", "find", "local", tmpdir])
assert cmd.returncode == 0
assert len(cmd.stdout) == 0
assert len(cmd.stderr) != 0
@@ -35,7 +35,8 @@ def test_repos_find_empty():
def test_repos_find_invalid_format():
with tempfile.TemporaryDirectory() as tmpdir:
cmd = grm(
["repos", "find", tmpdir, "--format", "invalidformat"], is_invalid=True
["repos", "find", "local", tmpdir, "--format", "invalidformat"],
is_invalid=True,
)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
@@ -55,7 +56,7 @@ def test_repos_find_non_git_repos():
"""
)
cmd = grm(["repos", "find", tmpdir])
cmd = grm(["repos", "find", "local", tmpdir])
assert cmd.returncode == 0
assert len(cmd.stdout) == 0
@@ -72,7 +73,7 @@ def test_repos_find(configtype, default):
mkdir repo1
(
cd ./repo1
git init
git -c init.defaultBranch=master init
echo test > test
git add test
git commit -m "commit1"
@@ -82,8 +83,8 @@ def test_repos_find(configtype, default):
mkdir repo2
(
cd ./repo2
git init
git co -b main
git -c init.defaultBranch=master init
git checkout -b main
echo test > test
git add test
git commit -m "commit1"
@@ -97,7 +98,7 @@ def test_repos_find(configtype, default):
"""
)
args = ["repos", "find", tmpdir]
args = ["repos", "find", "local", tmpdir]
if not default:
args += ["--format", configtype]
cmd = grm(args)
@@ -152,7 +153,7 @@ def test_repos_find(configtype, default):
def test_repos_find_in_root(configtype, default):
with TempGitRepository() as repo_dir:
args = ["repos", "find", repo_dir]
args = ["repos", "find", "local", repo_dir]
if not default:
args += ["--format", configtype]
cmd = grm(args)
@@ -202,7 +203,7 @@ def test_repos_find_with_invalid_repo(configtype, default):
mkdir repo1
(
cd ./repo1
git init
git -c init.defaultBranch=master init
echo test > test
git add test
git commit -m "commit1"
@@ -212,8 +213,8 @@ def test_repos_find_with_invalid_repo(configtype, default):
mkdir repo2
(
cd ./repo2
git init
git co -b main
git -c init.defaultBranch=master init
git checkout -b main
echo test > test
git add test
git commit -m "commit1"
@@ -227,7 +228,7 @@ def test_repos_find_with_invalid_repo(configtype, default):
"""
)
args = ["repos", "find", tmpdir]
args = ["repos", "find", "local", tmpdir]
if not default:
args += ["--format", configtype]
cmd = grm(args)

View File

@@ -0,0 +1,959 @@
#!/usr/bin/env python3
import re
import os
import toml
import pytest
import yaml
from helpers import *
ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"]
PROVIDERS = ["github", "gitlab"]
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_invalid_provider(use_config):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
f.write(
"""
provider = "thisproviderdoesnotexist"
token_command = "true"
root = "/"
"""
)
args = ["repos", "find", "config", "--config", config.name]
cmd = grm(args, is_invalid=True)
else:
args = [
"repos",
"find",
"remote",
"--provider",
"thisproviderdoesnotexist",
"--token-command",
"true",
"--root",
"/",
]
cmd = grm(args, is_invalid=True)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
if not use_config:
assert re.match(".*isn't a valid value for.*provider", cmd.stderr)
@pytest.mark.parametrize("provider", PROVIDERS)
def test_repos_find_remote_invalid_format(provider):
cmd = grm(
[
"repos",
"find",
"remote",
"--provider",
provider,
"--format",
"invalidformat",
"--token-command",
"true",
"--root",
"/myroot",
],
is_invalid=True,
)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "isn't a valid value" in cmd.stderr
@pytest.mark.parametrize("provider", PROVIDERS)
def test_repos_find_remote_token_command_failed(provider):
cmd = grm(
[
"repos",
"find",
"remote",
"--provider",
provider,
"--format",
"yaml",
"--token-command",
"false",
"--root",
"/myroot",
],
is_invalid=True,
)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "token command failed" in cmd.stderr.lower()
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_wrong_token(provider, use_config):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
f.write(
f"""
provider = "{provider}"
token_command = "echo wrongtoken"
root = "/myroot"
[filters]
access = true
"""
)
args = ["repos", "find", "config", "--config", config.name]
cmd = grm(args, is_invalid=True)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo wrongtoken",
"--root",
"/myroot",
"--access",
]
cmd = grm(args, is_invalid=True)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "bad credentials" in cmd.stderr.lower()
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
f.write(
f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
)
args = ["repos", "find", "config", "--config", config.name]
if not default:
args += ["--format", configtype]
cmd = grm(args)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
]
if not default:
args += ["--format", configtype]
cmd = grm(args)
assert cmd.returncode == 0
assert len(cmd.stderr) == 0
if default or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 0
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user_empty(
provider, configtype, configtype_default, use_config
):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
[filters]
users = ["someotheruser"]
"""
f.write(cfg)
args = ["repos", "find", "config", "--config", config.name]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--user",
"someotheruser",
]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
assert cmd.returncode == 0
assert len(cmd.stderr) == 0
if configtype_default or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 0
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("use_owner", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
@pytest.mark.parametrize("override_remote_name", [True, False])
def test_repos_find_remote_user(
provider,
configtype,
configtype_default,
worktree,
worktree_default,
use_owner,
force_ssh,
use_alternate_endpoint,
use_config,
override_remote_name,
):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh:
cfg += f"force_ssh = true\n"
if override_remote_name:
cfg += f'remote_name = "otherremote"\n'
if use_owner:
cfg += """
[filters]
owner = true\n
"""
else:
cfg += """
[filters]
users = ["myuser1"]\n
"""
print(cfg)
f.write(cfg)
args = ["repos", "find", "config", "--config", config.name]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
]
if use_owner:
args += ["--owner"]
else:
args += ["--user", "myuser1"]
if force_ssh:
args += ["--force-ssh"]
if override_remote_name:
args += ["--remote-name", "otherremote"]
if not worktree_default:
args += ["--worktree", str(worktree).lower()]
if use_alternate_endpoint:
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
if use_alternate_endpoint and provider == "github":
assert cmd.returncode != 0
assert "overriding is not supported for github" in cmd.stderr.lower()
return
assert cmd.returncode == 0
assert len(cmd.stderr) == 0
if configtype_default or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 1
assert set(output["trees"][0].keys()) == {"root", "repos"}
assert isinstance(output["trees"][0]["repos"], list)
assert len(output["trees"][0]["repos"]) == 5
for i in range(1, 6):
repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
0
]
assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1
if override_remote_name:
assert repo["remotes"][0]["name"] == "otherremote"
else:
assert repo["remotes"][0]["name"] == "origin"
if force_ssh or i == 1:
assert (
repo["remotes"][0]["url"]
== f"ssh://git@example.com/myuser1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "ssh"
else:
assert (
repo["remotes"][0]["url"]
== f"https://example.com/myuser1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "https"
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_group_empty(
provider, configtype, configtype_default, use_alternate_endpoint, use_config
):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
cfg += """
[filters]
groups = ["someothergroup"]\n
"""
f.write(cfg)
args = ["repos", "find", "config", "--config", config.name]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--group",
"someothergroup",
]
if use_alternate_endpoint:
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
if use_alternate_endpoint and provider == "github":
assert cmd.returncode != 0
assert "overriding is not supported for github" in cmd.stderr.lower()
return
assert cmd.returncode == 0
assert len(cmd.stderr) == 0
if configtype_default or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 0
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_group(
provider,
configtype,
configtype_default,
worktree,
worktree_default,
force_ssh,
use_alternate_endpoint,
use_config,
):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh:
cfg += f"force_ssh = true\n"
if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
cfg += """
[filters]
groups = ["mygroup1"]\n
"""
f.write(cfg)
args = ["repos", "find", "config", "--config", config.name]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--group",
"mygroup1",
]
if not worktree_default:
args += ["--worktree", str(worktree).lower()]
if force_ssh:
args += ["--force-ssh"]
if use_alternate_endpoint:
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
if use_alternate_endpoint and provider == "github":
assert cmd.returncode != 0
assert "overriding is not supported for github" in cmd.stderr.lower()
return
assert cmd.returncode == 0
assert len(cmd.stderr) == 0
if configtype_default or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 1
assert set(output["trees"][0].keys()) == {"root", "repos"}
assert isinstance(output["trees"][0]["repos"], list)
assert len(output["trees"][0]["repos"]) == 5
for i in range(1, 6):
repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
0
]
assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1
if force_ssh or i == 1:
assert repo["remotes"][0]["name"] == "origin"
assert (
repo["remotes"][0]["url"]
== f"ssh://git@example.com/mygroup1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "ssh"
else:
assert repo["remotes"][0]["name"] == "origin"
assert (
repo["remotes"][0]["url"]
== f"https://example.com/mygroup1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "https"
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("use_owner", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user_and_group(
provider,
configtype,
configtype_default,
worktree,
worktree_default,
use_owner,
force_ssh,
use_alternate_endpoint,
use_config,
):
if use_config:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh:
cfg += f"force_ssh = true\n"
if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
cfg += """
[filters]
groups = ["mygroup1"]\n
"""
if use_owner:
cfg += "owner = true\n"
else:
cfg += 'users = ["myuser1"]\n'
f.write(cfg)
args = ["repos", "find", "config", "--config", config.name]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
else:
args = [
"repos",
"find",
"remote",
"--provider",
provider,
"--token-command",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--group",
"mygroup1",
]
if use_owner:
args += ["--owner"]
else:
args += ["--user", "myuser1"]
if not worktree_default:
args += ["--worktree", str(worktree).lower()]
if force_ssh:
args += ["--force-ssh"]
if use_alternate_endpoint:
args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
if not configtype_default:
args += ["--format", configtype]
cmd = grm(args)
if use_alternate_endpoint and provider == "github":
assert cmd.returncode != 0
assert "overriding is not supported for github" in cmd.stderr.lower()
return
assert cmd.returncode == 0
assert len(cmd.stderr) == 0
if configtype_default or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 2
user_namespace = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]
assert set(user_namespace.keys()) == {"root", "repos"}
assert isinstance(user_namespace["repos"], list)
assert len(user_namespace["repos"]) == 5
for i in range(1, 6):
repo = [r for r in user_namespace["repos"] if r["name"] == f"myproject{i}"][0]
assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin"
if force_ssh or i == 1:
assert (
repo["remotes"][0]["url"]
== f"ssh://git@example.com/myuser1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "ssh"
else:
assert (
repo["remotes"][0]["url"]
== f"https://example.com/myuser1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "https"
group_namespace = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][0]
assert set(group_namespace.keys()) == {"root", "repos"}
assert isinstance(group_namespace["repos"], list)
assert len(group_namespace["repos"]) == 5
for i in range(1, 6):
repo = [r for r in group_namespace["repos"] if r["name"] == f"myproject{i}"][0]
assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin"
if force_ssh or i == 1:
assert (
repo["remotes"][0]["url"]
== f"ssh://git@example.com/mygroup1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "ssh"
else:
assert (
repo["remotes"][0]["url"]
== f"https://example.com/mygroup1/myproject{i}.git"
)
assert repo["remotes"][0]["type"] == "https"
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("with_user_filter", [True, False])
@pytest.mark.parametrize("with_group_filter", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_owner(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    with_user_filter,
    with_group_filter,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Run `grm repos find` with `--access` (owner mode) against the mock
    provider and validate the generated tree config.

    The command is driven either through a config file (`use_config`) or
    through CLI flags; both paths must produce the same result. Expected
    layout (from the mock provider fixtures): 4 trees. Filtered namespaces
    (myuser1 / mygroup1) contain all 5 projects when the corresponding
    filter is active, otherwise only the subset the token owner can access.
    """

    def assert_repo(repo, namespace, project, expect_ssh):
        # Shared per-repo assertions: worktree flag, exactly one "origin"
        # remote, and the URL scheme matching the force_ssh/i==1 rules.
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        remote = repo["remotes"][0]
        assert remote["name"] == "origin"
        if expect_ssh:
            assert remote["url"] == f"ssh://git@example.com/{namespace}/{project}.git"
            assert remote["type"] == "ssh"
        else:
            assert remote["url"] == f"https://example.com/{namespace}/{project}.git"
            assert remote["type"] == "https"

    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    # NOTE: was f-string without placeholders; plain literal.
                    cfg += "force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
[filters]
access = true\n
"""
                if with_user_filter:
                    cfg += 'users = ["myuser1"]\n'
                if with_group_filter:
                    cfg += 'groups = ["mygroup1"]\n'
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--access",
        ]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if with_user_filter:
            args += ["--user", "myuser1"]
        if with_group_filter:
            args += ["--group", "mygroup1"]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)

    if use_alternate_endpoint and provider == "github":
        # github.com cannot be pointed at an alternate endpoint.
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return

    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 4

    # myuser1: all 5 projects when filtered explicitly, otherwise the 2
    # the token owner has access to.
    user_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]
    assert set(user_namespace_1.keys()) == {"root", "repos"}
    assert isinstance(user_namespace_1["repos"], list)
    expected_user_repos = 5 if with_user_filter else 2
    assert len(user_namespace_1["repos"]) == expected_user_repos
    for i in range(1, expected_user_repos + 1):
        repo = [r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"][0]
        assert_repo(repo, "myuser1", f"myproject{i}", force_ssh or i == 1)

    # myuser2 always contributes exactly one accessible project.
    user_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/myuser2"][0]
    assert set(user_namespace_2.keys()) == {"root", "repos"}
    assert isinstance(user_namespace_2["repos"], list)
    assert len(user_namespace_2["repos"]) == 1
    assert_repo(user_namespace_2["repos"][0], "myuser2", "myproject3", force_ssh)

    # mygroup1: all 5 projects when filtered, otherwise only myproject4.
    group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][
        0
    ]
    assert set(group_namespace_1.keys()) == {"root", "repos"}
    assert isinstance(group_namespace_1["repos"], list)
    if with_group_filter:
        assert len(group_namespace_1["repos"]) == 5
        for i in range(1, 6):
            repo = [
                r for r in group_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert_repo(repo, "mygroup1", f"myproject{i}", force_ssh or i == 1)
    else:
        assert len(group_namespace_1["repos"]) == 1
        assert_repo(group_namespace_1["repos"][0], "mygroup1", "myproject4", force_ssh)

    # mygroup2 always contributes exactly one accessible project.
    group_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup2"][
        0
    ]
    assert set(group_namespace_2.keys()) == {"root", "repos"}
    assert isinstance(group_namespace_2["repos"], list)
    assert len(group_namespace_2["repos"]) == 1
    assert_repo(group_namespace_2["repos"][0], "mygroup2", "myproject5", force_ssh)

View File

@@ -133,6 +133,76 @@ templates = {
"""
),
},
"repo_in_subdirectory": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "outer/inner"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: outer/inner
remotes:
- name: origin
url: "file://{remote}"
type: "file"
"""
),
},
"nested_trees": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "outer"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees]]
root = "{root}/subdir"
[[trees.repos]]
name = "inner"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote2}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: outer
remotes:
- name: origin
url: "file://{remote1}"
type: "file"
- root: "{root}/subdir"
repos:
- name: inner
remotes:
- name: origin
url: "file://{remote2}"
type: "file"
"""
),
},
}
@@ -154,7 +224,7 @@ def test_repos_sync_config_is_valid_symlink(configtype):
subprocess.run(["cat", config.name])
cmd = grm(["repos", "sync", "--config", config_symlink])
cmd = grm(["repos", "sync", "config", "--config", config_symlink])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -174,7 +244,7 @@ def test_repos_sync_config_is_invalid_symlink():
config_symlink = os.path.join(config_dir, "cfglink")
os.symlink(nonexistent_dir, config_symlink)
cmd = grm(["repos", "sync", "--config", config_symlink])
cmd = grm(["repos", "sync", "config", "--config", config_symlink])
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
@@ -185,7 +255,7 @@ def test_repos_sync_config_is_invalid_symlink():
def test_repos_sync_config_is_directory():
with tempfile.TemporaryDirectory() as config:
cmd = grm(["repos", "sync", "--config", config])
cmd = grm(["repos", "sync", "config", "--config", config])
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
@@ -197,12 +267,11 @@ def test_repos_sync_config_is_unreadable():
config_path = os.path.join(config_dir, "cfg")
open(config_path, "w")
os.chmod(config_path, 0o0000)
cmd = grm(["repos", "sync", "--config", config_path])
cmd = grm(["repos", "sync", "config", "--config", config_path])
assert os.path.exists(config_path)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "permission denied" in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@@ -213,7 +282,7 @@ def test_repos_sync_unmanaged_repos(configtype):
with open(config.name, "w") as f:
f.write(templates["repo_simple"][configtype].format(root=root))
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(root, "test")
@@ -232,9 +301,8 @@ def test_repos_sync_root_is_file(configtype):
with open(config.name, "w") as f:
f.write(templates["repo_simple"][configtype].format(root=target.name))
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "not a directory" in cmd.stderr.lower()
@@ -251,7 +319,7 @@ def test_repos_sync_normal_clone(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -276,6 +344,84 @@ def test_repos_sync_normal_clone(configtype):
assert urls[0] == f"file://{remote2}"
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_repo_in_subdirectory(configtype):
    """A repo named with a path component ("outer/inner") is cloned into the
    matching subdirectory, and a second sync does not flag it (or its parent
    directory) as an unmanaged repository."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote, remote_head_commit_sha):
            with tempfile.NamedTemporaryFile() as config:
                with open(config.name, "w") as f:
                    f.write(
                        templates["repo_in_subdirectory"][configtype].format(
                            root=target, remote=remote
                        )
                    )
                cmd = grm(["repos", "sync", "config", "--config", config.name])
                assert cmd.returncode == 0

                # The clone must land at <root>/outer/inner.
                git_dir = os.path.join(target, "outer", "inner")
                assert os.path.exists(git_dir)
                with git.Repo(git_dir) as repo:
                    assert not repo.bare
                    assert not repo.is_dirty()
                    assert set([str(r) for r in repo.remotes]) == {"origin"}
                    assert str(repo.active_branch) == "master"
                    assert str(repo.head.commit) == remote_head_commit_sha
                    assert len(repo.remotes) == 1
                    urls = list(repo.remote("origin").urls)
                    assert len(urls) == 1
                    assert urls[0] == f"file://{remote}"

                # Re-sync: the nested repo must not be reported as unmanaged.
                cmd = grm(["repos", "sync", "config", "--config", config.name])
                assert "found unmanaged repository" not in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_nested_clone(configtype):
    """Two trees where one root is nested inside the other ("{root}" and
    "{root}/subdir") both sync correctly, and the inner clone is not
    reported as an unmanaged repository on a subsequent sync."""
    with tempfile.TemporaryDirectory() as target:
        with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
            with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
                with tempfile.NamedTemporaryFile() as config:
                    with open(config.name, "w") as f:
                        f.write(
                            templates["nested_trees"][configtype].format(
                                root=target, remote1=remote1, remote2=remote2
                            )
                        )
                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert cmd.returncode == 0

                    def validate(git_dir, sha, remote):
                        # One clone: clean, on master at `sha`, with a single
                        # "origin" remote pointing at `remote`.
                        assert os.path.exists(git_dir)
                        with git.Repo(git_dir) as repo:
                            assert not repo.bare
                            assert not repo.is_dirty()
                            assert set([str(r) for r in repo.remotes]) == {"origin"}
                            assert str(repo.active_branch) == "master"
                            assert str(repo.head.commit) == sha
                            assert len(repo.remotes) == 1
                            urls = list(repo.remote("origin").urls)
                            assert len(urls) == 1
                            assert urls[0] == f"file://{remote}"

                    validate(
                        os.path.join(target, "outer"), remote1_head_commit_sha, remote1
                    )
                    validate(
                        os.path.join(target, "subdir", "inner"),
                        remote2_head_commit_sha,
                        remote2,
                    )

                    # Re-sync: the nested tree must not trigger an
                    # unmanaged-repository warning. (Debug prints removed.)
                    cmd = grm(["repos", "sync", "config", "--config", config.name])
                    assert "found unmanaged repository" not in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_init(configtype):
with tempfile.TemporaryDirectory() as target:
@@ -283,7 +429,7 @@ def test_repos_sync_normal_init(configtype):
with open(config.name, "w") as f:
f.write(templates["repo_simple"][configtype].format(root=target))
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -309,7 +455,7 @@ def test_repos_sync_normal_add_remote(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -329,7 +475,7 @@ def test_repos_sync_normal_add_remote(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
with git.Repo(git_dir) as repo:
assert set([str(r) for r in repo.remotes]) == {
@@ -359,7 +505,7 @@ def test_repos_sync_normal_remove_remote(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -382,7 +528,7 @@ def test_repos_sync_normal_remove_remote(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
shell(f"cd {git_dir} && git remote -v")
with git.Repo(git_dir) as repo:
@@ -424,7 +570,7 @@ def test_repos_sync_normal_change_remote_url(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -444,7 +590,7 @@ def test_repos_sync_normal_change_remote_url(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
with git.Repo(git_dir) as repo:
assert set([str(r) for r in repo.remotes]) == {"origin"}
@@ -467,7 +613,7 @@ def test_repos_sync_normal_change_remote_name(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -487,7 +633,7 @@ def test_repos_sync_normal_change_remote_name(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
with git.Repo(git_dir) as repo:
# See the note in `test_repos_sync_normal_remove_remote()`
@@ -501,7 +647,8 @@ def test_repos_sync_normal_change_remote_name(configtype):
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_worktree_clone(configtype):
@pytest.mark.parametrize("init_worktree", [True, False, "default"])
def test_repos_sync_worktree_clone(configtype, init_worktree):
with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote, head_commit_sha):
with tempfile.NamedTemporaryFile() as config:
@@ -512,13 +659,28 @@ def test_repos_sync_worktree_clone(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
args = ["repos", "sync", "config", "--config", config.name]
if init_worktree is True:
args.append("--init-worktree=true")
if init_worktree is False:
args.append("--init-worktree=false")
for i in [1, 2]:
cmd = grm(args)
assert cmd.returncode == 0
worktree_dir = f"{target}/test"
assert os.path.exists(worktree_dir)
assert set(os.listdir(worktree_dir)) == {".git-main-working-tree"}
if init_worktree is True or init_worktree == "default":
assert set(os.listdir(worktree_dir)) == {
".git-main-working-tree",
"master",
}
else:
assert set(os.listdir(worktree_dir)) == {
".git-main-working-tree"
}
with git.Repo(
os.path.join(worktree_dir, ".git-main-working-tree")
@@ -538,7 +700,7 @@ def test_repos_sync_worktree_init(configtype):
templates["worktree_repo_simple"][configtype].format(root=target)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
worktree_dir = f"{target}/test"
@@ -573,7 +735,7 @@ def test_repos_sync_invalid_syntax(configtype):
)
else:
raise NotImplementedError()
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0
@@ -590,11 +752,11 @@ def test_repos_sync_unchanged(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
before = checksum_directory(target)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
after = checksum_directory(target)
assert cmd.returncode == 0
@@ -614,7 +776,7 @@ def test_repos_sync_normal_change_to_worktree(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -626,7 +788,7 @@ def test_repos_sync_normal_change_to_worktree(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0
assert "already exists" in cmd.stderr
assert "not using a worktree setup" in cmd.stderr
@@ -645,7 +807,7 @@ def test_repos_sync_worktree_change_to_normal(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
@@ -657,7 +819,7 @@ def test_repos_sync_worktree_change_to_normal(configtype):
)
)
cmd = grm(["repos", "sync", "--config", config.name])
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0
assert "already exists" in cmd.stderr
assert "using a worktree setup" in cmd.stderr

View File

@@ -6,7 +6,7 @@ from helpers import *
def test_worktree_clean():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
assert "test" in os.listdir(base_dir)
@@ -17,7 +17,7 @@ def test_worktree_clean():
def test_worktree_clean_refusal_no_tracking_branch():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -31,7 +31,7 @@ def test_worktree_clean_refusal_no_tracking_branch():
def test_worktree_clean_refusal_uncommited_changes_new_file():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -47,7 +47,7 @@ def test_worktree_clean_refusal_uncommited_changes_new_file():
def test_worktree_clean_refusal_uncommited_changes_changed_file():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -63,7 +63,7 @@ def test_worktree_clean_refusal_uncommited_changes_changed_file():
def test_worktree_clean_refusal_uncommited_changes_cleand_file():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -81,7 +81,7 @@ def test_worktree_clean_refusal_uncommited_changes_cleand_file():
def test_worktree_clean_refusal_commited_changes():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -99,7 +99,7 @@ def test_worktree_clean_refusal_commited_changes():
def test_worktree_clean_refusal_tracking_branch_mismatch():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -117,7 +117,7 @@ def test_worktree_clean_refusal_tracking_branch_mismatch():
def test_worktree_clean_fail_from_subdir():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -148,7 +148,7 @@ def test_worktree_clean_non_git():
def test_worktree_clean_configured_default_branch(
configure_default_branch, branch_list_empty
):
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
if configure_default_branch:
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
if branch_list_empty:

View File

@@ -6,7 +6,7 @@ from helpers import *
def test_worktree_never_clean_persistent_branches():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write(
"""
@@ -33,7 +33,7 @@ def test_worktree_never_clean_persistent_branches():
def test_worktree_clean_branch_merged_into_persistent():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write(
"""
@@ -72,7 +72,7 @@ def test_worktree_clean_branch_merged_into_persistent():
def test_worktree_no_clean_unmerged_branch():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write(
"""
@@ -105,7 +105,7 @@ def test_worktree_no_clean_unmerged_branch():
def test_worktree_delete_branch_merged_into_persistent():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write(
"""

View File

@@ -23,7 +23,7 @@ def test_convert():
def test_convert_already_worktree():
with TempGitRepositoryWorktree() as (git_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (git_dir, _commit):
before = checksum_directory(git_dir)
cmd = grm(["wt", "convert"], cwd=git_dir)

View File

@@ -9,7 +9,7 @@ import git
def test_worktree_fetch():
with TempGitRepositoryWorktree() as (base_dir, root_commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit):
with TempGitFileRemote() as (remote_path, _remote_sha):
shell(
f"""
@@ -56,7 +56,7 @@ def test_worktree_fetch():
@pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False])
def test_worktree_pull(rebase, ffable, has_changes, stash):
with TempGitRepositoryWorktree() as (base_dir, root_commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit):
with TempGitFileRemote() as (remote_path, _remote_sha):
shell(
f"""

View File

@@ -14,7 +14,7 @@ import git
@pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False])
def test_worktree_rebase(pull, rebase, ffable, has_changes, stash):
with TempGitRepositoryWorktree() as (base_dir, _root_commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _root_commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write('persistent_branches = ["mybasebranch"]')

View File

@@ -1,10 +1,18 @@
#!/usr/bin/env python3
import re
from helpers import *
import pytest
def test_worktree_status():
with TempGitRepositoryWorktree() as (base_dir, _commit):
@pytest.mark.parametrize("has_config", [True, False])
def test_worktree_status(has_config):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
if has_config:
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write("")
cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -16,7 +24,7 @@ def test_worktree_status():
def test_worktree_status_fail_from_subdir():
with TempGitRepositoryWorktree() as (base_dir, _commit):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0
@@ -40,3 +48,30 @@ def test_worktree_status_non_git():
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert len(cmd.stderr) != 0
def test_worktree_status_warn_with_non_worktree_dir():
    """`grm wt status` warns about a plain directory sitting next to the
    worktrees, but still exits successfully."""
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        # Drop a non-worktree directory into the worktree root.
        shell(
            f"""
cd {base_dir}
mkdir not_a_worktree
"""
        )

        cmd = grm(["wt", "status"], cwd=base_dir)
        # Command succeeds, prints status, and warns on stderr.
        assert cmd.returncode == 0
        assert len(cmd.stdout) != 0
        assert len(cmd.stderr) != 0
        warning = re.match(
            ".*error.*not_a_worktree.*not a valid worktree directory",
            cmd.stderr,
            re.IGNORECASE,
        )
        assert warning is not None

View File

@@ -4,96 +4,565 @@ from helpers import *
import git
import pytest
import datetime
import os.path
@pytest.mark.parametrize(
"config_setup",
(
(False, False, False),
(True, False, False),
(True, False, True),
(True, True, False),
(True, True, True),
),
)
@pytest.mark.parametrize("explicit_notrack", [True, False])
@pytest.mark.parametrize("explicit_track", [True, False])
@pytest.mark.parametrize(
"local_branch_setup", ((False, False), (True, False), (True, True))
)
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
@pytest.mark.parametrize("has_config", [True, False])
@pytest.mark.parametrize("has_default", [True, False])
@pytest.mark.parametrize("has_prefix", [True, False])
def test_worktree_add_simple(
remote_branch_already_exists, has_config, has_default, has_prefix
@pytest.mark.parametrize("remote_branch_with_prefix_already_exists", [True, False])
@pytest.mark.parametrize(
"remote_setup",
(
(0, "origin", False),
(1, "origin", False),
(2, "origin", False),
(2, "otherremote", False),
(2, "origin", True),
(2, "otherremote", True),
),
)
@pytest.mark.parametrize("track_differs_from_existing_branch_upstream", [True, False])
@pytest.mark.parametrize("worktree_with_slash", [True, False])
def test_worktree_add(
config_setup,
explicit_notrack,
explicit_track,
local_branch_setup,
remote_branch_already_exists,
remote_branch_with_prefix_already_exists,
remote_setup,
track_differs_from_existing_branch_upstream,
worktree_with_slash,
):
with TempGitRepositoryWorktree() as (base_dir, _commit):
if has_config:
(remote_count, default_remote, remotes_differ) = remote_setup
(
config_enabled,
config_has_default_remote_prefix,
config_has_default_track_enabled,
) = config_setup
(local_branch_exists, local_branch_has_tracking_branch) = local_branch_setup
has_remotes = True if remote_count > 0 else False
if worktree_with_slash:
worktree_name = "dir/nested/test"
else:
worktree_name = "test"
if track_differs_from_existing_branch_upstream:
explicit_track_branch_name = f"{default_remote}/somethingelse"
else:
explicit_track_branch_name = f"{default_remote}/{worktree_name}"
timestamp = datetime.datetime.now().replace(microsecond=0).isoformat()
# GitPython has some weird behavior here. It is not possible to use kwargs
# to set the commit and author date.
#
# `committer_date=x` (which is documented) does not work, as `git commit`
# does not accept --committer-date
#
# `author_date=x` does not work, as it's now called --date in `git commit`
#
# `date=x` should work, but is refused by GitPython, as it does not know
# about the new behavior in `git commit`
#
# Fortunately, there are env variables that control those timestamps.
os.environ["GIT_COMMITTER_DATE"] = str(timestamp)
os.environ["GIT_AUTHOR_DATE"] = str(timestamp)
def setup_remote1(directory):
if remote_branch_already_exists:
with tempfile.TemporaryDirectory() as cloned:
repo = git.Repo.clone_from(directory, cloned)
newfile = os.path.join(cloned, "change")
open(newfile, "w").close()
repo.index.add([newfile])
repo.index.commit("commit")
repo.remotes.origin.push(f"HEAD:{worktree_name}", force=True)
if remote_branch_with_prefix_already_exists:
with tempfile.TemporaryDirectory() as cloned:
repo = git.Repo.clone_from(directory, cloned)
newfile = os.path.join(cloned, "change2")
open(newfile, "w").close()
repo.index.add([newfile])
repo.index.commit("commit")
repo.remotes.origin.push(f"HEAD:myprefix/{worktree_name}", force=True)
return "_".join(
[
str(worktree_with_slash),
str(remote_branch_already_exists),
str(remote_branch_with_prefix_already_exists),
str(remotes_differ),
]
)
def setup_remote2(directory):
if remote_branch_already_exists:
with tempfile.TemporaryDirectory() as cloned:
repo = git.Repo.clone_from(directory, cloned)
newfile = os.path.join(cloned, "change")
open(newfile, "w").close()
repo.index.add([newfile])
repo.index.commit("commit")
if remotes_differ:
newfile = os.path.join(cloned, "change_on_second_remote")
open(newfile, "w").close()
repo.index.add([newfile])
repo.index.commit("commit_on_second_remote")
repo.remotes.origin.push(f"HEAD:{worktree_name}", force=True)
if remote_branch_with_prefix_already_exists:
with tempfile.TemporaryDirectory() as cloned:
repo = git.Repo.clone_from(directory, cloned)
newfile = os.path.join(cloned, "change2")
open(newfile, "w").close()
repo.index.add([newfile])
repo.index.commit("commit")
if remotes_differ:
newfile = os.path.join(cloned, "change_on_second_remote2")
open(newfile, "w").close()
repo.index.add([newfile])
repo.index.commit("commit_on_second_remote2")
repo.remotes.origin.push(f"HEAD:myprefix/{worktree_name}", force=True)
return "_".join(
[
str(worktree_with_slash),
str(remote_branch_already_exists),
str(remote_branch_with_prefix_already_exists),
str(remotes_differ),
]
)
cachefn = lambda nr: "_".join(
[
str(nr),
str(default_remote),
str(local_branch_exists),
str(remote_branch_already_exists),
str(remote_branch_with_prefix_already_exists),
str(remote_count),
str(remotes_differ),
str(worktree_name),
]
)
remote1_cache_key = cachefn(1)
remote2_cache_key = cachefn(2)
cachekey = "_".join(
[
str(local_branch_exists),
str(local_branch_has_tracking_branch),
str(remote_branch_already_exists),
str(remote_branch_with_prefix_already_exists),
str(remote_count),
str(remotes_differ),
str(worktree_name),
]
)
with TempGitRepositoryWorktree.get(
cachekey=cachekey,
branch=worktree_name if local_branch_exists else None,
remotes=remote_count,
remote_setup=[
[remote1_cache_key, setup_remote1],
[remote2_cache_key, setup_remote2],
],
) as (base_dir, initial_commit):
repo = git.Repo(os.path.join(base_dir, ".git-main-working-tree"))
if config_enabled:
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write(
f"""
[track]
default = {str(has_default).lower()}
default_remote = "origin"
default = {str(config_has_default_track_enabled).lower()}
default_remote = "{default_remote}"
"""
)
if has_prefix:
if config_has_default_remote_prefix:
f.write(
"""
default_remote_prefix = "myprefix"
"""
)
if remote_branch_already_exists:
shell(
f"""
cd {base_dir}
git --git-dir ./.git-main-working-tree worktree add tmp
(
cd tmp
touch change
git add change
git commit -m commit
git push origin HEAD:test
#git reset --hard 'HEAD@{1}'
git branch -va
if local_branch_exists:
if has_remotes and local_branch_has_tracking_branch:
origin = repo.remote(default_remote)
if remote_count >= 2:
otherremote = repo.remote("otherremote")
br = list(filter(lambda x: x.name == worktree_name, repo.branches))[0]
assert os.path.exists(base_dir)
if track_differs_from_existing_branch_upstream:
origin.push(
f"{worktree_name}:someothername", force=True, set_upstream=True
)
git --git-dir ./.git-main-working-tree worktree remove tmp
"""
if remote_count >= 2:
otherremote.push(
f"{worktree_name}:someothername",
force=True,
set_upstream=True,
)
cmd = grm(["wt", "add", "test"], cwd=base_dir)
br.set_tracking_branch(
list(
filter(
lambda x: x.remote_head == "someothername", origin.refs
)
)[0]
)
else:
origin.push(
f"{worktree_name}:{worktree_name}",
force=True,
set_upstream=True,
)
if remote_count >= 2:
otherremote.push(
f"{worktree_name}:{worktree_name}",
force=True,
set_upstream=True,
)
br.set_tracking_branch(
list(
filter(
lambda x: x.remote_head == worktree_name, origin.refs
)
)[0]
)
args = ["wt", "add", worktree_name]
if explicit_track:
args.extend(["--track", explicit_track_branch_name])
if explicit_notrack:
args.extend(["--no-track"])
cmd = grm(args, cwd=base_dir)
if explicit_track and not explicit_notrack and not has_remotes:
assert cmd.returncode != 0
assert f'remote "{default_remote}" not found' in cmd.stderr.lower()
return
assert cmd.returncode == 0
assert len(cmd.stdout.strip().split("\n")) == 1
assert f"worktree {worktree_name} created" in cmd.stdout.lower()
def check_deviation_error(base):
if (
not local_branch_exists
and (explicit_notrack or (not explicit_notrack and not explicit_track))
and (
remote_branch_already_exists
or (
config_enabled
and config_has_default_remote_prefix
and remote_branch_with_prefix_already_exists
)
)
and remote_count >= 2
and remotes_differ
):
assert (
f"branch exists on multiple remotes, but they deviate"
in cmd.stderr.lower()
)
assert len(cmd.stderr.strip().split("\n")) == base + 1
else:
if base == 0:
assert len(cmd.stderr) == base
else:
assert len(cmd.stderr.strip().split("\n")) == base
if explicit_track and explicit_notrack:
assert "--track will be ignored" in cmd.stderr.lower()
check_deviation_error(1)
else:
check_deviation_error(0)
files = os.listdir(base_dir)
if has_config is True:
if config_enabled is True:
if worktree_with_slash:
assert set(files) == {".git-main-working-tree", "grm.toml", "dir"}
else:
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
assert len(files) == 3
if worktree_with_slash:
assert set(files) == {".git-main-working-tree", "grm.toml", "dir"}
assert set(os.listdir(os.path.join(base_dir, "dir"))) == {"nested"}
assert set(os.listdir(os.path.join(base_dir, "dir/nested"))) == {"test"}
else:
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
else:
assert len(files) == 2
if worktree_with_slash:
assert set(files) == {".git-main-working-tree", "dir"}
assert set(os.listdir(os.path.join(base_dir, "dir"))) == {"nested"}
assert set(os.listdir(os.path.join(base_dir, "dir/nested"))) == {"test"}
else:
assert set(files) == {".git-main-working-tree", "test"}
repo = git.Repo(os.path.join(base_dir, "test"))
repo = git.Repo(os.path.join(base_dir, worktree_name))
assert not repo.bare
assert not repo.is_dirty()
if has_config and has_default:
if has_prefix and not remote_branch_already_exists:
# assert not repo.is_dirty()
assert str(repo.head.ref) == worktree_name
local_commit = repo.head.commit.hexsha
if not has_remotes:
assert local_commit == initial_commit
elif local_branch_exists:
assert local_commit == initial_commit
elif explicit_track and not explicit_notrack:
assert local_commit == repo.commit(explicit_track_branch_name).hexsha
elif explicit_notrack:
if config_enabled and config_has_default_remote_prefix:
if remote_branch_with_prefix_already_exists:
assert (
str(repo.active_branch.tracking_branch()) == "origin/myprefix/test"
local_commit
== repo.commit(
f"{default_remote}/myprefix/{worktree_name}"
).hexsha
)
elif remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert str(repo.active_branch.tracking_branch()) == "origin/test"
assert local_commit == initial_commit
elif remote_count == 1:
if config_enabled and config_has_default_remote_prefix:
if remote_branch_with_prefix_already_exists:
assert (
local_commit
== repo.commit(
f"{default_remote}/myprefix/{worktree_name}"
).hexsha
)
elif remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert local_commit == initial_commit
elif remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert local_commit == initial_commit
elif remotes_differ:
if config_enabled: # we have a default remote
if (
config_has_default_remote_prefix
and remote_branch_with_prefix_already_exists
):
assert (
local_commit
== repo.commit(
f"{default_remote}/myprefix/{worktree_name}"
).hexsha
)
elif remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert local_commit == initial_commit
else:
assert local_commit == initial_commit
else:
if config_enabled and config_has_default_remote_prefix:
if remote_branch_with_prefix_already_exists:
assert (
local_commit
== repo.commit(
f"{default_remote}/myprefix/{worktree_name}"
).hexsha
)
elif remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert local_commit == initial_commit
elif config_enabled:
if not config_has_default_remote_prefix:
if config_has_default_track_enabled:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
if remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert local_commit == initial_commit
else:
if remote_branch_with_prefix_already_exists:
assert (
local_commit
== repo.commit(
f"{default_remote}/myprefix/{worktree_name}"
).hexsha
)
elif remote_branch_already_exists:
assert (
local_commit
== repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
elif config_has_default_track_enabled:
assert (
local_commit
== repo.commit(
f"{default_remote}/myprefix/{worktree_name}"
).hexsha
)
else:
assert local_commit == initial_commit
elif remote_branch_already_exists and not remotes_differ:
assert (
local_commit == repo.commit(f"{default_remote}/{worktree_name}").hexsha
)
else:
assert local_commit == initial_commit
# Check whether tracking is ok
if not has_remotes:
assert repo.active_branch.tracking_branch() is None
elif explicit_notrack:
if local_branch_exists and local_branch_has_tracking_branch:
if track_differs_from_existing_branch_upstream:
assert (
str(repo.active_branch.tracking_branch())
== f"{default_remote}/someothername"
)
else:
assert (
str(repo.active_branch.tracking_branch())
== f"{default_remote}/{worktree_name}"
)
else:
assert repo.active_branch.tracking_branch() is None
elif explicit_track:
assert (
str(repo.active_branch.tracking_branch()) == explicit_track_branch_name
)
elif config_enabled and config_has_default_track_enabled:
if config_has_default_remote_prefix:
assert (
str(repo.active_branch.tracking_branch())
== f"{default_remote}/myprefix/{worktree_name}"
)
else:
assert (
str(repo.active_branch.tracking_branch())
== f"{default_remote}/{worktree_name}"
)
elif local_branch_exists and local_branch_has_tracking_branch:
if track_differs_from_existing_branch_upstream:
assert (
str(repo.active_branch.tracking_branch())
== f"{default_remote}/someothername"
)
else:
assert (
str(repo.active_branch.tracking_branch())
== f"{default_remote}/{worktree_name}"
)
else:
assert repo.active_branch.tracking_branch() is None
def test_worktree_add_into_subdirectory():
    # A worktree name containing a slash ("dir/test") is created as a nested
    # directory inside the repository root.
    # NOTE(review): no assertions are visible here — the checks for this test
    # appear to have been displaced elsewhere in this diff-mangled view
    # (see the stray "dir"/"dir/test" asserts further down). Confirm against
    # the full file.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        cmd = grm(["wt", "add", "dir/test"], cwd=base_dir)
def test_worktree_add_invalid_name():
    """`grm wt add` must reject malformed worktree names and create nothing."""
    bad_names = (
        "/absolute/path",
        "trailingslash/",
        "with spaces",
        "with\t tabs",
        "with\nnewline",
    )
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        for worktree_name in bad_names:
            cmd = grm(["wt", "add", worktree_name], cwd=base_dir)
            assert cmd.returncode != 0
            # Nothing may have been created, neither relative to the test's
            # cwd nor inside the repository itself.
            assert not os.path.exists(worktree_name)
            assert not os.path.exists(os.path.join(base_dir, worktree_name))
            assert "invalid worktree name" in str(cmd.stderr.lower())
def test_worktree_add_invalid_track():
    """Malformed `--track` references are rejected with a one-line error."""
    invalid_tracks = ("/absolute/path", "trailingslash/", "/")
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        for track in invalid_tracks:
            cmd = grm(["wt", "add", "foo", "--track", track], cwd=base_dir)
            assert cmd.returncode != 0
            # Exactly one line of diagnostics, and no worktree created.
            assert len(cmd.stderr.strip().split("\n")) == 1
            assert not os.path.exists("foo")
            assert not os.path.exists(os.path.join(base_dir, "foo"))
            assert "tracking branch" in str(cmd.stderr.lower())
@pytest.mark.parametrize("use_track", [True, False])
@pytest.mark.parametrize("use_configuration", [True, False])
@pytest.mark.parametrize("use_configuration_default", [True, False])
def test_worktree_add_invalid_remote_name(
    use_track, use_configuration, use_configuration_default
):
    # A remote that does not exist must fail the add whenever tracking would
    # actually be set up: either via explicit --track or via a grm.toml with
    # `default = true`. Otherwise the bogus remote name is never consulted
    # and the add succeeds.
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
        if use_configuration:
            with open(os.path.join(base_dir, "grm.toml"), "w") as f:
                f.write(
                    f"""
[track]
default = {str(use_configuration_default).lower()}
default_remote = "thisremotedoesnotexist"
"""
                )
        args = ["wt", "add", "foo"]
        if use_track:
            args.extend(["--track", "thisremotedoesnotexist/master"])
        cmd = grm(args, cwd=base_dir)
        if use_track or (use_configuration and use_configuration_default):
            # Tracking is required -> resolving the remote fails.
            assert cmd.returncode != 0
            assert "thisremotedoesnotexist" in cmd.stderr
        else:
            assert cmd.returncode == 0
            files = os.listdir(base_dir)
            # NOTE(review): the assertions below mention "dir"/"dir/test"
            # although this test adds a worktree named "foo" — this looks
            # like diff interleaving from
            # test_worktree_add_into_subdirectory. Confirm against the full
            # file before trusting these expectations.
            assert len(files) == 2
            assert set(files) == {".git-main-working-tree", "dir"}
            files = os.listdir(os.path.join(base_dir, "dir"))
            assert set(files) == {"test"}
            repo = git.Repo(os.path.join(base_dir, "dir", "test"))
            assert not repo.bare
            assert not repo.is_dirty()
            assert repo.active_branch.tracking_branch() is None
            assert len(cmd.stderr) == 0
def test_worktree_add_into_invalid_subdirectory():
    # An absolute path is not a valid worktree name; the command must fail
    # and must not create anything inside the repository.
    # NOTE(review): the two nested context managers below look like a diff
    # artifact (old vs. new fixture API on adjacent lines); one of them is
    # probably redundant — verify against repository history.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
            cmd = grm(["wt", "add", "/dir/test"], cwd=base_dir)
            assert cmd.returncode == 1
            assert "dir" not in os.listdir(base_dir)
@@ -104,200 +573,57 @@ def test_worktree_add_into_invalid_subdirectory():
assert "dir" not in os.listdir(base_dir)
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
@pytest.mark.parametrize("has_config", [True, False])
@pytest.mark.parametrize("has_default", [True, False])
@pytest.mark.parametrize("has_prefix", [True, False])
def test_worktree_add_with_tracking(
    remote_branch_already_exists, has_config, has_default, has_prefix
):
    # An explicit `--track origin/test` must always win: the worktree tracks
    # origin/test regardless of any [track] settings written to grm.toml.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        if has_config:
            with open(os.path.join(base_dir, "grm.toml"), "w") as f:
                f.write(
                    f"""
[track]
default = {str(has_default).lower()}
default_remote = "origin"
"""
                )
                if has_prefix:
                    f.write(
                        """
default_remote_prefix = "myprefix"
"""
                    )
        if remote_branch_already_exists:
            # Pre-create branch "test" on origin via a throwaway worktree,
            # so grm sees an existing remote branch to track.
            shell(
                f"""
cd {base_dir}
git --git-dir ./.git-main-working-tree worktree add tmp
(
cd tmp
touch change
git add change
git commit -m commit
git push origin HEAD:test
#git reset --hard 'HEAD@{1}'
git branch -va
)
git --git-dir ./.git-main-working-tree worktree remove tmp
"""
            )
        cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
        print(cmd.stderr)
        assert cmd.returncode == 0
        files = os.listdir(base_dir)
        # grm.toml is only present when we wrote one above.
        if has_config is True:
            assert len(files) == 3
            assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
        else:
            assert len(files) == 2
            assert set(files) == {".git-main-working-tree", "test"}
        repo = git.Repo(os.path.join(base_dir, "test"))
        assert not repo.bare
        assert not repo.is_dirty()
        assert str(repo.active_branch) == "test"
        # The explicit --track argument determines the upstream.
        assert str(repo.active_branch.tracking_branch()) == "origin/test"
@pytest.mark.parametrize("has_config", [True, False])
@pytest.mark.parametrize("has_default", [True, False])
@pytest.mark.parametrize("has_prefix", [True, False])
@pytest.mark.parametrize("track", [True, False])
def test_worktree_add_with_explicit_no_tracking(
    has_config, has_default, has_prefix, track
):
    # `--no-track` must always win: no upstream is configured, regardless of
    # an explicit `--track` on the same command line or any [track] settings
    # in grm.toml.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        if has_config:
            with open(os.path.join(base_dir, "grm.toml"), "w") as f:
                f.write(
                    f"""
[track]
default = {str(has_default).lower()}
default_remote = "origin"
"""
                )
                if has_prefix:
                    f.write(
                        """
default_remote_prefix = "myprefix"
"""
                    )
        if track is True:
            # --track and --no-track together: --no-track takes precedence.
            cmd = grm(
                ["wt", "add", "test", "--track", "origin/test", "--no-track"],
                cwd=base_dir,
            )
        else:
            cmd = grm(["wt", "add", "test", "--no-track"], cwd=base_dir)
        print(cmd.stderr)
        assert cmd.returncode == 0
        files = os.listdir(base_dir)
        if has_config is True:
            assert len(files) == 3
            assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
        else:
            assert len(files) == 2
            assert set(files) == {".git-main-working-tree", "test"}
        repo = git.Repo(os.path.join(base_dir, "test"))
        assert not repo.bare
        assert not repo.is_dirty()
        assert str(repo.active_branch) == "test"
        # The crucial assertion: no upstream was configured.
        assert repo.active_branch.tracking_branch() is None
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
@pytest.mark.parametrize("has_default", [True, False])
@pytest.mark.parametrize("has_prefix", [True, False])
def test_worktree_add_with_config(
    remote_branch_already_exists, has_default, has_prefix
):
    # Tracking behavior driven purely by grm.toml (no --track on the CLI),
    # as pinned by the assertions at the bottom:
    # * default = false                -> no tracking branch
    # * default = true                 -> track origin/test
    # * default = true + prefix        -> track origin/myprefix/test, unless
    #   the unprefixed remote branch already exists (then origin/test wins).
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        with open(os.path.join(base_dir, "grm.toml"), "w") as f:
            f.write(
                f"""
[track]
default = {str(has_default).lower()}
default_remote = "origin"
"""
            )
            if has_prefix:
                f.write(
                    """
default_remote_prefix = "myprefix"
"""
                )
        if remote_branch_already_exists:
            # Pre-create branch "test" on origin using a temporary worktree.
            shell(
                f"""
cd {base_dir}
git --git-dir ./.git-main-working-tree worktree add tmp
(
cd tmp
touch change
git add change
git commit -m commit
git push origin HEAD:test
#git reset --hard 'HEAD@{1}'
git branch -va
)
git --git-dir ./.git-main-working-tree worktree remove tmp
"""
            )
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        print(cmd.stderr)
        assert cmd.returncode == 0
        files = os.listdir(base_dir)
        assert len(files) == 3
        assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
        repo = git.Repo(os.path.join(base_dir, "test"))
        assert not repo.bare
        assert not repo.is_dirty()
        assert str(repo.active_branch) == "test"
        if has_default:
            if has_prefix and not remote_branch_already_exists:
                assert (
                    str(repo.active_branch.tracking_branch()) == "origin/myprefix/test"
                )
            else:
                assert str(repo.active_branch.tracking_branch()) == "origin/test"
        else:
            assert repo.active_branch.tracking_branch() is None
def test_worktree_delete():
    # Deleting a clean, fully-pushed worktree must remove both the worktree
    # directory and the corresponding local branch.
    # NOTE(review): the two nested context managers below look like a diff
    # artifact (old vs. new fixture API); one of them is probably redundant.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
            cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
            assert cmd.returncode == 0
            assert "test" in os.listdir(base_dir)
            cmd = grm(["wt", "delete", "test"], cwd=base_dir)
            assert cmd.returncode == 0
            assert len(cmd.stdout.strip().split("\n")) == 1
            assert "test" not in os.listdir(base_dir)
            # Add another worktree afterwards, then inspect the branch list —
            # presumably this settles git's worktree metadata before the
            # check; verify the intent against the full file.
            cmd = grm(["wt", "add", "check"], cwd=base_dir)
            assert cmd.returncode == 0
            repo = git.Repo(os.path.join(base_dir, ".git-main-working-tree"))
            print(repo.branches)
            # The deleted worktree's local branch must be gone as well.
            assert "test" not in [str(b) for b in repo.branches]
@pytest.mark.parametrize("has_other_worktree", [True, False])
def test_worktree_delete_in_subfolder(has_other_worktree):
    """Deleting a nested worktree cleans up correctly.

    When a sibling worktree remains in the same subdirectory, the directory
    must survive; otherwise the now-empty subdirectory must be removed too.
    """
    with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):

        def subdir_entries():
            # Current contents of the "dir" subdirectory inside the repo.
            return set(os.listdir(os.path.join(base_dir, "dir")))

        cmd = grm(["wt", "add", "dir/test", "--track", "origin/test"], cwd=base_dir)
        assert cmd.returncode == 0
        assert "dir" in os.listdir(base_dir)

        if has_other_worktree is True:
            cmd = grm(
                ["wt", "add", "dir/test2", "--track", "origin/test"], cwd=base_dir
            )
            assert cmd.returncode == 0
            assert subdir_entries() == {"test", "test2"}
        else:
            assert subdir_entries() == {"test"}

        cmd = grm(["wt", "delete", "dir/test"], cwd=base_dir)
        assert cmd.returncode == 0
        assert len(cmd.stdout.strip().split("\n")) == 1

        if has_other_worktree is True:
            assert subdir_entries() == {"test2"}
        else:
            assert "dir" not in os.listdir(base_dir)
def test_worktree_delete_refusal_no_tracking_branch():
    # Without a tracking branch grm cannot verify the worktree's commits are
    # pushed anywhere, so deletion must be refused and nothing removed.
    # NOTE(review): the doubled `with` below looks like a diff artifact; one
    # of the two fixtures is probably redundant. The `before` checksum also
    # appears to be compared further down in the full file — this view is
    # truncated.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
            cmd = grm(["wt", "add", "test"], cwd=base_dir)
            assert cmd.returncode == 0
            before = checksum_directory(f"{base_dir}/test")
            cmd = grm(["wt", "delete", "test"], cwd=base_dir)
            assert cmd.returncode != 0
            assert len(cmd.stdout) == 0
            stderr = cmd.stderr.lower()
            assert "refuse" in stderr or "refusing" in stderr
            assert "test" in os.listdir(base_dir)
@@ -306,94 +632,45 @@ def test_worktree_delete_refusal_no_tracking_branch():
assert before == after
def test_worktree_delete_refusal_uncommited_changes_new_file():
with TempGitRepositoryWorktree() as (base_dir, _commit):
@pytest.mark.parametrize(
"reason",
(
"new_file",
"changed_file",
"deleted_file",
"new_commit",
"tracking_branch_mismatch",
),
)
def test_worktree_delete_refusal(reason):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
if reason == "new_file":
shell(f"cd {base_dir}/test && touch changed_file")
before = checksum_directory(f"{base_dir}/test")
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
assert cmd.returncode != 0
stderr = cmd.stderr.lower()
assert "refuse" in stderr or "refusing" in stderr
assert "test" in os.listdir(base_dir)
after = checksum_directory(f"{base_dir}/test")
assert before == after
def test_worktree_delete_refusal_uncommited_changes_changed_file():
with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
shell(f"cd {base_dir}/test && git ls-files | shuf | head | xargs rm -rf")
before = checksum_directory(f"{base_dir}/test")
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
assert cmd.returncode != 0
stderr = cmd.stderr.lower()
assert "refuse" in stderr or "refusing" in stderr
assert "test" in os.listdir(base_dir)
after = checksum_directory(f"{base_dir}/test")
assert before == after
def test_worktree_delete_refusal_uncommited_changes_deleted_file():
with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
elif reason == "changed_file":
shell(
f"cd {base_dir}/test && git ls-files | shuf | head | while read f ; do echo $RANDOM > $f ; done"
)
before = checksum_directory(f"{base_dir}/test")
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
assert cmd.returncode != 0
stderr = cmd.stderr.lower()
assert "refuse" in stderr or "refusing" in stderr
assert "test" in os.listdir(base_dir)
after = checksum_directory(f"{base_dir}/test")
assert before == after
def test_worktree_delete_refusal_commited_changes():
with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
elif reason == "deleted_file":
shell(f"cd {base_dir}/test && git ls-files | shuf | head | xargs rm -rf")
elif reason == "new_commit":
shell(
f'cd {base_dir}/test && touch changed_file && git add changed_file && git commit -m "commitmsg"'
)
before = checksum_directory(f"{base_dir}/test")
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
assert cmd.returncode != 0
stderr = cmd.stderr.lower()
assert "refuse" in stderr or "refusing" in stderr
assert "test" in os.listdir(base_dir)
after = checksum_directory(f"{base_dir}/test")
assert before == after
def test_worktree_delete_refusal_tracking_branch_mismatch():
with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0
elif reason == "tracking_branch_mismatch":
shell(
f"cd {base_dir}/test && git push origin test && git reset --hard origin/test^"
)
else:
raise NotImplementedError()
before = checksum_directory(f"{base_dir}/test")
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
stderr = cmd.stderr.lower()
assert "refuse" in stderr or "refusing" in stderr
assert "test" in os.listdir(base_dir)
@@ -403,17 +680,18 @@ def test_worktree_delete_refusal_tracking_branch_mismatch():
def test_worktree_delete_force_refusal():
    # The worktree is added without any tracking branch (which would normally
    # cause a deletion refusal); `--force` must delete it anyway.
    # NOTE(review): the doubled `with` below looks like a diff artifact; one
    # of the two fixtures is probably redundant.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
            cmd = grm(["wt", "add", "test"], cwd=base_dir)
            assert cmd.returncode == 0
            cmd = grm(["wt", "delete", "test", "--force"], cwd=base_dir)
            assert cmd.returncode == 0
            assert len(cmd.stdout.strip().split("\n")) == 1
            assert "test" not in os.listdir(base_dir)
def test_worktree_add_delete_add():
    # Add -> delete -> add cycle for the same worktree name.
    # NOTE(review): this function appears truncated in this view — only the
    # first "add" step is visible; the delete/re-add steps must be in the
    # full file. The doubled `with` also looks like a diff artifact.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit):
            cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
            assert cmd.returncode == 0
            assert "test" in os.listdir(base_dir)

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
#
# Upgrade every outdated package in the e2e-test virtualenv, creating one
# git commit per package so each bump is individually revertable.

set -o nounset
set -o errexit

# shellcheck disable=SC1091
source ./venv/bin/activate

# Bring the venv in sync with the pinned requirements before checking for
# outdated packages.
pip --disable-pip-version-check install -r ./requirements.txt

# `pip3 list --outdated --format=freeze` prints "pkg==version" lines; drop
# editable installs ("-e ...") and keep only the package name.
pip3 list --outdated --format=freeze | grep -v '^\-e' | cut -d = -f 1 | while read -r package ; do
# pip and setuptools belong to the venv tooling itself — skip them.
[[ "$package" == "pip" ]] && continue
[[ "$package" == "setuptools" ]] && continue
pip install --upgrade "${package}"
# Read back the version that actually got installed, for the commit message.
version="$(pip show "${package}" | grep '^Version' | cut -d ' ' -f 2)"
message="e2e_tests/pip: Update ${package} to ${version}"
# Re-pin everything (pkg_resources is a distro artifact, not a real dist).
pip freeze | grep -v '^pkg_resources' > requirements.txt
git add ./requirements.txt
git commit --message "${message}"
done

163
release.sh Executable file
View File

@@ -0,0 +1,163 @@
#!/usr/bin/env bash
#
# release.sh — cut and publish a new release.
#
# Usage: ./release.sh (major|minor|patch)
#
# Flow: bump the version in Cargo.toml on develop, merge develop into master
# (always a merge commit), tag vX.Y.Z, push tag and branches to every remote,
# fast-forward develop, then `cargo publish`. Numerous sanity checks abort
# early if the working copy or the remotes are not in the expected state.

set -o nounset
set -o errexit
set -o pipefail

usage() {
printf '%s\n' "usage: $0 (major|minor|patch)" >&2
}

# Require exactly one argument: which semver component to bump.
if (($# != 1)); then
usage
exit 1
fi

# Extract the current version from the first `version = ...` line in
# Cargo.toml, stripping spaces and both quote styles.
current_version="$(grep '^version \?=' Cargo.toml | head -1 | cut -d '=' -f 2 | tr -d " '"'"')"
major="$(printf '%s' "${current_version}" | grep -oP '^\d+')"
minor="$(printf '%s' "${current_version}" | grep -oP '\.\d+\.' | tr -d '.')"
patch="$(printf '%s' "${current_version}" | grep -oP '\d+$' | tr -d '.')"

# `((var++))` evaluates to the pre-increment value, which is "false" when the
# old value was 0 — the `|| true` keeps errexit from aborting in that case.
case "$1" in
major)
((major++)) || true
minor=0
patch=0
# Deliberate guard: going to 1.x stays a manual decision.
printf '%s\n' "Are you sure you want to release 1.x?" >&2
exit 1
;;
minor)
((minor++)) || true
patch=0
;;
patch)
((patch++)) || true
;;
*)
usage
exit 1
;;
esac

new_version="${major}.${minor}.${patch}"

# Sanity check: the assembled version must be a full X.Y.Z semver.
if ! [[ "${new_version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
printf '%s\n' 'Version has to a complete semver' >&2
exit 1
fi

# Releases are always cut from develop, with a clean working tree.
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "${current_branch}" != "develop" ]]; then
printf '%s\n' 'You need to be on develop' >&2
exit 1
fi
gitstatus="$(git status --porcelain)"
if [[ -n "${gitstatus}" ]]; then
printf '%s\n' 'There are uncommitted changes' >&2
exit 1
fi

# The release tag must not exist yet — neither locally nor on any remote.
if git tag --list "v${new_version}" | grep -q .; then
printf 'Tag %s already exists\n' "v${new_version}" >&2
exit 1
fi
for remote in $(git remote); do
if git ls-remote --tags "${remote}" | grep -q "refs/tags/v${new_version}$"; then
printf 'Tag %s already exists on %s' "v${new_version}" "${remote}" >&2
exit 1
fi
done

# master and develop must be in sync with every remote ...
git fetch --all
for remote in $(git remote); do
for branch in master develop; do
if ! git diff --quiet "${remote}/${branch}..${branch}"; then
printf 'Remote branch %s/%s not up to date, synchronize first!\n' "${remote}" "${branch}" >&2
exit 1
fi
done
done

# ... develop must be a straight descendant of master ...
if ! git merge-base --is-ancestor master develop; then
printf '%s\n' 'Develop is not a straight descendant of master, rebase!' >&2
exit 1
fi

# ... and there must actually be something to release.
changes="$(git log --oneline master..develop | wc -l)"
if ((changes == 0)); then
printf '%s\n' 'No changes between master and develop?' >&2
exit 1
fi

# Bump only the FIRST `version` line in Cargo.toml (the package's own
# version), then refresh Cargo.lock for this package alone.
sed -i "0,/^version/{s/^version.*$/version = \"${new_version}\"/}" Cargo.toml
cargo update --package git-repo-manager --precise "${new_version}"

# Paranoia: the bump must have changed exactly one line in each of
# Cargo.toml and Cargo.lock and touched nothing else.
diff="$(git diff --numstat)"
if (($(printf '%s\n' "${diff}" | wc -l || true) != 2)); then
printf '%s\n' 'Weird changes detected, bailing' >&2
exit 1
fi
if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.lock$'; then
printf '%s\n' 'Weird changes detected, bailing' >&2
exit 1
fi
if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.toml$'; then
printf '%s\n' 'Weird changes detected, bailing' >&2
exit 1
fi

git add Cargo.lock Cargo.toml
git commit -m "Release v${new_version}"

# Switch to master; fall back to a ../master directory for worktree-style
# checkouts where `git switch` is not possible.
git switch master 2>/dev/null || { [[ -d "../master" ]] && cd "../master"; } || {
printf '%s\n' 'Could not change to master' >&2
exit 1
}
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "${current_branch}" != "master" ]]; then
printf '%s\n' 'Looks like branch switching to master did not work' >&2
exit 1
fi

# Merge the release into master (forced merge commit) and tag it.
git merge --no-ff --no-edit develop
git tag "v${new_version}"

# Push tag + master to every remote; retry each push until it succeeds.
for remote in $(git remote); do
while ! git push "${remote}" "v${new_version}" master; do
:
done
done

# Back to develop (same worktree fallback as above).
git switch develop 2>/dev/null || { [[ -d "../develop" ]] && cd "../develop"; } || {
printf '%s\n' 'Could not change to develop' >&2
exit 1
}
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "${current_branch}" != "develop" ]]; then
printf '%s\n' 'Looks like branch switching to develop did not work' >&2
exit 1
fi

# Develop picks up the merge commit via fast-forward, then is pushed out.
git merge --ff-only master
for remote in $(git remote); do
while ! git push "${remote}" develop; do
:
done
done

cargo publish
printf 'Published %s successfully\n' "${new_version}"
exit 0

45
src/auth.rs Normal file
View File

@@ -0,0 +1,45 @@
use std::process;
/// An opaque wrapper around a secret API token.
///
/// The newtype avoids passing raw `String`s around; callers must explicitly
/// ask for the secret via [`AuthToken::access`].
#[derive(Clone)]
pub struct AuthToken(String);

impl AuthToken {
    /// Returns the raw token string.
    pub fn access(&self) -> &str {
        &self.0
    }
}

/// Runs `command` through `sh -c` and interprets the first line of its
/// stdout as an auth token.
///
/// # Errors
///
/// Returns `Err` with a human-readable message when:
/// * the command cannot be spawned,
/// * it exits unsuccessfully,
/// * it writes anything to stderr (even when exiting successfully),
/// * its stdout is empty or its first line is empty.
pub fn get_token_from_command(command: &str) -> Result<AuthToken, String> {
    // Go through `/usr/bin/env sh` so the command is resolved via PATH and
    // ordinary shell syntax (pipes, quoting) works.
    let output = process::Command::new("/usr/bin/env")
        .arg("sh")
        .arg("-c")
        .arg(command)
        .output()
        .map_err(|error| format!("Failed to run token-command: {}", error))?;

    let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?;
    let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?;

    if !output.status.success() {
        return Err(if stderr.is_empty() {
            String::from("Token command failed.")
        } else {
            format!("Token command failed: {}", stderr)
        });
    }
    if !stderr.is_empty() {
        return Err(format!("Token command produced stderr: {}", stderr));
    }
    if stdout.is_empty() {
        return Err(String::from("Token command did not produce output"));
    }

    // Only the first output line counts as the token. `lines()` also strips
    // a trailing `\r`, so CRLF-producing commands work correctly. (The old
    // code used `split('\n').next()` with an unreachable "did not contain
    // any newline" error: `split` always yields at least one element, and a
    // leading empty line would silently have produced an empty token.)
    let token = stdout
        .lines()
        .next()
        .filter(|line| !line.is_empty())
        .ok_or_else(|| String::from("Token command produced an empty first line"))?;

    Ok(AuthToken(token.to_string()))
}

View File

@@ -1,35 +1,265 @@
use serde::{Deserialize, Serialize};
use std::process;
use super::repo::RepoConfig;
use std::path::Path;
use super::auth;
use super::output::*;
use super::path;
use super::provider;
use super::provider::Filter;
use super::provider::Provider;
use super::repo;
use super::tree;
pub type RemoteProvider = provider::RemoteProvider;
pub type RemoteType = repo::RemoteType;
// Serde default for `RepoConfig::worktree_setup`: when the key is absent in
// the config file, the repo is treated as a plain (non-worktree) checkout.
fn worktree_setup_default() -> bool {
    false
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Config {
ConfigTrees(ConfigTrees),
ConfigProvider(ConfigProvider),
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Config {
pub trees: Trees,
pub struct ConfigTrees {
pub trees: Vec<ConfigTree>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Trees(Vec<Tree>);
pub struct ConfigProviderFilter {
pub access: Option<bool>,
pub owner: Option<bool>,
pub users: Option<Vec<String>>,
pub groups: Option<Vec<String>>,
}
impl Trees {
#[derive(Debug, Serialize, Deserialize)]
pub struct ConfigProvider {
pub provider: RemoteProvider,
pub token_command: String,
pub root: String,
pub filters: Option<ConfigProviderFilter>,
pub force_ssh: Option<bool>,
pub api_url: Option<String>,
pub worktree: Option<bool>,
pub init_worktree: Option<bool>,
pub remote_name: Option<String>,
}
/// Serialized form of a git remote: its name, URL and transport type.
///
/// Mirrors `repo::Remote` one-to-one; the two conversion methods below just
/// move the fields across.
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RemoteConfig {
    pub name: String,
    pub url: String,
    // Serialized as "type", which is a reserved word in Rust.
    #[serde(rename = "type")]
    pub remote_type: RemoteType,
}

impl RemoteConfig {
    /// Builds the config representation from an internal remote.
    pub fn from_remote(remote: repo::Remote) -> Self {
        Self {
            remote_type: remote.remote_type,
            url: remote.url,
            name: remote.name,
        }
    }

    /// Converts this config entry back into an internal remote.
    pub fn into_remote(self) -> repo::Remote {
        repo::Remote {
            remote_type: self.remote_type,
            url: self.url,
            name: self.name,
        }
    }
}
/// Serialized form of a single repository entry inside a tree.
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RepoConfig {
    // May carry a "namespace/" prefix that `into_repo()` splits off again.
    pub name: String,
    // Absent in the config file means `false` (plain checkout).
    #[serde(default = "worktree_setup_default")]
    pub worktree_setup: bool,
    pub remotes: Option<Vec<RemoteConfig>>,
}

impl RepoConfig {
    /// Builds the config representation from an internal `repo::Repo`.
    pub fn from_repo(repo: repo::Repo) -> Self {
        let remotes = repo
            .remotes
            .map(|list| list.into_iter().map(RemoteConfig::from_remote).collect());
        Self {
            name: repo.name,
            worktree_setup: repo.worktree_setup,
            remotes,
        }
    }

    /// Converts the entry back into an internal `repo::Repo`.
    ///
    /// A name containing '/' is split at the LAST slash: everything before
    /// it becomes the namespace, the remainder the bare repository name.
    pub fn into_repo(self) -> repo::Repo {
        let (namespace, name) = if let Some((namespace, name)) = self.name.rsplit_once('/') {
            (Some(namespace.to_string()), name.to_string())
        } else {
            (None, self.name)
        };
        let remotes = self
            .remotes
            .map(|list| list.into_iter().map(RemoteConfig::into_remote).collect());
        repo::Repo {
            name,
            namespace,
            worktree_setup: self.worktree_setup,
            remotes,
        }
    }
}
impl ConfigTrees {
pub fn to_config(self) -> Config {
Config { trees: self }
Config::ConfigTrees(self)
}
pub fn from_vec(vec: Vec<Tree>) -> Self {
Trees(vec)
pub fn from_vec(vec: Vec<ConfigTree>) -> Self {
ConfigTrees { trees: vec }
}
pub fn as_vec(self) -> Vec<Tree> {
self.0
pub fn from_trees(vec: Vec<tree::Tree>) -> Self {
ConfigTrees {
trees: vec.into_iter().map(ConfigTree::from_tree).collect(),
}
}
pub fn as_vec_ref(&self) -> &Vec<Tree> {
self.0.as_ref()
pub fn trees(self) -> Vec<ConfigTree> {
self.trees
}
pub fn trees_mut(&mut self) -> &mut Vec<ConfigTree> {
&mut self.trees
}
pub fn trees_ref(&self) -> &Vec<ConfigTree> {
self.trees.as_ref()
}
}
impl Config {
pub fn trees(self) -> Result<Vec<ConfigTree>, String> {
match self {
Config::ConfigTrees(config) => Ok(config.trees),
Config::ConfigProvider(config) => {
let token = match auth::get_token_from_command(&config.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filters = config.filters.unwrap_or(ConfigProviderFilter {
access: Some(false),
owner: Some(false),
users: Some(vec![]),
groups: Some(vec![]),
});
let filter = Filter::new(
filters.users.unwrap_or_default(),
filters.groups.unwrap_or_default(),
filters.owner.unwrap_or(false),
filters.access.unwrap_or(false),
);
let repos = match config.provider {
RemoteProvider::Github => {
match provider::Github::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
config.remote_name,
)?
}
RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
config.remote_name,
)?
}
};
let mut trees = vec![];
for (namespace, namespace_repos) in repos {
let repos = namespace_repos
.into_iter()
.map(RepoConfig::from_repo)
.collect();
let tree = ConfigTree {
root: if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&config.root).join(namespace))
} else {
path::path_as_string(Path::new(&config.root))
},
repos: Some(repos),
};
trees.push(tree);
}
Ok(trees)
}
}
}
pub fn from_trees(trees: Vec<ConfigTree>) -> Self {
Config::ConfigTrees(ConfigTrees { trees })
}
/// Rewrite tree roots below `$HOME` to start with a `~` component.
///
/// Only applies to static tree configurations; provider configurations
/// are left untouched. The tilde is not handled differently by `Path` —
/// it's just a normal path component — so this substitution is purely
/// for nicer **output**.
pub fn normalize(&mut self) {
    if let Config::ConfigTrees(config) = self {
        let home = path::env_home().display().to_string();
        // `&mut … .iter_mut()` wrapper dropped: `iter_mut()` already
        // yields mutable references.
        for tree in config.trees_mut().iter_mut() {
            if tree.root.starts_with(&home) {
                // The `unwrap()` is safe: `starts_with()` above guarantees
                // the prefix is present.
                let rest = tree.root.strip_prefix(&home).unwrap();
                // Drop a single separating slash so the remainder is
                // relative before joining it onto "~".
                let rest = rest.strip_prefix('/').unwrap_or(rest);
                tree.root = Path::new("~").join(rest).display().to_string();
            }
        }
    }
}
pub fn as_toml(&self) -> Result<String, String> {
match toml::to_string(self) {
Ok(toml) => Ok(toml),
@@ -44,12 +274,31 @@ impl Config {
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Tree {
pub struct ConfigTree {
pub root: String,
pub repos: Option<Vec<RepoConfig>>,
}
pub fn read_config(path: &str) -> Result<Config, String> {
impl ConfigTree {
    /// Build a tree configuration rooted at `root` from a list of
    /// repositories.
    pub fn from_repos(root: String, repos: Vec<repo::Repo>) -> Self {
        let repos = repos.into_iter().map(RepoConfig::from_repo).collect();
        Self {
            root,
            repos: Some(repos),
        }
    }

    /// Convert an in-memory `tree::Tree` into its configuration
    /// representation.
    pub fn from_tree(tree: tree::Tree) -> Self {
        let repos = tree.repos.into_iter().map(RepoConfig::from_repo).collect();
        Self {
            root: tree.root,
            repos: Some(repos),
        }
    }
}
pub fn read_config<'a, T>(path: &str) -> Result<T, String>
where
T: for<'de> serde::Deserialize<'de>,
{
let content = match std::fs::read_to_string(&path) {
Ok(s) => s,
Err(e) => {
@@ -64,7 +313,7 @@ pub fn read_config(path: &str) -> Result<Config, String> {
}
};
let config: Config = match toml::from_str(&content) {
let config: T = match toml::from_str(&content) {
Ok(c) => c,
Err(_) => match serde_yaml::from_str(&content) {
Ok(c) => c,

View File

@@ -31,20 +31,51 @@ pub struct Repos {
#[derive(Parser)]
pub enum ReposAction {
#[clap(
visible_alias = "run",
about = "Synchronize the repositories to the configured values"
)]
Sync(Sync),
#[clap(about = "Generate a repository configuration from an existing file tree")]
Find(Find),
#[clap(subcommand)]
Sync(SyncAction),
#[clap(subcommand)]
Find(FindAction),
#[clap(about = "Show status of configured repositories")]
Status(OptionalConfig),
}
#[derive(Parser)]
#[clap()]
pub struct Sync {
#[clap(about = "Sync local repositories with a configured list")]
pub enum SyncAction {
#[clap(about = "Synchronize the repositories to the configured values")]
Config(Config),
#[clap(about = "Synchronize the repositories from a remote provider")]
Remote(SyncRemoteArgs),
}
// Subcommands of `repos find`: each variant produces a repository
// configuration from a different source. (`//` comments on purpose:
// `///` doc comments would feed into clap's generated help.)
#[derive(Parser)]
#[clap(about = "Generate a repository configuration from existing repositories")]
pub enum FindAction {
    // Scan a local directory tree for git repositories.
    #[clap(about = "Find local repositories")]
    Local(FindLocalArgs),
    // Query a remote provider (GitHub/GitLab) for repositories.
    #[clap(about = "Find repositories on remote provider")]
    Remote(FindRemoteArgs),
    // Resolve a provider configuration file into a repository list.
    #[clap(about = "Find repositories as defined in the configuration file")]
    Config(FindConfigArgs),
}
// Arguments for `repos find local`: scan a directory for git
// repositories and emit a matching configuration.
#[derive(Parser)]
pub struct FindLocalArgs {
    #[clap(help = "The path to search through")]
    pub path: String,

    // Output format of the generated configuration; TOML by default.
    #[clap(
        arg_enum,
        short,
        long,
        help = "Format to produce",
        default_value_t = ConfigFormat::Toml,
    )]
    pub format: ConfigFormat,
}
#[derive(Parser)]
pub struct FindConfigArgs {
#[clap(
short,
long,
@@ -52,6 +83,173 @@ pub struct Sync {
help = "Path to the configuration file"
)]
pub config: String,
#[clap(
arg_enum,
short,
long,
help = "Format to produce",
default_value_t = ConfigFormat::Toml,
)]
pub format: ConfigFormat,
}
// Arguments for `repos find remote`: list repositories from a remote
// provider and emit a matching configuration.
#[derive(Parser)]
#[clap()]
pub struct FindRemoteArgs {
    #[clap(short, long, help = "Path to the configuration file")]
    pub config: Option<String>,

    #[clap(arg_enum, short, long, help = "Remote provider to use")]
    pub provider: RemoteProvider,

    #[clap(short, long, help = "Name of the remote to use")]
    pub remote_name: Option<String>,

    // May be given multiple times (`--user a --user b`).
    #[clap(
        multiple_occurrences = true,
        name = "user",
        long,
        help = "Users to get repositories from"
    )]
    pub users: Vec<String>,

    // May be given multiple times (`--group a --group b`).
    #[clap(
        multiple_occurrences = true,
        name = "group",
        long,
        help = "Groups to get repositories from"
    )]
    pub groups: Vec<String>,

    #[clap(long, help = "Get repositories that belong to the requesting user")]
    pub owner: bool,

    #[clap(long, help = "Get repositories that the requesting user has access to")]
    pub access: bool,

    #[clap(long, help = "Always use SSH, even for public repositories")]
    pub force_ssh: bool,

    // The token itself is never passed on the command line; this is a
    // command whose output yields the token.
    #[clap(long, help = "Command to get API token")]
    pub token_command: String,

    #[clap(long, help = "Root of the repo tree to produce")]
    pub root: String,

    #[clap(
        arg_enum,
        short,
        long,
        help = "Format to produce",
        default_value_t = ConfigFormat::Toml,
    )]
    pub format: ConfigFormat,

    // Stringly-typed bool so that a bare `--worktree` (no value) can mean
    // "true" via `default_missing_value`.
    #[clap(
        long,
        help = "Use worktree setup for repositories",
        possible_values = &["true", "false"],
        default_value = "false",
        default_missing_value = "true",
        min_values = 0,
        max_values = 1,
    )]
    pub worktree: String,

    #[clap(long, help = "Base URL for the API")]
    pub api_url: Option<String>,
}
// Arguments for `repos sync config`: synchronize repositories from a
// configuration file.
#[derive(Parser)]
#[clap()]
pub struct Config {
    #[clap(
        short,
        long,
        default_value = "./config.toml",
        help = "Path to the configuration file"
    )]
    pub config: String,

    // Stringly-typed bool so a bare `--init-worktree` (no value) can mean
    // "true" via `default_missing_value`.
    #[clap(
        long,
        help = "Check out the default worktree after clone",
        possible_values = &["true", "false"],
        default_value = "true",
        default_missing_value = "true",
        min_values = 0,
        max_values = 1,
    )]
    pub init_worktree: String,
}
// Re-export so the CLI argument structs can name the provider enum
// without reaching into the `provider` module directly.
pub type RemoteProvider = super::provider::RemoteProvider;
// Arguments for `repos sync remote`: pull the repository list from a
// remote provider and synchronize to it directly (no config file).
#[derive(Parser)]
#[clap()]
pub struct SyncRemoteArgs {
    #[clap(arg_enum, short, long, help = "Remote provider to use")]
    pub provider: RemoteProvider,

    #[clap(short, long, help = "Name of the remote to use")]
    pub remote_name: Option<String>,

    // May be given multiple times (`--user a --user b`).
    #[clap(
        multiple_occurrences = true,
        name = "user",
        long,
        help = "Users to get repositories from"
    )]
    pub users: Vec<String>,

    // May be given multiple times (`--group a --group b`).
    #[clap(
        multiple_occurrences = true,
        name = "group",
        long,
        help = "Groups to get repositories from"
    )]
    pub groups: Vec<String>,

    #[clap(long, help = "Get repositories that belong to the requesting user")]
    pub owner: bool,

    #[clap(long, help = "Get repositories that the requesting user has access to")]
    pub access: bool,

    #[clap(long, help = "Always use SSH, even for public repositories")]
    pub force_ssh: bool,

    // The token itself is never passed on the command line; this is a
    // command whose output yields the token.
    #[clap(long, help = "Command to get API token")]
    pub token_command: String,

    #[clap(long, help = "Root of the repo tree to produce")]
    pub root: String,

    // Stringly-typed bool so a bare `--worktree` (no value) can mean
    // "true" via `default_missing_value`.
    #[clap(
        long,
        help = "Use worktree setup for repositories",
        possible_values = &["true", "false"],
        default_value = "false",
        default_missing_value = "true",
        min_values = 0,
        max_values = 1,
    )]
    pub worktree: String,

    #[clap(long, help = "Base URL for the API")]
    pub api_url: Option<String>,

    // Same stringly-typed bool trick as `worktree` above.
    #[clap(
        long,
        help = "Check out the default worktree after clone",
        possible_values = &["true", "false"],
        default_value = "true",
        default_missing_value = "true",
        min_values = 0,
        max_values = 1,
    )]
    pub init_worktree: String,
}
#[derive(Parser)]
@@ -67,21 +265,6 @@ pub enum ConfigFormat {
Toml,
}
#[derive(Parser)]
// NOTE(review): duplicates `FindLocalArgs` field-for-field — looks like
// a leftover from the `Find` -> `FindAction` split; confirm whether any
// caller still references it.
pub struct Find {
    #[clap(help = "The path to search through")]
    pub path: String,

    // Output format of the generated configuration; TOML by default.
    #[clap(
        arg_enum,
        short,
        long,
        help = "Format to produce",
        default_value_t = ConfigFormat::Toml,
    )]
    pub format: ConfigFormat,
}
#[derive(Parser)]
pub struct Worktree {
#[clap(subcommand, name = "action")]

View File

@@ -1,26 +1,37 @@
#![forbid(unsafe_code)]
use std::path::Path;
use std::process;
mod cmd;
use grm::auth;
use grm::config;
use grm::find_in_tree;
use grm::output::*;
use grm::path;
use grm::provider;
use grm::provider::Provider;
use grm::repo;
use grm::table;
use grm::tree;
use grm::worktree;
fn main() {
let opts = cmd::parse();
match opts.subcmd {
cmd::SubCommand::Repos(repos) => match repos.action {
cmd::ReposAction::Sync(sync) => {
let config = match config::read_config(&sync.config) {
cmd::ReposAction::Sync(sync) => match sync {
cmd::SyncAction::Config(args) => {
let config = match config::read_config(&args.config) {
Ok(config) => config,
Err(error) => {
print_error(&error);
process::exit(1);
}
};
match grm::sync_trees(config) {
match tree::sync_trees(config, args.init_worktree == "true") {
Ok(success) => {
if !success {
process::exit(1)
@@ -32,6 +43,87 @@ fn main() {
}
}
}
cmd::SyncAction::Remote(args) => {
let token = match auth::get_token_from_command(&args.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access);
let worktree = args.worktree == "true";
let repos = match args.provider {
cmd::RemoteProvider::Github => {
match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
worktree,
args.force_ssh,
args.remote_name,
)
}
cmd::RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
worktree,
args.force_ssh,
args.remote_name,
)
}
};
match repos {
Ok(repos) => {
let mut trees: Vec<config::ConfigTree> = vec![];
for (namespace, repolist) in repos {
let root = if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&args.root).join(namespace))
} else {
path::path_as_string(Path::new(&args.root))
};
let tree = config::ConfigTree::from_repos(root, repolist);
trees.push(tree);
}
let config = config::Config::from_trees(trees);
match tree::sync_trees(config, args.init_worktree == "true") {
Ok(success) => {
if !success {
process::exit(1)
}
}
Err(error) => {
print_error(&format!("Error syncing trees: {}", error));
process::exit(1);
}
}
}
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
}
},
cmd::ReposAction::Status(args) => match &args.config {
Some(config_path) => {
let config = match config::read_config(config_path) {
@@ -41,7 +133,7 @@ fn main() {
process::exit(1);
}
};
match grm::table::get_status_table(config) {
match table::get_status_table(config) {
Ok((tables, errors)) => {
for table in tables {
println!("{}", table);
@@ -65,7 +157,7 @@ fn main() {
}
};
match grm::table::show_single_repo_status(&dir) {
match table::show_single_repo_status(&dir) {
Ok((table, warnings)) => {
println!("{}", table);
for warning in warnings {
@@ -79,8 +171,9 @@ fn main() {
}
}
},
cmd::ReposAction::Find(find) => {
let path = Path::new(&find.path);
cmd::ReposAction::Find(find) => match find {
cmd::FindAction::Local(args) => {
let path = Path::new(&args.path);
if !path.exists() {
print_error(&format!("Path \"{}\" does not exist", path.display()));
process::exit(1);
@@ -102,7 +195,7 @@ fn main() {
}
};
let (found_repos, warnings) = match grm::find_in_tree(&path) {
let (found_repos, warnings) = match find_in_tree(&path) {
Ok((repos, warnings)) => (repos, warnings),
Err(error) => {
print_error(&error);
@@ -110,16 +203,18 @@ fn main() {
}
};
let trees = grm::config::Trees::from_vec(vec![found_repos]);
if trees.as_vec_ref().iter().all(|t| match &t.repos {
let trees = config::ConfigTrees::from_trees(vec![found_repos]);
if trees.trees_ref().iter().all(|t| match &t.repos {
None => false,
Some(r) => r.is_empty(),
}) {
print_warning("No repositories found");
} else {
let config = trees.to_config();
let mut config = trees.to_config();
match find.format {
config.normalize();
match args.format {
cmd::ConfigFormat::Toml => {
let toml = match config.as_toml() {
Ok(toml) => toml,
@@ -152,6 +247,233 @@ fn main() {
print_warning(&warning);
}
}
cmd::FindAction::Config(args) => {
let config: config::ConfigProvider = match config::read_config(&args.config) {
Ok(config) => config,
Err(error) => {
print_error(&error);
process::exit(1);
}
};
let token = match auth::get_token_from_command(&config.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filters = config.filters.unwrap_or(config::ConfigProviderFilter {
access: Some(false),
owner: Some(false),
users: Some(vec![]),
groups: Some(vec![]),
});
let filter = provider::Filter::new(
filters.users.unwrap_or_default(),
filters.groups.unwrap_or_default(),
filters.owner.unwrap_or(false),
filters.access.unwrap_or(false),
);
let repos = match config.provider {
provider::RemoteProvider::Github => {
match match provider::Github::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
config.remote_name,
) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
}
provider::RemoteProvider::Gitlab => {
match match provider::Gitlab::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
config.remote_name,
) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
}
};
let mut trees = vec![];
for (namespace, namespace_repos) in repos {
let tree = config::ConfigTree {
root: if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&config.root).join(namespace))
} else {
path::path_as_string(Path::new(&config.root))
},
repos: Some(
namespace_repos
.into_iter()
.map(config::RepoConfig::from_repo)
.collect(),
),
};
trees.push(tree);
}
let config = config::Config::from_trees(trees);
match args.format {
cmd::ConfigFormat::Toml => {
let toml = match config.as_toml() {
Ok(toml) => toml,
Err(error) => {
print_error(&format!(
"Failed converting config to TOML: {}",
&error
));
process::exit(1);
}
};
print!("{}", toml);
}
cmd::ConfigFormat::Yaml => {
let yaml = match config.as_yaml() {
Ok(yaml) => yaml,
Err(error) => {
print_error(&format!(
"Failed converting config to YAML: {}",
&error
));
process::exit(1);
}
};
print!("{}", yaml);
}
}
}
cmd::FindAction::Remote(args) => {
let token = match auth::get_token_from_command(&args.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access);
let worktree = args.worktree == "true";
let repos = match args.provider {
cmd::RemoteProvider::Github => {
match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
worktree,
args.force_ssh,
args.remote_name,
)
}
cmd::RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
worktree,
args.force_ssh,
args.remote_name,
)
}
};
let repos = repos.unwrap_or_else(|error| {
print_error(&format!("Error: {}", error));
process::exit(1);
});
let mut trees: Vec<config::ConfigTree> = vec![];
for (namespace, repolist) in repos {
let tree = config::ConfigTree {
root: if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&args.root).join(namespace))
} else {
path::path_as_string(Path::new(&args.root))
},
repos: Some(
repolist
.into_iter()
.map(config::RepoConfig::from_repo)
.collect(),
),
};
trees.push(tree);
}
let mut config = config::Config::from_trees(trees);
config.normalize();
match args.format {
cmd::ConfigFormat::Toml => {
let toml = match config.as_toml() {
Ok(toml) => toml,
Err(error) => {
print_error(&format!(
"Failed converting config to TOML: {}",
&error
));
process::exit(1);
}
};
print!("{}", toml);
}
cmd::ConfigFormat::Yaml => {
let yaml = match config.as_yaml() {
Ok(yaml) => yaml,
Err(error) => {
print_error(&format!(
"Failed converting config to YAML: {}",
&error
));
process::exit(1);
}
};
print!("{}", yaml);
}
}
}
},
},
cmd::SubCommand::Worktree(args) => {
let cwd = std::env::current_dir().unwrap_or_else(|error| {
@@ -161,6 +483,9 @@ fn main() {
match args.action {
cmd::WorktreeAction::Add(action_args) => {
if action_args.track.is_some() && action_args.no_track {
print_warning("You are using --track and --no-track at the same time. --track will be ignored");
}
let track = match &action_args.track {
Some(branch) => {
let split = branch.split_once('/');
@@ -182,23 +507,20 @@ fn main() {
None => None,
};
let mut name: &str = &action_args.name;
let subdirectory;
let split = name.split_once('/');
match split {
None => subdirectory = None,
Some(split) => {
if split.0.is_empty() || split.1.is_empty() {
print_error("Worktree name cannot start or end with a slash");
process::exit(1);
} else {
(subdirectory, name) = (Some(Path::new(split.0)), split.1);
match worktree::add_worktree(
&cwd,
&action_args.name,
track,
action_args.no_track,
) {
Ok(warnings) => {
if let Some(warnings) = warnings {
for warning in warnings {
print_warning(&warning);
}
}
print_success(&format!("Worktree {} created", &action_args.name));
}
match grm::add_worktree(&cwd, name, subdirectory, track, action_args.no_track) {
Ok(_) => print_success(&format!("Worktree {} created", &action_args.name)),
Err(error) => {
print_error(&format!("Error creating worktree: {}", error));
process::exit(1);
@@ -206,8 +528,6 @@ fn main() {
}
}
cmd::WorktreeAction::Delete(action_args) => {
let worktree_dir = cwd.join(&action_args.name);
let worktree_config = match repo::read_worktree_root_config(&cwd) {
Ok(config) => config,
Err(error) => {
@@ -219,31 +539,32 @@ fn main() {
}
};
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
print_error(&format!("Error opening repository: {}", error));
process::exit(1);
});
match repo.remove_worktree(
&cwd,
&action_args.name,
&worktree_dir,
Path::new(&action_args.name),
action_args.force,
&worktree_config,
) {
Ok(_) => print_success(&format!("Worktree {} deleted", &action_args.name)),
Err(error) => {
match error {
grm::WorktreeRemoveFailureReason::Error(msg) => {
repo::WorktreeRemoveFailureReason::Error(msg) => {
print_error(&msg);
process::exit(1);
}
grm::WorktreeRemoveFailureReason::Changes(changes) => {
repo::WorktreeRemoveFailureReason::Changes(changes) => {
print_warning(&format!(
"Changes in worktree: {}. Refusing to delete",
changes
));
}
grm::WorktreeRemoveFailureReason::NotMerged(message) => {
repo::WorktreeRemoveFailureReason::NotMerged(message) => {
print_warning(&message);
}
}
@@ -252,12 +573,12 @@ fn main() {
}
}
cmd::WorktreeAction::Status(_args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
print_error(&format!("Error opening repository: {}", error));
process::exit(1);
});
match grm::table::get_worktree_status_table(&repo, &cwd) {
match table::get_worktree_status_table(&repo, &cwd) {
Ok((table, errors)) => {
println!("{}", table);
for error in errors {
@@ -277,8 +598,8 @@ fn main() {
// * Remove all files
// * Set `core.bare` to `true`
let repo = grm::Repo::open(&cwd, false).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, false).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -305,8 +626,8 @@ fn main() {
}
}
cmd::WorktreeAction::Clean(_args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -339,8 +660,8 @@ fn main() {
}
}
cmd::WorktreeAction::Fetch(_args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -355,8 +676,8 @@ fn main() {
print_success("Fetched from all remotes");
}
cmd::WorktreeAction::Pull(args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -396,8 +717,8 @@ fn main() {
print_error("There is no point in using --rebase without --pull");
process::exit(1);
}
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -412,12 +733,8 @@ fn main() {
});
}
let config =
grm::repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
print_error(&format!(
"Failed to read worktree configuration: {}",
error
));
let config = repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
print_error(&format!("Failed to read worktree configuration: {}", error));
process::exit(1);
});

View File

@@ -1,341 +1,36 @@
#![feature(io_error_more)]
#![feature(const_option_ext)]
#![forbid(unsafe_code)]
use std::fs;
use std::path::{Path, PathBuf};
use std::process;
use std::path::Path;
pub mod auth;
pub mod config;
pub mod output;
pub mod path;
pub mod provider;
pub mod repo;
pub mod table;
use config::{Config, Tree};
use output::*;
use repo::{clone_repo, detect_remote_type, Remote, RepoConfig};
pub use repo::{RemoteTrackingStatus, Repo, RepoErrorKind, WorktreeRemoveFailureReason};
// Directory that holds the bare git data in a worktree-enabled checkout;
// its presence also marks a directory as a worktree setup.
const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
// Separator between a branch's namespace prefix and its name
// (used by the worktree handling; TODO confirm against worktree module).
const BRANCH_NAMESPACE_SEPARATOR: &str = "/";
// Git configuration keys manipulated by this crate.
const GIT_CONFIG_BARE_KEY: &str = "core.bare";
const GIT_CONFIG_PUSH_DEFAULT: &str = "push.default";
#[cfg(test)]
mod tests {
    use super::*;

    // All expansion tests assume a fixed, fake home directory.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    #[test]
    fn check_expand_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("~/file")),
            Path::new("/home/test/file")
        );
    }

    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        // A tilde that is not the leading component must not be expanded.
        assert_eq!(
            expand_path(Path::new("/home/~/file")),
            Path::new("/home/~/file")
        );
    }

    #[test]
    fn check_expand_home() {
        setup();
        // Both `$HOME` and `${HOME}` spellings must expand.
        assert_eq!(
            expand_path(Path::new("$HOME/file")),
            Path::new("/home/test/file")
        );
        assert_eq!(
            expand_path(Path::new("${HOME}/file")),
            Path::new("/home/test/file")
        );
    }
}
/// Convert a path into an owned `String`.
///
/// # Panics
///
/// Panics if the path is not valid UTF-8 — the bare `unwrap()` is
/// replaced with an `expect` that states the violated invariant.
pub fn path_as_string(path: &Path) -> String {
    path.to_path_buf()
        .into_os_string()
        .into_string()
        .expect("path contains invalid UTF-8")
}
pub fn env_home() -> PathBuf {
match std::env::var("HOME") {
Ok(path) => Path::new(&path).to_path_buf(),
Err(e) => {
print_error(&format!("Unable to read HOME: {}", e));
process::exit(1);
}
}
}
/// Expand a leading `~` and the `HOME` variable (`$HOME` / `${HOME}`) in
/// `path`.
///
/// Only `HOME` is resolved; any other variable name is reported to
/// `shellexpand` as "not found" via `Ok(None)` (NOTE(review): confirm
/// against the shellexpand docs how `Ok(None)` lookups are rendered —
/// presumably the variable is left in place).
///
/// Exits the process with an error message if expansion fails.
fn expand_path(path: &Path) -> PathBuf {
    // `shellexpand` wants a home-dir callback; ours cannot fail because
    // `env_home()` exits on error.
    fn home_dir() -> Option<PathBuf> {
        Some(env_home())
    }

    let expanded_path = match shellexpand::full_with_context(
        &path_as_string(path),
        home_dir,
        |name| -> Result<Option<String>, &'static str> {
            match name {
                "HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
                _ => Ok(None),
            }
        },
    ) {
        // Borrowed means nothing was expanded; copy into an owned string.
        Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
        Ok(std::borrow::Cow::Owned(path)) => path,
        Err(e) => {
            print_error(&format!("Unable to expand root: {}", e));
            process::exit(1);
        }
    };

    Path::new(&expanded_path).to_path_buf()
}
/// Bring a single repository on disk in line with its configuration.
///
/// Ensures the repository exists at `root_path/<name>` (opening, init-ing,
/// or cloning it as needed) and then reconciles its remotes with the
/// configured list: updates changed URLs, adds missing remotes, deletes
/// remotes that are no longer configured.
///
/// Returns `Err` with a human-readable message on the first failure.
fn sync_repo(root_path: &Path, repo: &RepoConfig) -> Result<(), String> {
    let repo_path = root_path.join(&repo.name);
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut repo_handle = None;

    if repo_path.exists() {
        // The path exists: make sure the on-disk layout (worktree vs.
        // regular) matches what the config expects.
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        }
        repo_handle = match Repo::open(&repo_path, repo.worktree_setup) {
            Ok(repo) => Some(repo),
            Err(error) => {
                // Opening as a regular repo failed — check whether it is
                // actually a worktree setup to give a precise error.
                if !repo.worktree_setup && Repo::open(&repo_path, true).is_ok() {
                    return Err(String::from(
                        "Repo already exists, but is using a worktree setup",
                    ));
                } else {
                    return Err(format!("Opening repository failed: {}", error));
                }
            }
        };
    } else if matches!(&repo.remotes, None) || repo.remotes.as_ref().unwrap().is_empty() {
        // No remotes configured: nothing to clone from, so initialize a
        // fresh repository.
        print_repo_action(
            &repo.name,
            "Repository does not have remotes configured, initializing new",
        );
        repo_handle = match Repo::init(&repo_path, repo.worktree_setup) {
            Ok(r) => {
                print_repo_success(&repo.name, "Repository created");
                Some(r)
            }
            Err(e) => {
                return Err(format!("Repository failed during init: {}", e));
            }
        }
    } else {
        // Clone from the first configured remote. The `unwrap()`s are
        // safe: the branch above handled the None/empty cases.
        let first = repo.remotes.as_ref().unwrap().first().unwrap();
        match clone_repo(first, &repo_path, repo.worktree_setup) {
            Ok(_) => {
                print_repo_success(&repo.name, "Repository successfully cloned");
            }
            Err(e) => {
                return Err(format!("Repository failed during clone: {}", e));
            }
        };
    }
    if let Some(remotes) = &repo.remotes {
        // The clone path above leaves `repo_handle` as None, so reopen
        // here when needed.
        let repo_handle = repo_handle.unwrap_or_else(|| {
            Repo::open(&repo_path, repo.worktree_setup).unwrap_or_else(|_| process::exit(1))
        });

        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Pass 1: add missing remotes, fix changed URLs.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;
            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();
                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        };
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }
        // Pass 2: delete remotes that are no longer configured.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }
    Ok(())
}
/// List repositories found under `root_path` that are absent from the
/// managed configuration, as root-relative path strings.
pub fn find_unmanaged_repos(
    root_path: &Path,
    managed_repos: &[RepoConfig],
) -> Result<Vec<String>, String> {
    let unmanaged = find_repo_paths(root_path)?
        .into_iter()
        .map(|found| path_as_string(found.strip_prefix(&root_path).unwrap()))
        .filter(|name| !managed_repos.iter().any(|managed| &managed.name == name))
        .collect();
    Ok(unmanaged)
}
/// Synchronize every tree in `config`.
///
/// Individual repository failures are printed and do not abort the run;
/// the remaining repositories still get synced. Returns `Ok(true)` when
/// everything succeeded and `Ok(false)` when at least one repository or
/// the unmanaged-repo scan failed.
pub fn sync_trees(config: Config) -> Result<bool, String> {
    let mut failures = false;
    for tree in config.trees.as_vec() {
        let repos = tree.repos.unwrap_or_default();

        let root_path = expand_path(Path::new(&tree.root));

        for repo in &repos {
            match sync_repo(&root_path, repo) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        // Warn about repositories on disk that the config does not know
        // about.
        match find_unmanaged_repos(&root_path, &repos) {
            Ok(unmanaged_repos) => {
                for name in unmanaged_repos {
                    print_warning(&format!("Found unmanaged repository: {}", name));
                }
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }
    Ok(!failures)
}
/// Finds repositories recursively, returning their path
///
/// A directory counts as a repository when it contains either a `.git`
/// directory or the main-worktree marker directory; recursion stops at
/// the first repository found along a branch (nested repos below it are
/// not reported). Symlinks are skipped.
fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
    let mut repos = Vec::new();

    let git_dir = path.join(".git");
    let git_worktree = path.join(GIT_MAIN_WORKTREE_DIRECTORY);

    if git_dir.exists() || git_worktree.exists() {
        repos.push(path.to_path_buf());
    } else {
        match fs::read_dir(path) {
            Ok(contents) => {
                for content in contents {
                    match content {
                        Ok(entry) => {
                            let path = entry.path();
                            if path.is_symlink() {
                                continue;
                            }
                            if path.is_dir() {
                                match find_repo_paths(&path) {
                                    Ok(ref mut r) => repos.append(r),
                                    Err(error) => return Err(error),
                                }
                            }
                        }
                        Err(e) => {
                            return Err(format!("Error accessing directory: {}", e));
                        }
                    };
                }
            }
            Err(e) => {
                // Translate the most common error kinds into friendlier
                // text; everything else falls back to the Debug form.
                return Err(format!(
                    "Failed to open \"{}\": {}",
                    &path.display(),
                    match e.kind() {
                        std::io::ErrorKind::NotADirectory =>
                            String::from("directory expected, but path is not a directory"),
                        std::io::ErrorKind::NotFound => String::from("not found"),
                        _ => format!("{:?}", e.kind()),
                    }
                ));
            }
        };
    }

    Ok(repos)
}
/// Return the directory that holds the actual git data for a checkout:
/// the repository path itself for a regular setup, or the dedicated
/// main-worktree subdirectory for a worktree setup.
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
    // Plain `if` instead of `match` on a bool (clippy::match_bool).
    if is_worktree {
        path.join(GIT_MAIN_WORKTREE_DIRECTORY)
    } else {
        path.to_path_buf()
    }
}
pub mod tree;
pub mod worktree;
/// Find all git repositories under root, recursively
///
/// The bool in the return value specifies whether there is a repository
/// in root itself.
#[allow(clippy::type_complexity)]
fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)>, String> {
let mut repos: Vec<RepoConfig> = Vec::new();
fn find_repos(root: &Path) -> Result<Option<(Vec<repo::Repo>, Vec<String>, bool)>, String> {
let mut repos: Vec<repo::Repo> = Vec::new();
let mut repo_in_root = false;
let mut warnings = Vec::new();
for path in find_repo_paths(root)? {
let is_worktree = Repo::detect_worktree(&path);
for path in tree::find_repo_paths(root)? {
let is_worktree = repo::RepoHandle::detect_worktree(&path);
if path == root {
repo_in_root = true;
}
match Repo::open(&path, is_worktree) {
match repo::RepoHandle::open(&path, is_worktree) {
Err(error) => {
warnings.push(format!(
"Error opening repo {}{}: {}",
@@ -354,32 +49,32 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
Err(error) => {
warnings.push(format!(
"{}: Error getting remotes: {}",
&path_as_string(&path),
&path::path_as_string(&path),
error
));
continue;
}
};
let mut results: Vec<Remote> = Vec::new();
let mut results: Vec<repo::Remote> = Vec::new();
for remote_name in remotes.iter() {
match repo.find_remote(remote_name)? {
Some(remote) => {
let name = remote.name();
let url = remote.url();
let remote_type = match detect_remote_type(&url) {
let remote_type = match repo::detect_remote_type(&url) {
Some(t) => t,
None => {
warnings.push(format!(
"{}: Could not detect remote type of \"{}\"",
&path_as_string(&path),
&path::path_as_string(&path),
&url
));
continue;
}
};
results.push(Remote {
results.push(repo::Remote {
name,
url,
remote_type,
@@ -388,7 +83,7 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
None => {
warnings.push(format!(
"{}: Remote {} not found",
&path_as_string(&path),
&path::path_as_string(&path),
remote_name
));
continue;
@@ -397,17 +92,35 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
}
let remotes = results;
repos.push(RepoConfig {
name: match path == root {
true => match &root.parent() {
Some(parent) => path_as_string(path.strip_prefix(parent).unwrap()),
let (namespace, name) = if path == root {
(
None,
match &root.parent() {
Some(parent) => {
path::path_as_string(path.strip_prefix(parent).unwrap())
}
None => {
warnings.push(String::from("Getting name of the search root failed. Do you have a git repository in \"/\"?"));
continue
},
continue;
}
false => path_as_string(path.strip_prefix(&root).unwrap()),
},
)
} else {
let name = path.strip_prefix(&root).unwrap();
let namespace = name.parent().unwrap();
(
if namespace != Path::new("") {
Some(path::path_as_string(namespace).to_string())
} else {
None
},
path::path_as_string(name),
)
};
repos.push(repo::Repo {
name,
namespace,
remotes: Some(remotes),
worktree_setup: is_worktree,
});
@@ -417,10 +130,10 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
Ok(Some((repos, warnings, repo_in_root)))
}
pub fn find_in_tree(path: &Path) -> Result<(Tree, Vec<String>), String> {
pub fn find_in_tree(path: &Path) -> Result<(tree::Tree, Vec<String>), String> {
let mut warnings = Vec::new();
let (repos, repo_in_root): (Vec<RepoConfig>, bool) = match find_repos(path)? {
let (repos, repo_in_root): (Vec<repo::Repo>, bool) = match find_repos(path)? {
Some((vec, mut repo_warnings, repo_in_root)) => {
warnings.append(&mut repo_warnings);
(vec, repo_in_root)
@@ -439,182 +152,12 @@ pub fn find_in_tree(path: &Path) -> Result<(Tree, Vec<String>), String> {
}
}
}
let home = env_home();
if root.starts_with(&home) {
// The tilde is not handled differently, it's just a normal path component for `Path`.
// Therefore we can treat it like that during **output**.
//
// The `unwrap()` is safe here as we are testing via `starts_with()`
// beforehand
root = Path::new("~").join(root.strip_prefix(&home).unwrap());
}
Ok((
Tree {
tree::Tree {
root: root.into_os_string().into_string().unwrap(),
repos: Some(repos),
repos,
},
warnings,
))
}
pub fn add_worktree(
directory: &Path,
name: &str,
subdirectory: Option<&Path>,
track: Option<(&str, &str)>,
no_track: bool,
) -> Result<(), String> {
let repo = Repo::open(directory, true).map_err(|error| match error.kind {
RepoErrorKind::NotFound => {
String::from("Current directory does not contain a worktree setup")
}
_ => format!("Error opening repo: {}", error),
})?;
let config = repo::read_worktree_root_config(directory)?;
let path = match subdirectory {
Some(dir) => dir.join(name),
None => Path::new(name).to_path_buf(),
};
if repo.find_worktree(&path).is_ok() {
return Err(format!("Worktree {} already exists", &name));
}
let mut remote_branch_exists = false;
let default_checkout = || repo.default_branch()?.to_commit();
let checkout_commit;
if no_track {
checkout_commit = default_checkout()?;
} else {
match track {
Some((remote_name, remote_branch_name)) => {
let remote_branch = repo.find_remote_branch(remote_name, remote_branch_name);
match remote_branch {
Ok(branch) => {
remote_branch_exists = true;
checkout_commit = branch.to_commit()?;
}
Err(_) => {
remote_branch_exists = false;
checkout_commit = default_checkout()?;
}
}
}
None => match &config {
None => checkout_commit = default_checkout()?,
Some(config) => match &config.track {
None => checkout_commit = default_checkout()?,
Some(track_config) => {
if track_config.default {
let remote_branch =
repo.find_remote_branch(&track_config.default_remote, name);
match remote_branch {
Ok(branch) => {
remote_branch_exists = true;
checkout_commit = branch.to_commit()?;
}
Err(_) => {
checkout_commit = default_checkout()?;
}
}
} else {
checkout_commit = default_checkout()?;
}
}
},
},
};
}
let mut target_branch = match repo.find_local_branch(name) {
Ok(branchref) => branchref,
Err(_) => repo.create_branch(name, &checkout_commit)?,
};
fn push(
remote: &mut repo::RemoteHandle,
branch_name: &str,
remote_branch_name: &str,
repo: &repo::Repo,
) -> Result<(), String> {
if !remote.is_pushable()? {
return Err(format!(
"Cannot push to non-pushable remote {}",
remote.url()
));
}
remote.push(branch_name, remote_branch_name, repo)
}
if !no_track {
if let Some((remote_name, remote_branch_name)) = track {
if remote_branch_exists {
target_branch.set_upstream(remote_name, remote_branch_name)?;
} else {
let mut remote = repo
.find_remote(remote_name)
.map_err(|error| format!("Error getting remote {}: {}", remote_name, error))?
.ok_or_else(|| format!("Remote {} not found", remote_name))?;
push(
&mut remote,
&target_branch.name()?,
remote_branch_name,
&repo,
)?;
target_branch.set_upstream(remote_name, remote_branch_name)?;
}
} else if let Some(config) = config {
if let Some(track_config) = config.track {
if track_config.default {
let remote_name = track_config.default_remote;
if remote_branch_exists {
target_branch.set_upstream(&remote_name, name)?;
} else {
let remote_branch_name = match track_config.default_remote_prefix {
Some(prefix) => {
format!("{}{}{}", &prefix, BRANCH_NAMESPACE_SEPARATOR, &name)
}
None => name.to_string(),
};
let mut remote = repo
.find_remote(&remote_name)
.map_err(|error| {
format!("Error getting remote {}: {}", remote_name, error)
})?
.ok_or_else(|| format!("Remote {} not found", remote_name))?;
if !remote.is_pushable()? {
return Err(format!(
"Cannot push to non-pushable remote {}",
remote.url()
));
}
push(
&mut remote,
&target_branch.name()?,
&remote_branch_name,
&repo,
)?;
target_branch.set_upstream(&remote_name, &remote_branch_name)?;
}
}
}
}
}
if let Some(subdirectory) = subdirectory {
std::fs::create_dir_all(subdirectory).map_err(|error| error.to_string())?;
}
repo.new_worktree(name, &path, &target_branch)?;
Ok(())
}

View File

@@ -20,12 +20,12 @@ pub fn print_repo_action(repo: &str, message: &str) {
}
pub fn print_action(message: &str) {
let stderr = Term::stderr();
let stdout = Term::stdout();
let mut style = Style::new().yellow();
if stderr.is_term() {
if stdout.is_term() {
style = style.force_styling(true);
}
stderr
stdout
.write_line(&format!("[{}] {}", style.apply_to('\u{2699}'), &message))
.unwrap();
}
@@ -46,13 +46,13 @@ pub fn print_repo_success(repo: &str, message: &str) {
}
pub fn print_success(message: &str) {
let stderr = Term::stderr();
let stdout = Term::stdout();
let mut style = Style::new().green();
if stderr.is_term() {
if stdout.is_term() {
style = style.force_styling(true);
}
stderr
stdout
.write_line(&format!("[{}] {}", style.apply_to('\u{2714}'), &message))
.unwrap();
}

84
src/path.rs Normal file
View File

@@ -0,0 +1,84 @@
use std::path::{Path, PathBuf};
use std::process;
use super::output::*;
#[cfg(test)]
mod tests {
    use super::*;

    // Pin HOME to a fixed location so expansion results are predictable.
    // NOTE(review): `set_var` mutates process-global state and cargo runs
    // tests in parallel by default; all tests here set the same value so
    // this is currently harmless, but consider serializing if other
    // env-dependent tests are added.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    // A leading tilde expands to $HOME.
    #[test]
    fn check_expand_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("~/file")),
            Path::new("/home/test/file")
        );
    }

    // A tilde that is not the first path component is left untouched.
    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("/home/~/file")),
            Path::new("/home/~/file")
        );
    }

    // Both the `$HOME` and `${HOME}` variable forms are expanded.
    #[test]
    fn check_expand_home() {
        setup();
        assert_eq!(
            expand_path(Path::new("$HOME/file")),
            Path::new("/home/test/file")
        );
        assert_eq!(
            expand_path(Path::new("${HOME}/file")),
            Path::new("/home/test/file")
        );
    }
}
/// Render a path as an owned `String`.
///
/// # Panics
///
/// Panics if the path is not valid UTF-8 (the same contract as the
/// previous implementation, which `unwrap()`ed the conversion).
pub fn path_as_string(path: &Path) -> String {
    // Borrow the underlying OsStr directly instead of first cloning the
    // whole path into a PathBuf (the previous version allocated an
    // intermediate PathBuf for no benefit).
    path.as_os_str()
        .to_str()
        .expect("path is not valid UTF-8")
        .to_string()
}
pub fn env_home() -> PathBuf {
match std::env::var("HOME") {
Ok(path) => Path::new(&path).to_path_buf(),
Err(e) => {
print_error(&format!("Unable to read HOME: {}", e));
process::exit(1);
}
}
}
pub fn expand_path(path: &Path) -> PathBuf {
fn home_dir() -> Option<PathBuf> {
Some(env_home())
}
let expanded_path = match shellexpand::full_with_context(
&path_as_string(path),
home_dir,
|name| -> Result<Option<String>, &'static str> {
match name {
"HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
_ => Ok(None),
}
},
) {
Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
Ok(std::borrow::Cow::Owned(path)) => path,
Err(e) => {
print_error(&format!("Unable to expand root: {}", e));
process::exit(1);
}
};
Path::new(&expanded_path).to_path_buf()
}

140
src/provider/github.rs Normal file
View File

@@ -0,0 +1,140 @@
use serde::Deserialize;
use super::auth;
use super::escape;
use super::ApiErrorResponse;
use super::Filter;
use super::JsonError;
use super::Project;
use super::Provider;
/// Sent as the `accept` header to pin the Github REST API v3 JSON format.
const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json";

/// Base URL of the Github API, overridable at build time via the
/// `GITHUB_API_BASEURL` environment variable (useful for testing).
///
/// A `match` is used instead of `Option::unwrap_or` because `unwrap_or`
/// is not a `const fn` and therefore cannot be evaluated in a constant
/// context.
const GITHUB_API_BASEURL: &str = match option_env!("GITHUB_API_BASEURL") {
    Some(url) => url,
    None => "https://api.github.com",
};
/// Subset of the fields the Github API returns for a repository;
/// unknown fields are ignored during deserialization.
#[derive(Deserialize)]
pub struct GithubProject {
    // Short repository name, without the owner part.
    pub name: String,
    // "owner/name" form; the namespace is derived from this
    // (see `Project::namespace` below).
    pub full_name: String,
    // HTTPS clone URL.
    pub clone_url: String,
    // SSH clone URL.
    pub ssh_url: String,
    // Private repositories are cloned via SSH instead of HTTPS.
    pub private: bool,
}
/// Minimal user payload: only the login name is needed.
#[derive(Deserialize)]
struct GithubUser {
    // Github calls this field "login"; renamed for clarity.
    #[serde(rename = "login")]
    pub username: String,
}
impl Project for GithubProject {
    /// Short repository name (no owner prefix).
    fn name(&self) -> String {
        self.name.clone()
    }

    /// Owner/organization part of `full_name`, i.e. everything before
    /// the last `/`; `None` if there is no separator.
    fn namespace(&self) -> Option<String> {
        // `map` replaces the manual
        // `if let Some(..) { Some(..) } else { None }` (clippy::manual_map).
        self.full_name
            .rsplit_once('/')
            .map(|(namespace, _name)| namespace.to_string())
    }

    /// SSH clone URL as reported by the API.
    fn ssh_url(&self) -> String {
        self.ssh_url.clone()
    }

    /// HTTPS clone URL as reported by the API.
    fn http_url(&self) -> String {
        self.clone_url.clone()
    }

    /// Whether the repository is private.
    fn private(&self) -> bool {
        self.private
    }
}
/// Error payload returned by the Github API, e.g. for failed
/// authentication or an unknown resource.
#[derive(Deserialize)]
pub struct GithubApiErrorResponse {
    pub message: String,
}
impl JsonError for GithubApiErrorResponse {
    // Github puts the human-readable error text into `message`;
    // consuming `self` moves it out without cloning.
    fn to_string(self) -> String {
        self.message
    }
}
/// Github provider: queries the Github REST API for repositories
/// matching `filter`, authenticated with `secret_token`.
pub struct Github {
    filter: Filter,
    secret_token: auth::AuthToken,
}
impl Provider for Github {
    type Project = GithubProject;
    type Error = GithubApiErrorResponse;

    /// Create a Github provider. The Github API endpoint is fixed, so
    /// passing an `api_url_override` is an error.
    fn new(
        filter: Filter,
        secret_token: auth::AuthToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        match api_url_override {
            Some(_) => Err("API URL overriding is not supported for Github".to_string()),
            None => Ok(Self {
                filter,
                secret_token,
            }),
        }
    }

    fn filter(&self) -> &Filter {
        &self.filter
    }

    fn secret_token(&self) -> &auth::AuthToken {
        &self.secret_token
    }

    /// Github expects `token <secret>` in the authorization header.
    fn auth_header_key() -> &'static str {
        "token"
    }

    /// All repositories owned by `user`.
    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        let url = format!("{}/users/{}/repos", GITHUB_API_BASEURL, escape(user));
        self.call_list(&url, Some(ACCEPT_HEADER_JSON))
    }

    /// All repositories of the organization `group`.
    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        let url = format!("{}/orgs/{}/repos?type=all", GITHUB_API_BASEURL, escape(group));
        self.call_list(&url, Some(ACCEPT_HEADER_JSON))
    }

    /// All repositories the authenticated user has access to.
    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        let url = format!("{}/user/repos", GITHUB_API_BASEURL);
        self.call_list(&url, Some(ACCEPT_HEADER_JSON))
    }

    /// Login name of the user the token belongs to.
    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> {
        let user = super::call::<GithubUser, GithubApiErrorResponse>(
            &format!("{}/user", GITHUB_API_BASEURL),
            Self::auth_header_key(),
            self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?;
        Ok(user.username)
    }
}

161
src/provider/gitlab.rs Normal file
View File

@@ -0,0 +1,161 @@
use serde::Deserialize;
use super::auth;
use super::escape;
use super::ApiErrorResponse;
use super::Filter;
use super::JsonError;
use super::Project;
use super::Provider;
/// Plain JSON accept header for the Gitlab REST API.
const ACCEPT_HEADER_JSON: &str = "application/json";

/// Default Gitlab base URL, overridable at build time via the
/// `GITLAB_API_BASEURL` environment variable. A per-instance override
/// can additionally be configured at runtime (see `Gitlab::api_url`).
///
/// A `match` is used instead of `Option::unwrap_or` because `unwrap_or`
/// is not a `const fn` and therefore cannot be evaluated in a constant
/// context.
const GITLAB_API_BASEURL: &str = match option_env!("GITLAB_API_BASEURL") {
    Some(url) => url,
    None => "https://gitlab.com",
};
/// Project visibility levels as reported by Gitlab. Everything that is
/// not `Public` is treated as private when selecting the clone URL
/// (see `Project::private`).
#[derive(Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum GitlabVisibility {
    Private,
    Internal,
    Public,
}
/// Subset of the fields the Gitlab API returns for a project; unknown
/// fields are ignored during deserialization.
#[derive(Deserialize)]
pub struct GitlabProject {
    // Gitlab's `path` is the URL-safe project slug.
    #[serde(rename = "path")]
    pub name: String,
    // "group/subgroup/name" form; the namespace is derived from this
    // (see `Project::namespace` below).
    pub path_with_namespace: String,
    // HTTP(S) clone URL.
    pub http_url_to_repo: String,
    // SSH clone URL.
    pub ssh_url_to_repo: String,
    pub visibility: GitlabVisibility,
}
/// Minimal user payload: only the username is needed.
#[derive(Deserialize)]
struct GitlabUser {
    pub username: String,
}
impl Project for GitlabProject {
    /// URL-safe project slug.
    fn name(&self) -> String {
        self.name.clone()
    }

    /// Namespace (group, possibly with subgroups) part of
    /// `path_with_namespace`, i.e. everything before the last `/`;
    /// `None` if there is no separator.
    fn namespace(&self) -> Option<String> {
        // `map` replaces the manual
        // `if let Some(..) { Some(..) } else { None }` (clippy::manual_map).
        self.path_with_namespace
            .rsplit_once('/')
            .map(|(namespace, _name)| namespace.to_string())
    }

    /// SSH clone URL as reported by the API.
    fn ssh_url(&self) -> String {
        self.ssh_url_to_repo.clone()
    }

    /// HTTP(S) clone URL as reported by the API.
    fn http_url(&self) -> String {
        self.http_url_to_repo.clone()
    }

    /// Internal projects also count as private: they cannot be cloned
    /// anonymously.
    fn private(&self) -> bool {
        !matches!(self.visibility, GitlabVisibility::Public)
    }
}
/// Error payload returned by the Gitlab API. Different endpoints use
/// different field names for the message, hence the aliases.
#[derive(Deserialize)]
pub struct GitlabApiErrorResponse {
    #[serde(alias = "error_description", alias = "error")]
    pub message: String,
}
impl JsonError for GitlabApiErrorResponse {
    // The human-readable error ends up in `message`, whichever alias
    // the endpoint used; consuming `self` moves it out without cloning.
    fn to_string(self) -> String {
        self.message
    }
}
/// Gitlab provider: queries a Gitlab instance (gitlab.com or a
/// self-hosted one via `api_url_override`) for projects matching
/// `filter`, authenticated with `secret_token`.
pub struct Gitlab {
    filter: Filter,
    secret_token: auth::AuthToken,
    api_url_override: Option<String>,
}
impl Gitlab {
    /// Effective API base URL: the runtime override if configured,
    /// otherwise the compile-time default. Trailing slashes are
    /// stripped so callers can append `/api/v4/...` safely.
    fn api_url(&self) -> String {
        // `as_deref` lets us fall back to the `&'static str` default
        // directly; the previous version built a temporary
        // `GITLAB_API_BASEURL.to_string()` allocation on every call,
        // even when the override was set.
        self.api_url_override
            .as_deref()
            .unwrap_or(GITLAB_API_BASEURL)
            .trim_end_matches('/')
            .to_string()
    }
}
impl Provider for Gitlab {
type Project = GitlabProject;
type Error = GitlabApiErrorResponse;
fn new(
filter: Filter,
secret_token: auth::AuthToken,
api_url_override: Option<String>,
) -> Result<Self, String> {
Ok(Self {
filter,
secret_token,
api_url_override,
})
}
fn filter(&self) -> &Filter {
&self.filter
}
fn secret_token(&self) -> &auth::AuthToken {
&self.secret_token
}
fn auth_header_key() -> &'static str {
"bearer"
}
fn get_user_projects(
&self,
user: &str,
) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
self.call_list(
&format!("{}/api/v4/users/{}/projects", self.api_url(), escape(user)),
Some(ACCEPT_HEADER_JSON),
)
}
fn get_group_projects(
&self,
group: &str,
) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
self.call_list(
&format!(
"{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false",
self.api_url(),
escape(group),
),
Some(ACCEPT_HEADER_JSON),
)
}
fn get_accessible_projects(
&self,
) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
self.call_list(
&format!("{}/api/v4/projects", self.api_url(),),
Some(ACCEPT_HEADER_JSON),
)
}
fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> {
Ok(super::call::<GitlabUser, GitlabApiErrorResponse>(
&format!("{}/api/v4/user", self.api_url()),
Self::auth_header_key(),
self.secret_token(),
Some(ACCEPT_HEADER_JSON),
)?
.username)
}
}

356
src/provider/mod.rs Normal file
View File

@@ -0,0 +1,356 @@
use serde::{Deserialize, Serialize};
// Required to use the `json()` method from the trait
use isahc::ReadResponseExt;
pub mod github;
pub mod gitlab;
pub use github::Github;
pub use gitlab::Gitlab;
use super::auth;
use super::repo;
use std::collections::HashMap;
const DEFAULT_REMOTE_NAME: &str = "origin";
/// Supported forge backends, selectable on the command line (via
/// `clap::ArgEnum`) and in configuration files (the serde aliases
/// accept the common spellings).
#[derive(Debug, Deserialize, Serialize, clap::ArgEnum, Clone)]
pub enum RemoteProvider {
    #[serde(alias = "github", alias = "GitHub")]
    Github,
    #[serde(alias = "gitlab", alias = "GitLab")]
    Gitlab,
}
/// A provider response body is either the expected list of projects or
/// an error payload; `untagged` lets serde try both shapes.
// NOTE(review): not referenced anywhere in this part of the module —
// verify it is still used elsewhere before considering removal.
#[derive(Deserialize)]
#[serde(untagged)]
enum ProjectResponse<T, U> {
    Success(Vec<T>),
    Failure(U),
}
/// Percent-encode `s` so it can be used as a single URL path or query
/// component.
pub fn escape(s: &str) -> String {
    let encoded = url_escape::encode_component(s);
    encoded.into_owned()
}
/// Common interface of a project/repository as returned by a forge API.
pub trait Project {
    /// Convert this project into the internal repository representation
    /// with a single remote named `provider_name`.
    ///
    /// SSH is used for the remote if `force_ssh` is set or the project
    /// is private (private projects cannot be cloned anonymously over
    /// HTTPS).
    fn into_repo_config(
        self,
        provider_name: &str,
        worktree_setup: bool,
        force_ssh: bool,
    ) -> repo::Repo
    where
        Self: Sized,
    {
        // The same condition selects both the URL and the remote type;
        // compute it once instead of evaluating `force_ssh || private()`
        // twice.
        let use_ssh = force_ssh || self.private();
        repo::Repo {
            name: self.name(),
            namespace: self.namespace(),
            worktree_setup,
            remotes: Some(vec![repo::Remote {
                name: String::from(provider_name),
                url: if use_ssh {
                    self.ssh_url()
                } else {
                    self.http_url()
                },
                remote_type: if use_ssh {
                    repo::RemoteType::Ssh
                } else {
                    repo::RemoteType::Https
                },
            }]),
        }
    }

    /// Project name without the namespace.
    fn name(&self) -> String;
    /// Namespace (owner/group) of the project, if any.
    fn namespace(&self) -> Option<String>;
    /// SSH clone URL.
    fn ssh_url(&self) -> String;
    /// HTTP(S) clone URL.
    fn http_url(&self) -> String;
    /// Whether the project requires authentication to clone.
    fn private(&self) -> bool;
}
/// Selects which projects to fetch from a provider: explicit user and
/// group lists, plus flags for "everything I own" (`owner`) and
/// "everything I can access" (`access`). All criteria are combined
/// (see `Provider::get_repos`).
#[derive(Clone)]
pub struct Filter {
    users: Vec<String>,
    groups: Vec<String>,
    owner: bool,
    access: bool,
}
impl Filter {
pub fn new(users: Vec<String>, groups: Vec<String>, owner: bool, access: bool) -> Self {
Filter {
users,
groups,
owner,
access,
}
}
}
/// Error from a provider call: either a structured JSON error payload
/// from the API (`Json`) or a plain message for transport and
/// serialization failures (`String`).
pub enum ApiErrorResponse<T>
where
    T: JsonError,
{
    Json(T),
    String(String),
}
/// Allows `?` on `Result<_, String>` inside functions returning
/// `Result<_, ApiErrorResponse<T>>`.
impl<T> From<String> for ApiErrorResponse<T>
where
    T: JsonError,
{
    fn from(s: String) -> ApiErrorResponse<T> {
        Self::String(s)
    }
}
/// Implemented by provider-specific error payloads to extract the
/// human-readable message. Consumes `self` so the message can be moved
/// out without cloning.
pub trait JsonError {
    fn to_string(self) -> String;
}
pub trait Provider {
type Project: serde::de::DeserializeOwned + Project;
type Error: serde::de::DeserializeOwned + JsonError;
fn new(
filter: Filter,
secret_token: auth::AuthToken,
api_url_override: Option<String>,
) -> Result<Self, String>
where
Self: Sized;
fn filter(&self) -> &Filter;
fn secret_token(&self) -> &auth::AuthToken;
fn auth_header_key() -> &'static str;
fn get_user_projects(
&self,
user: &str,
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
fn get_group_projects(
&self,
group: &str,
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
fn get_own_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
self.get_user_projects(&self.get_current_user()?)
}
fn get_accessible_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
fn get_current_user(&self) -> Result<String, ApiErrorResponse<Self::Error>>;
///
/// Calls the API at specific uri and expects a successful response of Vec<T> back, or an error
/// response U
///
/// Handles paging with "link" HTTP headers properly and reads all pages to
/// the end.
fn call_list(
&self,
uri: &str,
accept_header: Option<&str>,
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
let mut results = vec![];
let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;
let request = isahc::Request::builder()
.uri(uri)
.method("GET")
.header("accept", accept_header.unwrap_or("application/json"))
.header(
"authorization",
format!(
"{} {}",
Self::auth_header_key(),
&self.secret_token().access()
),
)
.body(())
.map_err(|error| error.to_string())?;
let mut response = client
.send(request)
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
if !response.status().is_success() {
let r: Self::Error = response
.json()
.map_err(|error| format!("Failed deserializing error response: {}", error))?;
return Err(ApiErrorResponse::Json(r));
}
let result: Vec<Self::Project> = response
.json()
.map_err(|error| format!("Failed deserializing response: {}", error))?;
results.extend(result);
if let Some(link_header) = response.headers().get("link") {
let link_header = link_header.to_str().map_err(|error| error.to_string())?;
let link_header =
parse_link_header::parse(link_header).map_err(|error| error.to_string())?;
let next_page = link_header.get(&Some(String::from("next")));
if let Some(page) = next_page {
let following_repos = self.call_list(&page.raw_uri, accept_header)?;
results.extend(following_repos);
}
}
Ok(results)
}
fn get_repos(
&self,
worktree_setup: bool,
force_ssh: bool,
remote_name: Option<String>,
) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> {
let mut repos = vec![];
if self.filter().owner {
repos.extend(self.get_own_projects().map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?);
}
if self.filter().access {
let accessible_projects =
self.get_accessible_projects()
.map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?;
for accessible_project in accessible_projects {
let mut already_present = false;
for repo in &repos {
if repo.name() == accessible_project.name()
&& repo.namespace() == accessible_project.namespace()
{
already_present = true;
}
}
if !already_present {
repos.push(accessible_project);
}
}
}
for user in &self.filter().users {
let user_projects = self.get_user_projects(user).map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?;
for user_project in user_projects {
let mut already_present = false;
for repo in &repos {
if repo.name() == user_project.name()
&& repo.namespace() == user_project.namespace()
{
already_present = true;
}
}
if !already_present {
repos.push(user_project);
}
}
}
for group in &self.filter().groups {
let group_projects = self
.get_group_projects(group)
.map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?;
for group_project in group_projects {
let mut already_present = false;
for repo in &repos {
if repo.name() == group_project.name()
&& repo.namespace() == group_project.namespace()
{
already_present = true;
}
}
if !already_present {
repos.push(group_project);
}
}
}
let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new();
let remote_name = remote_name.unwrap_or_else(|| DEFAULT_REMOTE_NAME.to_string());
for repo in repos {
let namespace = repo.namespace();
let mut repo = repo.into_repo_config(&remote_name, worktree_setup, force_ssh);
// Namespace is already part of the hashmap key. I'm not too happy
// about the data exchange format here.
repo.remove_namespace();
ret.entry(namespace).or_insert(vec![]).push(repo);
}
Ok(ret)
}
}
/// Single (non-paginated) GET against the provider API: returns the
/// JSON body deserialized as `T`, or the error payload deserialized as
/// `U` for non-success status codes.
fn call<T, U>(
    uri: &str,
    auth_header_key: &str,
    secret_token: &auth::AuthToken,
    accept_header: Option<&str>,
) -> Result<T, ApiErrorResponse<U>>
where
    T: serde::de::DeserializeOwned,
    U: serde::de::DeserializeOwned + JsonError,
{
    let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;

    let request = isahc::Request::builder()
        .uri(uri)
        .header("accept", accept_header.unwrap_or("application/json"))
        .header(
            "authorization",
            format!("{} {}", &auth_header_key, &secret_token.access()),
        )
        .body(())
        .map_err(|error| ApiErrorResponse::String(error.to_string()))?;

    let mut response = client
        .send(request)
        .map_err(|error| ApiErrorResponse::String(error.to_string()))?;

    // Non-2xx: the body is expected to be the provider's error payload.
    if !response.status().is_success() {
        let error_payload: U = response
            .json()
            .map_err(|error| format!("Failed deserializing error response: {}", error))?;
        return Err(ApiErrorResponse::Json(error_payload));
    }

    let payload: T = response
        .json()
        .map_err(|error| format!("Failed deserializing response: {}", error))?;
    Ok(payload)
}

View File

@@ -3,11 +3,15 @@ use std::path::Path;
use git2::Repository;
use crate::output::*;
use super::output::*;
use super::path;
use super::worktree;
const WORKTREE_CONFIG_FILE_NAME: &str = "grm.toml";
const GIT_CONFIG_BARE_KEY: &str = "core.bare";
const GIT_CONFIG_PUSH_DEFAULT: &str = "push.default";
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum RemoteType {
Ssh,
@@ -31,7 +35,7 @@ pub enum GitPushDefaultSetting {
Upstream,
}
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum RepoErrorKind {
NotFound,
Unknown(String),
@@ -104,28 +108,32 @@ impl std::fmt::Display for RepoError {
}
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
#[derive(Debug)]
pub struct Remote {
pub name: String,
pub url: String,
#[serde(rename = "type")]
pub remote_type: RemoteType,
}
fn worktree_setup_default() -> bool {
false
#[derive(Debug)]
pub struct Repo {
pub name: String,
pub namespace: Option<String>,
pub worktree_setup: bool,
pub remotes: Option<Vec<Remote>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RepoConfig {
pub name: String,
impl Repo {
pub fn fullname(&self) -> String {
match &self.namespace {
Some(namespace) => format!("{}/{}", namespace, self.name),
None => self.name.clone(),
}
}
#[serde(default = "worktree_setup_default")]
pub worktree_setup: bool,
pub remotes: Option<Vec<Remote>>,
pub fn remove_namespace(&mut self) {
self.namespace = None
}
}
pub struct RepoChanges {
@@ -182,7 +190,7 @@ impl Worktree {
}
pub fn forward_branch(&self, rebase: bool, stash: bool) -> Result<Option<String>, String> {
let repo = Repo::open(Path::new(&self.name), false)
let repo = RepoHandle::open(Path::new(&self.name), false)
.map_err(|error| format!("Error opening worktree: {}", error))?;
if let Ok(remote_branch) = repo.find_local_branch(&self.name)?.upstream() {
@@ -225,7 +233,7 @@ impl Worktree {
let operation = operation.map_err(convert_libgit2_error)?;
// This is required to preserve the commiter of the rebased
// commits, which is the expected behaviour.
// commits, which is the expected behavior.
let rebased_commit = repo
.0
.find_commit(operation.id())
@@ -286,7 +294,7 @@ impl Worktree {
config: &Option<WorktreeRootConfig>,
stash: bool,
) -> Result<Option<String>, String> {
let repo = Repo::open(Path::new(&self.name), false)
let repo = RepoHandle::open(Path::new(&self.name), false)
.map_err(|error| format!("Error opening worktree: {}", error))?;
let guess_default_branch = || {
@@ -349,7 +357,7 @@ impl Worktree {
let operation = operation.map_err(convert_libgit2_error)?;
// This is required to preserve the commiter of the rebased
// commits, which is the expected behaviour.
// commits, which is the expected behavior.
let rebased_commit = repo
.0
.find_commit(operation.id())
@@ -443,6 +451,26 @@ mod tests {
fn check_unsupported_protocol_git() {
detect_remote_type("git://example.com");
}
#[test]
fn repo_check_fullname() {
let with_namespace = Repo {
name: "name".to_string(),
namespace: Some("namespace".to_string()),
worktree_setup: false,
remotes: None,
};
let without_namespace = Repo {
name: "name".to_string(),
namespace: None,
worktree_setup: false,
remotes: None,
};
assert_eq!(with_namespace.fullname(), "namespace/name");
assert_eq!(without_namespace.fullname(), "name");
}
}
pub fn detect_remote_type(remote_url: &str) -> Option<RemoteType> {
@@ -468,21 +496,21 @@ pub fn detect_remote_type(remote_url: &str) -> Option<RemoteType> {
None
}
pub struct Repo(git2::Repository);
pub struct RepoHandle(git2::Repository);
pub struct Branch<'a>(git2::Branch<'a>);
fn convert_libgit2_error(error: git2::Error) -> String {
error.message().to_string()
}
impl Repo {
impl RepoHandle {
pub fn open(path: &Path, is_worktree: bool) -> Result<Self, RepoError> {
let open_func = match is_worktree {
true => Repository::open_bare,
false => Repository::open,
};
let path = match is_worktree {
true => path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY),
true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
false => path.to_path_buf(),
};
match open_func(path) {
@@ -507,7 +535,7 @@ impl Repo {
// Right now, we just open the repo AGAIN. It is safe, as we are only accessing the stash
// with the second reference, so there are no cross effects. But it just smells. Also,
// using `unwrap()` here as we are already sure that the repo is openable(?).
let mut repo = Repo::open(self.0.path(), false).unwrap();
let mut repo = RepoHandle::open(self.0.path(), false).unwrap();
repo.0
.stash_save2(&author, None, Some(git2::StashFlags::INCLUDE_UNTRACKED))
.map_err(convert_libgit2_error)?;
@@ -515,7 +543,7 @@ impl Repo {
}
pub fn stash_pop(&self) -> Result<(), String> {
let mut repo = Repo::open(self.0.path(), false).unwrap();
let mut repo = RepoHandle::open(self.0.path(), false).unwrap();
repo.0
.stash_pop(
0,
@@ -631,6 +659,14 @@ impl Repo {
.collect::<Result<Vec<Branch>, String>>()
}
pub fn remote_branches(&self) -> Result<Vec<Branch>, String> {
self.0
.branches(Some(git2::BranchType::Remote))
.map_err(convert_libgit2_error)?
.map(|branch| Ok(Branch(branch.map_err(convert_libgit2_error)?.0)))
.collect::<Result<Vec<Branch>, String>>()
}
pub fn fetch(&self, remote_name: &str) -> Result<(), String> {
let mut remote = self
.0
@@ -655,11 +691,11 @@ impl Repo {
pub fn init(path: &Path, is_worktree: bool) -> Result<Self, String> {
let repo = match is_worktree {
false => Repository::init(path).map_err(convert_libgit2_error)?,
true => Repository::init_bare(path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY))
true => Repository::init_bare(path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY))
.map_err(convert_libgit2_error)?,
};
let repo = Repo(repo);
let repo = RepoHandle(repo);
if is_worktree {
repo.set_config_push(GitPushDefaultSetting::Upstream)?;
@@ -672,10 +708,8 @@ impl Repo {
self.0.config().map_err(convert_libgit2_error)
}
pub fn find_worktree(&self, path: &Path) -> Result<(), String> {
self.0
.find_worktree(path.to_str().expect("Worktree path is not valid utf-8"))
.map_err(convert_libgit2_error)?;
pub fn find_worktree(&self, name: &str) -> Result<(), String> {
self.0.find_worktree(name).map_err(convert_libgit2_error)?;
Ok(())
}
@@ -720,8 +754,8 @@ impl Repo {
let mut config = self.config()?;
config
.set_bool(crate::GIT_CONFIG_BARE_KEY, value)
.map_err(|error| format!("Could not set {}: {}", crate::GIT_CONFIG_BARE_KEY, error))
.set_bool(GIT_CONFIG_BARE_KEY, value)
.map_err(|error| format!("Could not set {}: {}", GIT_CONFIG_BARE_KEY, error))
}
pub fn convert_to_worktree(
@@ -744,7 +778,7 @@ impl Repo {
return Err(WorktreeConversionFailureReason::Ignored);
}
std::fs::rename(".git", crate::GIT_MAIN_WORKTREE_DIRECTORY).map_err(|error| {
std::fs::rename(".git", worktree::GIT_MAIN_WORKTREE_DIRECTORY).map_err(|error| {
WorktreeConversionFailureReason::Error(format!(
"Error moving .git directory: {}",
error
@@ -764,7 +798,7 @@ impl Repo {
Ok(entry) => {
let path = entry.path();
// unwrap is safe here, the path will ALWAYS have a file component
if path.file_name().unwrap() == crate::GIT_MAIN_WORKTREE_DIRECTORY {
if path.file_name().unwrap() == worktree::GIT_MAIN_WORKTREE_DIRECTORY {
continue;
}
if path.is_file() || path.is_symlink() {
@@ -790,7 +824,7 @@ impl Repo {
}
}
let worktree_repo = Repo::open(root_dir, true).map_err(|error| {
let worktree_repo = RepoHandle::open(root_dir, true).map_err(|error| {
WorktreeConversionFailureReason::Error(format!(
"Opening newly converted repository failed: {}",
error
@@ -813,18 +847,12 @@ impl Repo {
config
.set_str(
crate::GIT_CONFIG_PUSH_DEFAULT,
GIT_CONFIG_PUSH_DEFAULT,
match value {
GitPushDefaultSetting::Upstream => "upstream",
},
)
.map_err(|error| {
format!(
"Could not set {}: {}",
crate::GIT_CONFIG_PUSH_DEFAULT,
error
)
})
.map_err(|error| format!("Could not set {}: {}", GIT_CONFIG_PUSH_DEFAULT, error))
}
pub fn has_untracked_files(&self, is_worktree: bool) -> Result<bool, String> {
@@ -1014,16 +1042,82 @@ impl Repo {
})
}
pub fn default_branch(&self) -> Result<Branch, String> {
match self.0.find_branch("main", git2::BranchType::Local) {
Ok(branch) => Ok(Branch(branch)),
Err(_) => match self.0.find_branch("master", git2::BranchType::Local) {
Ok(branch) => Ok(Branch(branch)),
Err(_) => Err(String::from("Could not determine default branch")),
},
pub fn get_remote_default_branch(&self, remote_name: &str) -> Result<Option<Branch>, String> {
// libgit2's `git_remote_default_branch()` and `Remote::default_branch()`
// need an actual connection to the remote, so they may fail.
if let Some(mut remote) = self.find_remote(remote_name)? {
if remote.connected() {
let remote = remote; // unmut
if let Ok(remote_default_branch) = remote.default_branch() {
return Ok(Some(self.find_local_branch(&remote_default_branch)?));
};
}
}
// Note that <remote>/HEAD only exists after a normal clone, there is no way to get the
// remote HEAD afterwards. So this is a "best effort" approach.
if let Ok(remote_head) = self.find_remote_branch(remote_name, "HEAD") {
if let Some(pointer_name) = remote_head.as_reference().symbolic_target() {
if let Some(local_branch_name) =
pointer_name.strip_prefix(&format!("refs/remotes/{}/", remote_name))
{
return Ok(Some(self.find_local_branch(local_branch_name)?));
} else {
eprintln!("Remote HEAD ({}) pointer is invalid", pointer_name);
}
} else {
eprintln!("Remote HEAD does not point to a symbolic target");
}
}
Ok(None)
}
pub fn default_branch(&self) -> Result<Branch, String> {
// This is a bit of a guessing game.
//
// In the best case, there is only one remote. Then, we can check <remote>/HEAD to get the
// default remote branch.
//
// If there are multiple remotes, we first check whether they all have the same
// <remote>/HEAD branch. If yes, good! If not, we use whatever "origin" uses, if that
// exists. If it does not, there is no way to reliably get a remote default branch.
//
// In this case, we just try to guess a local branch from a list. If even that does not
// work, well, bad luck.
let remotes = self.remotes()?;
if remotes.len() == 1 {
let remote_name = &remotes[0];
if let Some(default_branch) = self.get_remote_default_branch(remote_name)? {
return Ok(default_branch);
}
} else {
let mut default_branches: Vec<Branch> = vec![];
for remote_name in remotes {
if let Some(default_branch) = self.get_remote_default_branch(&remote_name)? {
default_branches.push(default_branch)
}
}
if !default_branches.is_empty()
&& (default_branches.len() == 1
|| default_branches
.windows(2)
.all(|w| w[0].name() == w[1].name()))
{
return Ok(default_branches.remove(0));
}
}
for branch_name in &vec!["main", "master"] {
if let Ok(branch) = self.0.find_branch(branch_name, git2::BranchType::Local) {
return Ok(Branch(branch));
}
}
Err(String::from("Could not determine default branch"))
}
// Looks like there is no distinguishing between the error cases
// "no such remote" and "failed to get remote for some reason".
// May be a good idea to handle this explicitly, by returning a
@@ -1059,18 +1153,21 @@ impl Repo {
pub fn remove_worktree(
&self,
base_dir: &Path,
name: &str,
worktree_dir: &Path,
force: bool,
worktree_config: &Option<WorktreeRootConfig>,
) -> Result<(), WorktreeRemoveFailureReason> {
if !worktree_dir.exists() {
let fullpath = base_dir.join(worktree_dir);
if !fullpath.exists() {
return Err(WorktreeRemoveFailureReason::Error(format!(
"{} does not exist",
name
)));
}
let worktree_repo = Repo::open(worktree_dir, false).map_err(|error| {
let worktree_repo = RepoHandle::open(&fullpath, false).map_err(|error| {
WorktreeRemoveFailureReason::Error(format!("Error opening repo: {}", error))
})?;
@@ -1082,12 +1179,11 @@ impl Repo {
WorktreeRemoveFailureReason::Error(format!("Failed getting name of branch: {}", error))
})?;
if branch_name != name
&& !branch_name.ends_with(&format!("{}{}", crate::BRANCH_NAMESPACE_SEPARATOR, name))
{
if branch_name != name {
return Err(WorktreeRemoveFailureReason::Error(format!(
"Branch {} is checked out in worktree, this does not look correct",
&branch_name
"Branch \"{}\" is checked out in worktree \"{}\", this does not look correct",
&branch_name,
&worktree_dir.display(),
)));
}
@@ -1157,13 +1253,46 @@ impl Repo {
}
}
if let Err(e) = std::fs::remove_dir_all(&worktree_dir) {
// worktree_dir is a relative path, starting from base_dir. We walk it
// upwards (from subdirectory to parent directories) and remove each
// component, in case it is empty. Only the leaf directory can be
// removed unconditionally (as it contains the worktree itself).
if let Err(e) = std::fs::remove_dir_all(&fullpath) {
return Err(WorktreeRemoveFailureReason::Error(format!(
"Error deleting {}: {}",
&worktree_dir.display(),
e
)));
}
if let Some(current_dir) = worktree_dir.parent() {
for current_dir in current_dir.ancestors() {
let current_dir = base_dir.join(current_dir);
if current_dir
.read_dir()
.map_err(|error| {
WorktreeRemoveFailureReason::Error(format!(
"Error reading {}: {}",
&current_dir.display(),
error
))
})?
.next()
.is_none()
{
if let Err(e) = std::fs::remove_dir(&current_dir) {
return Err(WorktreeRemoveFailureReason::Error(format!(
"Error deleting {}: {}",
&worktree_dir.display(),
e
)));
}
} else {
break;
}
}
}
self.prune_worktree(name)
.map_err(WorktreeRemoveFailureReason::Error)?;
branch
@@ -1216,7 +1345,13 @@ impl Repo {
{
let repo_dir = &directory.join(&worktree.name());
if repo_dir.exists() {
match self.remove_worktree(worktree.name(), repo_dir, false, &config) {
match self.remove_worktree(
directory,
worktree.name(),
Path::new(worktree.name()),
false,
&config,
) {
Ok(_) => print_success(&format!("Worktree {} deleted", &worktree.name())),
Err(error) => match error {
WorktreeRemoveFailureReason::Changes(changes) => {
@@ -1253,7 +1388,7 @@ impl Repo {
let mut unmanaged_worktrees = Vec::new();
for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? {
let dirname = crate::path_as_string(
let dirname = path::path_as_string(
entry
.map_err(|error| error.to_string())?
.path()
@@ -1273,28 +1408,30 @@ impl Repo {
};
let default_branch_name = match &config {
None => guess_default_branch()?,
None => guess_default_branch().ok(),
Some(config) => match &config.persistent_branches {
None => guess_default_branch()?,
None => guess_default_branch().ok(),
Some(persistent_branches) => {
if persistent_branches.is_empty() {
guess_default_branch()?
guess_default_branch().ok()
} else {
persistent_branches[0].clone()
Some(persistent_branches[0].clone())
}
}
},
};
if dirname == crate::GIT_MAIN_WORKTREE_DIRECTORY {
if dirname == worktree::GIT_MAIN_WORKTREE_DIRECTORY {
continue;
}
if dirname == WORKTREE_CONFIG_FILE_NAME {
continue;
}
if let Some(default_branch_name) = default_branch_name {
if dirname == default_branch_name {
continue;
}
}
if !&worktrees.iter().any(|worktree| worktree.name() == dirname) {
unmanaged_worktrees.push(dirname);
}
@@ -1303,7 +1440,7 @@ impl Repo {
}
pub fn detect_worktree(path: &Path) -> bool {
path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY).exists()
path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY).exists()
}
}
@@ -1339,7 +1476,7 @@ impl<'a> Branch<'a> {
}
}
impl Branch<'_> {
impl<'a> Branch<'a> {
pub fn commit(&self) -> Result<Commit, String> {
Ok(Commit(
self.0
@@ -1349,6 +1486,15 @@ impl Branch<'_> {
))
}
pub fn commit_owned(self) -> Result<Commit<'a>, String> {
Ok(Commit(
self.0
.into_reference()
.peel_to_commit()
.map_err(convert_libgit2_error)?,
))
}
pub fn set_upstream(&mut self, remote_name: &str, branch_name: &str) -> Result<(), String> {
self.0
.set_upstream(Some(&format!("{}/{}", remote_name, branch_name)))
@@ -1372,6 +1518,15 @@ impl Branch<'_> {
self.0.delete().map_err(convert_libgit2_error)
}
pub fn basename(&self) -> Result<String, String> {
let name = self.name()?;
if let Some((_prefix, basename)) = name.split_once('/') {
Ok(basename.to_string())
} else {
Ok(name)
}
}
// only used internally in this module, exposes libgit2 details
fn as_reference(&self) -> &git2::Reference {
self.0.get()
@@ -1417,6 +1572,20 @@ impl RemoteHandle<'_> {
.to_string()
}
pub fn connected(&mut self) -> bool {
self.0.connected()
}
pub fn default_branch(&self) -> Result<String, String> {
Ok(self
.0
.default_branch()
.map_err(convert_libgit2_error)?
.as_str()
.expect("Remote branch name is not valid utf-8")
.to_string())
}
pub fn is_pushable(&self) -> Result<bool, String> {
let remote_type = detect_remote_type(self.0.url().expect("Remote name is not valid utf-8"))
.ok_or_else(|| String::from("Could not detect remote type"))?;
@@ -1427,7 +1596,7 @@ impl RemoteHandle<'_> {
&mut self,
local_branch_name: &str,
remote_branch_name: &str,
_repo: &Repo,
_repo: &RepoHandle,
) -> Result<(), String> {
if !self.is_pushable()? {
return Err(String::from("Trying to push to a non-pushable remote"));
@@ -1462,7 +1631,7 @@ pub fn clone_repo(
) -> Result<(), Box<dyn std::error::Error>> {
let clone_target = match is_worktree {
false => path.to_path_buf(),
true => path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY),
true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
};
print_action(&format!(
@@ -1493,7 +1662,7 @@ pub fn clone_repo(
}
}
let repo = Repo::open(&clone_target, false)?;
let repo = RepoHandle::open(&clone_target, false)?;
if is_worktree {
repo.set_config_push(GitPushDefaultSetting::Upstream)?;
@@ -1507,6 +1676,24 @@ pub fn clone_repo(
repo.rename_remote(&origin, &remote.name)?;
}
// Initialize local branches. For all remote branches, we set up local
// tracking branches with the same name (just without the remote prefix).
for remote_branch in repo.remote_branches()? {
let local_branch_name = remote_branch.basename()?;
if repo.find_local_branch(&local_branch_name).is_ok() {
continue;
}
// Ignore <remote>/HEAD, as this is not something we can check out
if local_branch_name == "HEAD" {
continue;
}
let mut local_branch = repo.create_branch(&local_branch_name, &remote_branch.commit()?)?;
local_branch.set_upstream(&remote.name, &local_branch_name)?;
}
// If there is no head_branch, we most likely cloned an empty repository and
// there is no point in setting any upstreams.
if let Ok(mut active_branch) = repo.head_branch() {

View File

@@ -1,3 +1,7 @@
use super::config;
use super::path;
use super::repo;
use comfy_table::{Cell, Table};
use std::path::Path;
@@ -19,7 +23,7 @@ fn add_table_header(table: &mut Table) {
fn add_repo_status(
table: &mut Table,
repo_name: &str,
repo_handle: &crate::Repo,
repo_handle: &repo::RepoHandle,
is_worktree: bool,
) -> Result<(), String> {
let repo_status = repo_handle.status(is_worktree)?;
@@ -63,11 +67,11 @@ fn add_repo_status(
" <{}>{}",
remote_branch_name,
&match remote_tracking_status {
crate::RemoteTrackingStatus::UpToDate =>
repo::RemoteTrackingStatus::UpToDate =>
String::from(" \u{2714}"),
crate::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
crate::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
crate::RemoteTrackingStatus::Diverged(d1, d2) =>
repo::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
repo::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
repo::RemoteTrackingStatus::Diverged(d1, d2) =>
format!(" [+{}/-{}]", &d1, &d2),
}
)
@@ -97,7 +101,7 @@ fn add_repo_status(
// Don't return table, return a type that implements Display(?)
pub fn get_worktree_status_table(
repo: &crate::Repo,
repo: &repo::RepoHandle,
directory: &Path,
) -> Result<(impl std::fmt::Display, Vec<String>), String> {
let worktrees = repo.get_worktrees()?;
@@ -109,7 +113,7 @@ pub fn get_worktree_status_table(
for worktree in &worktrees {
let worktree_dir = &directory.join(&worktree.name());
if worktree_dir.exists() {
let repo = match crate::Repo::open(worktree_dir, false) {
let repo = match repo::RepoHandle::open(worktree_dir, false) {
Ok(repo) => repo,
Err(error) => {
errors.push(format!(
@@ -130,36 +134,22 @@ pub fn get_worktree_status_table(
));
}
}
for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? {
let dirname = crate::path_as_string(
entry
.map_err(|error| error.to_string())?
.path()
.strip_prefix(&directory)
// this unwrap is safe, as we can be sure that each subentry of
// &directory also has the prefix &dir
.unwrap(),
);
if dirname == crate::GIT_MAIN_WORKTREE_DIRECTORY {
continue;
}
if !&worktrees.iter().any(|worktree| worktree.name() == dirname) {
for worktree in repo::RepoHandle::find_unmanaged_worktrees(repo, directory)? {
errors.push(format!(
"Found {}, which is not a valid worktree directory!",
&dirname
&worktree
));
}
}
Ok((table, errors))
}
pub fn get_status_table(config: crate::Config) -> Result<(Vec<Table>, Vec<String>), String> {
pub fn get_status_table(config: config::Config) -> Result<(Vec<Table>, Vec<String>), String> {
let mut errors = Vec::new();
let mut tables = Vec::new();
for tree in config.trees.as_vec() {
for tree in config.trees()? {
let repos = tree.repos.unwrap_or_default();
let root_path = crate::expand_path(Path::new(&tree.root));
let root_path = path::expand_path(Path::new(&tree.root));
let mut table = Table::new();
add_table_header(&mut table);
@@ -175,12 +165,12 @@ pub fn get_status_table(config: crate::Config) -> Result<(Vec<Table>, Vec<String
continue;
}
let repo_handle = crate::Repo::open(&repo_path, repo.worktree_setup);
let repo_handle = repo::RepoHandle::open(&repo_path, repo.worktree_setup);
let repo_handle = match repo_handle {
Ok(repo) => repo,
Err(error) => {
if error.kind == crate::RepoErrorKind::NotFound {
if error.kind == repo::RepoErrorKind::NotFound {
errors.push(format!(
"{}: No git repository found. Run sync?",
&repo.name
@@ -218,8 +208,8 @@ fn add_worktree_table_header(table: &mut Table) {
fn add_worktree_status(
table: &mut Table,
worktree: &crate::repo::Worktree,
repo: &crate::Repo,
worktree: &repo::Worktree,
repo: &repo::RepoHandle,
) -> Result<(), String> {
let repo_status = repo.status(false)?;
@@ -284,13 +274,13 @@ pub fn show_single_repo_status(
let mut table = Table::new();
let mut warnings = Vec::new();
let is_worktree = crate::Repo::detect_worktree(path);
let is_worktree = repo::RepoHandle::detect_worktree(path);
add_table_header(&mut table);
let repo_handle = crate::Repo::open(path, is_worktree);
let repo_handle = repo::RepoHandle::open(path, is_worktree);
if let Err(error) = repo_handle {
if error.kind == crate::RepoErrorKind::NotFound {
if error.kind == repo::RepoErrorKind::NotFound {
return Err(String::from("Directory is not a git directory"));
} else {
return Err(format!("Opening repository failed: {}", error));

300
src/tree.rs Normal file
View File

@@ -0,0 +1,300 @@
use std::fs;
use std::path::{Path, PathBuf};
use super::config;
use super::output::*;
use super::path;
use super::repo;
use super::worktree;
/// A group of repositories sharing a common root directory, as declared in
/// the configuration.
pub struct Tree {
    // Root directory of the tree, as written in the configuration.
    pub root: String,
    // Repositories that are managed under this root.
    pub repos: Vec<repo::Repo>,
}
/// Returns the paths of all repositories discovered under `root_path` that
/// are not covered by `managed_repos` (i.e. not part of the configuration).
pub fn find_unmanaged_repos(
    root_path: &Path,
    managed_repos: &[repo::Repo],
) -> Result<Vec<PathBuf>, String> {
    let discovered = find_repo_paths(root_path)?;

    // Keep only the discovered paths that no managed repository maps to.
    let unmanaged = discovered
        .into_iter()
        .filter(|repo_path| {
            !managed_repos
                .iter()
                .any(|managed| Path::new(root_path).join(managed.fullname()) == *repo_path)
        })
        .collect();

    Ok(unmanaged)
}
/// Synchronizes all trees from the configuration: syncs every configured
/// repository (clone/init/remote reconciliation via `sync_repo`) and warns
/// about repositories found on disk that are not in the configuration.
///
/// Returns `Ok(false)` if any repository failed to sync, `Ok(true)` if all
/// succeeded. Errors are only returned for configuration-level failures;
/// per-repository failures are printed and folded into the boolean.
pub fn sync_trees(config: config::Config, init_worktree: bool) -> Result<bool, String> {
    let mut failures = false;

    // Collected across all trees so a repo managed by one tree is not
    // reported as unmanaged when found while scanning another tree.
    let mut unmanaged_repos_absolute_paths = vec![];
    let mut managed_repos_absolute_paths = vec![];

    let trees = config.trees()?;

    for tree in trees {
        let repos: Vec<repo::Repo> = tree
            .repos
            .unwrap_or_default()
            .into_iter()
            .map(|repo| repo.into_repo())
            .collect();

        let root_path = path::expand_path(Path::new(&tree.root));

        for repo in &repos {
            managed_repos_absolute_paths.push(root_path.join(repo.fullname()));
            match sync_repo(&root_path, repo, init_worktree) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    // Keep going; report the failure via the return value.
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        match find_unmanaged_repos(&root_path, &repos) {
            Ok(repos) => {
                for path in repos.into_iter() {
                    // Deduplicate: trees may overlap on disk.
                    if !unmanaged_repos_absolute_paths.contains(&path) {
                        unmanaged_repos_absolute_paths.push(path);
                    }
                }
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    // Warn about anything on disk that no tree claims as managed.
    for unmanaged_repo_absolute_path in &unmanaged_repos_absolute_paths {
        if managed_repos_absolute_paths
            .iter()
            .any(|managed_repo_absolute_path| {
                managed_repo_absolute_path == unmanaged_repo_absolute_path
            })
        {
            continue;
        }
        print_warning(&format!(
            "Found unmanaged repository: \"{}\"",
            path::path_as_string(unmanaged_repo_absolute_path)
        ));
    }

    Ok(!failures)
}
/// Finds repositories recursively under `path`, returning their paths.
///
/// A directory counts as a repository root if it contains either a regular
/// `.git` directory or the main-working-tree marker directory used for
/// worktree setups. Repository roots are not descended into; symlinks are
/// never followed.
///
/// # Errors
///
/// Returns an error if a directory cannot be opened or an entry cannot be
/// accessed.
pub fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
    let mut repos = Vec::new();

    let git_dir = path.join(".git");
    let git_worktree = path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY);

    if git_dir.exists() || git_worktree.exists() {
        // This directory is itself a repository; do not recurse into it.
        repos.push(path.to_path_buf());
        return Ok(repos);
    }

    let contents = fs::read_dir(path).map_err(|e| {
        format!(
            "Failed to open \"{}\": {}",
            &path.display(),
            match e.kind() {
                std::io::ErrorKind::NotADirectory =>
                    String::from("directory expected, but path is not a directory"),
                std::io::ErrorKind::NotFound => String::from("not found"),
                _ => format!("{:?}", e.kind()),
            }
        )
    })?;

    for entry in contents {
        let entry_path = entry
            .map_err(|e| format!("Error accessing directory: {}", e))?
            .path();

        // Never follow symlinks: they could loop or escape the tree.
        if entry_path.is_symlink() {
            continue;
        }
        if entry_path.is_dir() {
            repos.append(&mut find_repo_paths(&entry_path)?);
        }
    }

    Ok(repos)
}
/// Ensures that the repository described by `repo` exists under `root_path`
/// and matches the configuration.
///
/// * If the directory already exists (and is non-empty), only validates that
///   the on-disk layout matches the configured worktree setup.
/// * If it does not exist, initializes a fresh repository (no remotes
///   configured) or clones from the first configured remote.
/// * Afterwards, reconciles remotes: adds missing ones, updates changed URLs,
///   and deletes remotes absent from the configuration.
///
/// If `init_worktree` is true and a worktree-enabled repository was freshly
/// created, a worktree for the default branch is checked out.
fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result<(), String> {
    let repo_path = root_path.join(&repo.fullname());
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut newly_created = false;

    // Syncing a repository can have a few different flows, depending on the repository
    // that is to be cloned and the local directory:
    //
    // * If the local directory already exists, we have to make sure that it matches the
    //   worktree configuration, as there is no way to convert. If the sync is supposed
    //   to be worktree-aware, but the local directory is not, we abort. Note that we could
    //   also automatically convert here. In any case, the other direction (converting a
    //   worktree repository to non-worktree) cannot work, as we'd have to throw away the
    //   worktrees.
    //
    // * If the local directory does not yet exist, we have to actually do something ;). If
    //   no remote is specified, we just initialize a new repository (git init) and are done.
    //
    //   If there are (potentially multiple) remotes configured, we have to clone. We assume
    //   that the first remote is the canonical one that we do the first clone from. After
    //   cloning, we just add the other remotes as usual (as if they were added to the config
    //   afterwards)
    //
    // Branch handling:
    //
    // Handling the branches on checkout is a bit magic. For minimum surprises, we just set
    // up local tracking branches for all remote branches.
    if repo_path.exists()
        && repo_path
            .read_dir()
            .map_err(|error| error.to_string())?
            .next()
            .is_some()
    {
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        };
    } else if repo
        .remotes
        .as_ref()
        .map_or(true, |remotes| remotes.is_empty())
    {
        print_repo_action(
            &repo.name,
            "Repository does not have remotes configured, initializing new",
        );
        match repo::RepoHandle::init(&repo_path, repo.worktree_setup) {
            Ok(_) => print_repo_success(&repo.name, "Repository created"),
            Err(e) => return Err(format!("Repository failed during init: {}", e)),
        }
    } else {
        // The unwraps are safe: the branch above established that `remotes`
        // is `Some` and non-empty.
        let first = repo.remotes.as_ref().unwrap().first().unwrap();
        match repo::clone_repo(first, &repo_path, repo.worktree_setup) {
            Ok(_) => print_repo_success(&repo.name, "Repository successfully cloned"),
            Err(e) => return Err(format!("Repository failed during clone: {}", e)),
        }
        newly_created = true;
    }

    let repo_handle = match repo::RepoHandle::open(&repo_path, repo.worktree_setup) {
        Ok(repo) => repo,
        Err(error) => {
            // Give a more helpful message when the on-disk layout is a
            // worktree setup but the configuration says it is not.
            if !repo.worktree_setup && repo::RepoHandle::open(&repo_path, true).is_ok() {
                return Err(String::from(
                    "Repo already exists, but is using a worktree setup",
                ));
            } else {
                return Err(format!("Opening repository failed: {}", error));
            }
        }
    };

    if newly_created && repo.worktree_setup && init_worktree {
        match repo_handle.default_branch() {
            Ok(branch) => {
                worktree::add_worktree(&repo_path, &branch.name()?, None, false)?;
            }
            Err(_error) => print_repo_error(
                &repo.name,
                "Could not determine default branch, skipping worktree initialization",
            ),
        }
    }

    if let Some(remotes) = &repo.remotes {
        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Create missing remotes and update URLs that drifted from the config.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;
            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();
                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        };
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }

        // Delete remotes that exist locally but are no longer configured.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }

    Ok(())
}
/// Returns the directory that actually holds the git data: the repository
/// path itself for a regular checkout, or the hidden main-working-tree
/// directory for a worktree setup.
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
    if is_worktree {
        path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY)
    } else {
        path.to_path_buf()
    }
}

780
src/worktree.rs Normal file
View File

@@ -0,0 +1,780 @@
//! This handles worktrees for repositories. Some considerations to take care
//! of:
//!
//! * Which branch to check out / create
//! * Which commit to check out
//! * Whether to track a remote branch, and which
//!
//! There are a few general rules. The main goal is to do the least surprising thing
//! in each situation, and to never change existing setups (e.g. tracking,
//! branch states) except when explicitly told to. In 99% of all cases, the
//! workflow will be quite straightforward.
//!
//! * The name of the worktree (and therefore the path) is **always** the same
//! as the name of the branch.
//! * Never modify existing local branches
//! * Only modify tracking branches for existing local branches if explicitly
//! requested
//! * By default, do not do remote operations. This means that we do not do any
//! tracking setup (but of course, the local branch can already have a
//! tracking branch set up, which will just be left alone)
//! * Be quite lax with finding a remote tracking branch (as using an existing
//! branch is most likely preferred to creating a new branch)
//!
//! There are a few different options that can be given:
//!
//! * Explicit track (`--track`) and explicit no-track (`--no-track`)
//! * A configuration may specify to enable tracking a remote branch by default
//! * A configuration may specify a prefix for remote branches
//!
//! # How to handle the local branch?
//!
//! That one is easy: If a branch with the desired name already exists, all is
//! well. If not, we create a new one.
//!
//! # Which commit should be checked out?
//!
//! The most important rule: If the local branch already existed, just leave it
//! as it is. Only if a new branch is created do we need to answer the question
//! which commit to set it to. Generally, we set the branch to whatever the
//! "default" branch of the repository is (something like "main" or "master").
//! But there are a few cases where we can use remote branches to make the
//! result less surprising.
//!
//! First, if tracking is explicitly disabled, we still try to guess! But we
//! *do* ignore `--track`, as this is how it's done everywhere else.
//!
//! As an example: If `origin/foobar` exists and we run `grm worktree add foobar
//! --no-track`, we create a new worktree called `foobar` that's on the same
//! state as `origin/foobar` (but we will not set up tracking, see below).
//!
//! If tracking is explicitly requested to a certain state, we use that remote
//! branch. If it exists, easy. If not, no more guessing!
//!
//! Now, it's important to select the correct remote. In the easiest case, there
//! is only one remote, so we just use that one. If there is more than one
//! remote, we check whether there is a default remote configured via
//! `track.default_remote`. If yes, we use that one. If not, we have to do the
//! selection process below *for each of them*. If only one of them returns
//! some branch to track, we use that one. If more than one remote returns
//! information, we only use it if it's identical for each. Otherwise we bail,
//! as there is no point in guessing.
//!
//! The commit selection process looks like this:
//!
//! * If a prefix is specified in the configuration, we look for
//! `{remote}/{prefix}/{worktree_name}`
//!
//! * We look for `{remote}/{worktree_name}` (yes, this means that even when a
//! prefix is configured, we use a branch *without* a prefix if one with
//! prefix does not exist)
//!
//! Note that we may select different branches for different remotes when
//! prefixes are used. If remote1 has a branch with a prefix and remote2 only has
//! a branch *without* a prefix, we select them both when a prefix is used. This
//! could lead to the following situation:
//!
//! * There is `origin/prefix/foobar` and `remote2/foobar`, with different
//! states
//! * You set `track.default_prefix = "prefix"` (and no default remote!)
//! * You run `grm worktree add prefix/foobar`
//! * Instead of just picking `origin/prefix/foobar`, grm will complain because
//! it also selected `remote2/foobar`.
//!
//! This is just emergent behavior of the logic above. Fixing it would require
//! additional logic for that edge case. I assume that it's just so rare to get
//! that behavior that it's acceptable for now.
//!
//! Now we either have a commit, we aborted, or we do not have commit. In the
//! last case, as stated above, we check out the "default" branch.
//!
//! # The remote tracking branch
//!
//! First, the only remote operations we do is branch creation! It's
//! unfortunately not possible to defer remote branch creation until the first
//! `git push`, which would be ideal. The remote tracking branch has to already
//! exist, so we have to do the equivalent of `git push --set-upstream` during
//! worktree creation.
//!
//! Whether (and which) remote branch to track works like this:
//!
//! * If `--no-track` is given, we never track a remote branch, except when
//! branch already has a tracking branch. So we'd be done already!
//!
//! * If `--track` is given, we always track this branch, regardless of anything
//! else. If the branch exists, cool, otherwise we create it.
//!
//! If neither is given, we only set up tracking if requested in the
//! configuration file (`track.default = true`)
//!
//! The rest of the process is similar to the commit selection above. The only
//! difference is the remote selection. If there is only one, we use it, as
//! before. Otherwise, we try to use `default_remote` from the configuration, if
//! available. If not, we do not set up a remote tracking branch. It works like
//! this:
//!
//! * If a prefix is specified in the configuration, we use
//! `{remote}/{prefix}/{worktree_name}`
//!
//! * If no prefix is specified in the configuration, we use
//! `{remote}/{worktree_name}`
//!
//! Now that we have a remote, we use the same process as above:
//!
//! * If a prefix is specified in the configuration, we use for
//! `{remote}/{prefix}/{worktree_name}`
//! * We use for `{remote}/{worktree_name}`
//!
//! ---
//!
//! All this means that in some weird situation, you may end up with the state
//! of a remote branch while not actually tracking that branch. This can only
//! happen in repositories with more than one remote. Imagine the following:
//!
//! The repository has two remotes (`remote1` and `remote2`) which have the
//! exact same remote state. But there is no `default_remote` in the
//! configuration (or no configuration at all). There is a remote branch
//! `foobar`. As both `remote1/foobar` and `remote2/foobar` are the same, the new
//! worktree will use that as the state of the new branch. But as `grm` cannot
//! tell which remote branch to track, it will not set up remote tracking. This
//! behavior may be a bit confusing, but first, there is no good way to resolve
//! this, and second, the situation should be really rare (when having multiple
//! remotes, you would generally have a `default_remote` configured).
//!
//! # Implementation
//!
//! To reduce the chance of bugs, the implementation uses the [typestate
//! pattern](http://cliffle.com/blog/rust-typestate/). Here are the states we
//! are moving through linearly:
//!
//! * Init
//! * A local branch name is set
//! * A local commit to set the new branch to is selected
//! * A remote tracking branch is selected
//! * The new branch is created with all the required settings
//!
//! Don't worry about the lifetime stuff: There is only one single lifetime, as
//! everything (branches, commits) is derived from the single repo::Repo
//! instance
//!
//! # Testing
//!
//! There are two types of input to the tests:
//!
//! 1) The parameters passed to `grm`, either via command line or via
//! configuration file
//! 2) The circumstances in the repository and remotes
//!
//! ## Parameters
//!
//! * The name of the worktree
//! * Whether it contains slashes or not
//! * Whether it is invalid
//! * `--track` and `--no-track`
//! * Whether there is a configuration file and what it contains
//! * Whether `track.default` is enabled or disabled
//! * Whether `track.default_remote_prefix` is there or missing
//! * Whether `track.default_remote` is there or missing
//! * Whether that remote exists or not
//!
//! ## Situations
//!
//! ### The local branch
//!
//! * Whether the branch already exists
//! * Whether the branch has a remote tracking branch and whether it differs
//! from the desired tracking branch (i.e. `--track` or config)
//!
//! ### Remotes
//!
//! * How many remotes there are, if any
//! * If more than two remotes exist, whether their desired tracking branch
//! differs
//!
//! ### The remote tracking branch branch
//!
//! * Whether a remote branch with the same name as the worktree exists
//! * Whether a remote branch with the same name as the worktree plus prefix
//! exists
//!
//! ## Outcomes
//!
//! We have to check the following afterwards:
//!
//! * Does the worktree exist in the correct location?
//! * Does the local branch have the same name as the worktree?
//! * Does the local branch have the correct commit?
//! * Does the local branch track the correct remote branch?
//! * Does that remote branch also exist?
use std::cell::RefCell;
use std::path::Path;
// use super::output::*;
use super::repo;
pub const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
#[cfg(test)]
mod tests {
    use super::*;

    // Worktree names are used directly as directory paths, so names with
    // leading/trailing/double slashes or embedded whitespace must be
    // rejected by add_worktree().
    #[test]
    fn invalid_worktree_names() {
        assert!(add_worktree(Path::new("/tmp/"), "/leadingslash", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "trailingslash/", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "//", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "test//test", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "test test", None, false).is_err());
        assert!(add_worktree(Path::new("/tmp/"), "test\ttest", None, false).is_err());
    }
}
/// Typestate: initial stage, nothing decided yet (see module docs on the
/// typestate pattern).
struct Init;

/// Typestate: a local branch name for the worktree has been chosen.
struct WithLocalBranchName<'a> {
    // Name of the local branch (identical to the worktree name).
    local_branch_name: String,
    /// Outer option: Is there a computed value?
    /// Inner option: Is there actually a branch?
    ///
    /// None => No computed value yet
    /// Some(None) => No branch
    /// Some(Some(_)) => Branch
    local_branch: RefCell<Option<Option<repo::Branch<'a>>>>,
}

/// Typestate: the commit a newly created branch should point to is selected.
struct WithLocalTargetSelected<'a> {
    local_branch_name: String,
    // Some(_) if the local branch already exists, None if it must be created.
    local_branch: Option<repo::Branch<'a>>,
    // Commit for a branch that is yet to be created, if any.
    target_commit: Option<Box<repo::Commit<'a>>>,
}

/// Typestate: the remote tracking branch (if any) has been decided.
struct WithRemoteTrackingBranch<'a> {
    local_branch_name: String,
    local_branch: Option<repo::Branch<'a>>,
    target_commit: Option<Box<repo::Commit<'a>>>,
    // (remote name, remote branch name) to track, if any.
    remote_tracking_branch: Option<(String, String)>,
    // Optional remote branch prefix from the configuration.
    prefix: Option<String>,
}

/// A worktree under construction, parameterized by its current typestate `S`.
struct Worktree<'a, S: WorktreeState> {
    repo: &'a repo::RepoHandle,
    extra: S,
}
impl<'a> WithLocalBranchName<'a> {
    /// Creates the state for a known branch name; the branch lookup itself is
    /// deferred (the cache starts empty and is filled on first use).
    fn new(name: String) -> Self {
        Self {
            local_branch_name: name,
            local_branch: RefCell::new(None),
        }
    }
}
/// Marker trait for the typestate pattern: each implementor is one stage of
/// the linear worktree-creation flow described in the module docs.
trait WorktreeState {}

impl WorktreeState for Init {}
impl<'a> WorktreeState for WithLocalBranchName<'a> {}
impl<'a> WorktreeState for WithLocalTargetSelected<'a> {}
impl<'a> WorktreeState for WithRemoteTrackingBranch<'a> {}
impl<'a> Worktree<'a, Init> {
    /// Starts the worktree-creation flow for the given repository.
    fn new(repo: &'a repo::RepoHandle) -> Self {
        Self {
            repo,
            extra: Init {},
        }
    }

    /// Transitions to the next state by fixing the local branch name (which
    /// is also the name of the worktree).
    fn set_local_branch_name(self, name: &str) -> Worktree<'a, WithLocalBranchName<'a>> {
        Worktree::<WithLocalBranchName> {
            repo: self.repo,
            extra: WithLocalBranchName::new(name.to_string()),
        }
    }
}
impl<'a, 'b> Worktree<'a, WithLocalBranchName<'b>>
where
    'a: 'b,
{
    /// Performs the local-branch lookup once and caches the result. After
    /// this call, the outer `Option` in `self.extra.local_branch` is
    /// guaranteed to be `Some`.
    fn check_local_branch(&self) {
        let mut cached = self.extra.local_branch.borrow_mut();
        if cached.is_none() {
            // `Result::ok()` collapses a failed lookup into "no branch",
            // matching the Some(None) encoding documented on the field.
            *cached = Some(
                self.repo
                    .find_local_branch(&self.extra.local_branch_name)
                    .ok(),
            );
        }
    }

    /// Returns whether a local branch with the configured name exists,
    /// performing (and caching) the lookup if necessary.
    fn local_branch_already_exists(&self) -> bool {
        self.check_local_branch();
        // This unwrap is safe: check_local_branch() guarantees the outer
        // Option is Some.
        (*self.extra.local_branch.borrow())
            .as_ref()
            .unwrap()
            .is_some()
    }

    /// Transitions to the next state, recording the commit (if any) that a
    /// newly created branch should be set to.
    fn select_commit(
        self,
        commit: Option<Box<repo::Commit<'b>>>,
    ) -> Worktree<'a, WithLocalTargetSelected<'b>> {
        self.check_local_branch();
        Worktree::<'a, WithLocalTargetSelected> {
            repo: self.repo,
            extra: WithLocalTargetSelected::<'b> {
                local_branch_name: self.extra.local_branch_name,
                // This unwrap is safe: check_local_branch() guarantees the
                // outer Option is Some.
                local_branch: self.extra.local_branch.into_inner().unwrap(),
                target_commit: commit,
            },
        }
    }
}
impl<'a> Worktree<'a, WithLocalTargetSelected<'a>> {
    /// Transitions to the final state, recording which remote branch (if
    /// any) the worktree's branch should track, plus an optional branch-name
    /// prefix from the configuration.
    fn set_remote_tracking_branch(
        self,
        branch: Option<(&str, &str)>,
        prefix: Option<&str>,
    ) -> Worktree<'a, WithRemoteTrackingBranch<'a>> {
        // Own the borrowed name pieces before storing them in the new state.
        let remote_tracking_branch =
            branch.map(|(remote, branch_name)| (remote.to_owned(), branch_name.to_owned()));
        let prefix = prefix.map(|p| p.to_owned());

        Worktree::<WithRemoteTrackingBranch> {
            repo: self.repo,
            extra: WithRemoteTrackingBranch {
                local_branch_name: self.extra.local_branch_name,
                local_branch: self.extra.local_branch,
                target_commit: self.extra.target_commit,
                remote_tracking_branch,
                prefix,
            },
        }
    }
}
impl<'a> Worktree<'a, WithRemoteTrackingBranch<'a>> {
    /// Create the worktree on disk inside `directory`.
    ///
    /// This is the final step of the builder state machine: it creates the
    /// local branch if it does not exist yet, optionally wires up (and, when
    /// the remote branch is missing, pushes) a remote tracking branch, works
    /// around a libgit2 problem with worktree names containing slashes, and
    /// registers the new worktree with the repository.
    ///
    /// Returns `Ok(Some(warnings))` with non-fatal warnings to show to the
    /// user, or `Ok(None)` if there were none.
    fn create(self, directory: &Path) -> Result<Option<Vec<String>>, String> {
        let mut warnings: Vec<String> = vec![];
        // Reuse the already existing local branch, or create a new one
        // pointing at the previously selected target commit.
        let mut branch = if let Some(branch) = self.extra.local_branch {
            branch
        } else {
            self.repo.create_branch(
                &self.extra.local_branch_name,
                // TECHDEBT
                // We must not call this with `Some()` without a valid target.
                // I'm sure this can be improved, just not sure how.
                &*self.extra.target_commit.unwrap(),
            )?
        };
        if let Some((remote_name, remote_branch_name)) = self.extra.remote_tracking_branch {
            // Look the remote branch up both with and without the configured
            // prefix; which result is used is decided right below, depending
            // on whether a prefix is configured at all.
            let remote_branch_with_prefix = if let Some(ref prefix) = self.extra.prefix {
                if let Ok(remote_branch) = self
                    .repo
                    .find_remote_branch(&remote_name, &format!("{prefix}/{remote_branch_name}"))
                {
                    Some(remote_branch)
                } else {
                    None
                }
            } else {
                None
            };
            let remote_branch_without_prefix = if let Ok(remote_branch) = self
                .repo
                .find_remote_branch(&remote_name, &remote_branch_name)
            {
                Some(remote_branch)
            } else {
                None
            };
            let remote_branch = if let Some(ref _prefix) = self.extra.prefix {
                remote_branch_with_prefix
            } else {
                remote_branch_without_prefix
            };
            match remote_branch {
                // The remote branch exists: just set it as upstream, warning
                // when local and remote heads point at different commits.
                Some(remote_branch) => {
                    if branch.commit()?.id().hex_string()
                        != remote_branch.commit()?.id().hex_string()
                    {
                        warnings.push(format!("The local branch \"{}\" and the remote branch \"{}/{}\" differ. Make sure to push/pull afterwards!", &self.extra.local_branch_name, &remote_name, &remote_branch_name));
                    }
                    branch.set_upstream(&remote_name, &remote_branch.basename()?)?;
                }
                // The remote branch does not exist yet: push the local branch
                // and set the upstream afterwards.
                None => {
                    let mut remote = match self.repo.find_remote(&remote_name)? {
                        Some(remote) => remote,
                        None => return Err(format!("Remote \"{remote_name}\" not found")),
                    };
                    if !remote.is_pushable()? {
                        return Err(format!(
                            "Cannot push to non-pushable remote \"{remote_name}\""
                        ));
                    }
                    if let Some(prefix) = self.extra.prefix {
                        remote.push(
                            &self.extra.local_branch_name,
                            &format!("{}/{}", prefix, remote_branch_name),
                            self.repo,
                        )?;
                        branch.set_upstream(
                            &remote_name,
                            &format!("{}/{}", prefix, remote_branch_name),
                        )?;
                    } else {
                        remote.push(
                            &self.extra.local_branch_name,
                            &remote_branch_name,
                            self.repo,
                        )?;
                        branch.set_upstream(&remote_name, &remote_branch_name)?;
                    }
                }
            }
        }
        // We have to create subdirectories first, otherwise adding the worktree
        // will fail
        if self.extra.local_branch_name.contains('/') {
            let path = Path::new(&self.extra.local_branch_name);
            if let Some(base) = path.parent() {
                // This is a workaround of a bug in libgit2 (?)
                //
                // When *not* doing this, we will receive an error from the `Repository::worktree()`
                // like this:
                //
                // > failed to make directory '/{repo}/.git-main-working-tree/worktrees/dir/test
                //
                // This is a discrepancy between the behavior of libgit2 and the
                // git CLI when creating worktrees with slashes:
                //
                // The git CLI will create the worktree's configuration directory
                // inside {git_dir}/worktrees/{last_path_component}. Look at this:
                //
                // ```
                // $ git worktree add 1/2/3 -b 1/2/3
                // $ ls .git/worktrees
                // 3
                // ```
                //
                // Interesting: When adding a worktree with a different name but the
                // same final path component, git starts adding a counter suffix to
                // the worktree directories:
                //
                // ```
                // $ git worktree add 1/3/3 -b 1/3/3
                // $ git worktree add 1/4/3 -b 1/4/3
                // $ ls .git/worktrees
                // 3
                // 31
                // 32
                // ```
                //
                // I *guess* that the mapping back from the worktree directory under .git to the actual
                // worktree directory is done via the `gitdir` file inside `.git/worktrees/{worktree}`.
                // This means that the actual directory would not matter. You can verify this by
                // just renaming it:
                //
                // ```
                // $ mv .git/worktrees/3 .git/worktrees/foobar
                // $ git worktree list
                // /tmp/ fcc8a2a7 [master]
                // /tmp/1/2/3 fcc8a2a7 [1/2/3]
                // /tmp/1/3/3 fcc8a2a7 [1/3/3]
                // /tmp/1/4/3 fcc8a2a7 [1/4/3]
                // ```
                //
                // => Still works
                //
                // Anyway, libgit2 does not do this: It tries to create the worktree
                // directory inside .git with the exact name of the worktree, including
                // any slashes. It should be this code:
                //
                // https://github.com/libgit2/libgit2/blob/f98dd5438f8d7bfd557b612fdf1605b1c3fb8eaf/src/libgit2/worktree.c#L346
                //
                // As a workaround, we can create the base directory manually for now.
                //
                // Tracking upstream issue: https://github.com/libgit2/libgit2/issues/6327
                std::fs::create_dir_all(
                    directory
                        .join(GIT_MAIN_WORKTREE_DIRECTORY)
                        .join("worktrees")
                        .join(base),
                )
                .map_err(|error| error.to_string())?;
                // NOTE(review): `base` is used here as a path relative to the
                // *current working directory*, not relative to `directory`.
                // This is only correct when the process already runs inside
                // `directory` — TODO confirm that invariant holds for all
                // callers.
                std::fs::create_dir_all(base).map_err(|error| error.to_string())?;
            }
        }
        self.repo.new_worktree(
            &self.extra.local_branch_name,
            &directory.join(&self.extra.local_branch_name),
            &branch,
        )?;
        // Collapse an empty warning list into `None` so callers can use a
        // simple `if let Some(warnings)` check.
        Ok(if warnings.is_empty() {
            None
        } else {
            Some(warnings)
        })
    }
}
/// Validate a worktree (and therefore branch) name.
///
/// A name must never start or end with a slash, cannot contain two
/// consecutive slashes, cannot contain whitespace, and cannot be empty.
///
/// # Errors
///
/// Returns a human-readable message describing the first violated rule.
fn validate_worktree_name(name: &str) -> Result<(), String> {
    // An empty name would pass all of the other checks, but it can never be
    // a valid branch or directory name.
    if name.is_empty() {
        return Err(String::from("Invalid worktree name: it cannot be empty"));
    }
    if name.starts_with('/') || name.ends_with('/') {
        return Err(format!(
            "Invalid worktree name: {name}. It cannot start or end with a slash"
        ));
    }
    if name.contains("//") {
        return Err(format!(
            "Invalid worktree name: {name}. It cannot contain two consecutive slashes"
        ));
    }
    if name.contains(char::is_whitespace) {
        return Err(format!(
            "Invalid worktree name: {name}. It cannot contain whitespace"
        ));
    }
    Ok(())
}
// TECHDEBT
//
// Instead of opening the repo & reading configuration inside the function, it
// should be done by the caller and given as a parameter
/// Add a new worktree called `name` under `directory`.
///
/// * `track` — optional `(remote_name, remote_branch_name)` pair the new
///   branch should track, overriding the tracking configuration
/// * `no_track` — disable remote tracking entirely, overriding both `track`
///   and the configuration
///
/// On success returns `Some` list of non-fatal warnings to show to the user,
/// or `None` if there were none.
///
/// # Errors
///
/// Returns an error string when the name is invalid, the repository cannot be
/// opened, a worktree with that name already exists, or a git operation fails.
pub fn add_worktree(
    directory: &Path,
    name: &str,
    track: Option<(&str, &str)>,
    no_track: bool,
) -> Result<Option<Vec<String>>, String> {
    let mut warnings: Vec<String> = vec![];
    validate_worktree_name(name)?;
    let repo = repo::RepoHandle::open(directory, true).map_err(|error| match error.kind {
        repo::RepoErrorKind::NotFound => {
            String::from("Current directory does not contain a worktree setup")
        }
        _ => format!("Error opening repo: {}", error),
    })?;
    let remotes = &repo.remotes()?;
    let config = repo::read_worktree_root_config(directory)?;
    if repo.find_worktree(name).is_ok() {
        return Err(format!("Worktree {} already exists", &name));
    }
    // Tracking-related settings from the worktree root configuration.
    let track_config = config.and_then(|config| config.track);
    let prefix = track_config
        .as_ref()
        .and_then(|track| track.default_remote_prefix.as_ref());
    let enable_tracking = track_config.as_ref().map_or(false, |track| track.default);
    let default_remote = track_config
        .as_ref()
        .map(|track| track.default_remote.clone());
    // Note that we have to define all variables that borrow from `repo`
    // *first*, otherwise we'll receive "borrowed value does not live long
    // enough" errors. This is due to the `repo` reference inside `Worktree` that is
    // passed through each state type.
    //
    // The `commit` variable will be dropped at the end of the scope, together with all
    // worktree variables. It will be done in the opposite direction of declaration (FILO).
    //
    // So if we define `commit` *after* the respective worktrees, it will be dropped first while
    // still being borrowed by `Worktree`.
    let default_branch_head = repo.default_branch()?.commit_owned()?;
    let worktree = Worktree::<Init>::new(&repo).set_local_branch_name(name);
    // Helper: the head commit of `remote_branch_name` on `remote_name`, or
    // `None` when looking it up fails (e.g. the branch does not exist).
    let get_remote_head = |remote_name: &str,
                           remote_branch_name: &str|
     -> Result<Option<Box<repo::Commit>>, String> {
        if let Ok(remote_branch) = repo.find_remote_branch(remote_name, remote_branch_name) {
            Ok(Some(Box::new(remote_branch.commit_owned()?)))
        } else {
            Ok(None)
        }
    };
    // Select the commit the new branch will point to. Precedence: an already
    // existing local branch (no target needed) > the explicitly tracked
    // remote branch > remote branches matching the worktree name (prefixed
    // lookups first) > the default branch's head.
    let worktree = if worktree.local_branch_already_exists() {
        worktree.select_commit(None)
    } else if let Some((remote_name, remote_branch_name)) = if no_track { None } else { track } {
        if let Ok(remote_branch) = repo.find_remote_branch(remote_name, remote_branch_name) {
            worktree.select_commit(Some(Box::new(remote_branch.commit_owned()?)))
        } else {
            worktree.select_commit(Some(Box::new(default_branch_head)))
        }
    } else {
        match remotes.len() {
            0 => worktree.select_commit(Some(Box::new(default_branch_head))),
            1 => {
                let remote_name = &remotes[0];
                // Note: the unprefixed fallback lookup runs eagerly (`or`,
                // not `or_else`) because the `?` operator must be applied
                // outside of a closure.
                let commit: Option<Box<repo::Commit>> = ({
                    if let Some(prefix) = prefix {
                        get_remote_head(remote_name, &format!("{prefix}/{name}"))?
                    } else {
                        None
                    }
                })
                .or(get_remote_head(remote_name, name)?)
                .or_else(|| Some(Box::new(default_branch_head)));
                worktree.select_commit(commit)
            }
            _ => {
                // Multiple remotes: prefer the configured default remote
                // (prefixed lookup first, then unprefixed) ...
                let commit = if let Some(ref default_remote) = default_remote {
                    if let Some(ref prefix) = prefix {
                        if let Ok(remote_branch) = repo
                            .find_remote_branch(default_remote, &format!("{prefix}/{name}"))
                        {
                            Some(Box::new(remote_branch.commit_owned()?))
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                    .or({
                        if let Ok(remote_branch) =
                            repo.find_remote_branch(default_remote, name)
                        {
                            Some(Box::new(remote_branch.commit_owned()?))
                        } else {
                            None
                        }
                    })
                } else {
                    None
                }.or({
                    // ... otherwise collect the branch head from every remote
                    // and check whether they agree.
                    let mut commits = vec![];
                    for remote_name in remotes.iter() {
                        let remote_head: Option<Box<repo::Commit>> = ({
                            if let Some(ref prefix) = prefix {
                                if let Ok(remote_branch) = repo.find_remote_branch(
                                    remote_name,
                                    &format!("{prefix}/{name}"),
                                ) {
                                    Some(Box::new(remote_branch.commit_owned()?))
                                } else {
                                    None
                                }
                            } else {
                                None
                            }
                        })
                        .or({
                            if let Ok(remote_branch) =
                                repo.find_remote_branch(remote_name, name)
                            {
                                Some(Box::new(remote_branch.commit_owned()?))
                            } else {
                                None
                            }
                        })
                        // NOTE(review): `.or(None)` is a no-op and could be
                        // removed.
                        .or(None);
                        commits.push(remote_head);
                    }
                    let mut commits = commits
                        .into_iter()
                        .flatten()
                        // have to collect first because the `flatten()` return
                        // type does not implement `windows()`
                        .collect::<Vec<Box<repo::Commit>>>();
                    // `flatten()` takes care of `None` values here. If all
                    // remotes return None for the branch, we do *not* abort, we
                    // continue!
                    if commits.is_empty() {
                        Some(Box::new(default_branch_head))
                    } else if commits.len() == 1 {
                        Some(commits.swap_remove(0))
                    } else if commits.windows(2).any(|window| {
                        let c1 = &window[0];
                        let c2 = &window[1];
                        (*c1).id().hex_string() != (*c2).id().hex_string()
                    }) {
                        warnings.push(
                            // TODO this should also include the branch
                            // name. BUT: the branch name may be different
                            // between the remotes. Let's just leave it
                            // until I get around to fix that inconsistency
                            // (see module-level doc above), which might be
                            // never, as it's such a rare edge case.
                            "Branch exists on multiple remotes, but they deviate. Selecting default branch instead".to_string()
                        );
                        Some(Box::new(default_branch_head))
                    } else {
                        Some(commits.swap_remove(0))
                    }
                });
                worktree.select_commit(commit)
            }
        }
    };
    // Decide which remote tracking branch (if any) to configure, mirroring
    // the precedence above: `no_track` > explicit `track` > configuration.
    let worktree = if no_track {
        worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str()))
    } else if let Some((remote_name, remote_branch_name)) = track {
        worktree.set_remote_tracking_branch(
            Some((remote_name, remote_branch_name)),
            None, // Always disable prefixing when explicitly given --track
        )
    } else if !enable_tracking {
        worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str()))
    } else {
        match remotes.len() {
            0 => worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str())),
            1 => worktree
                .set_remote_tracking_branch(Some((&remotes[0], name)), prefix.map(|s| s.as_str())),
            _ => {
                if let Some(default_remote) = default_remote {
                    worktree.set_remote_tracking_branch(
                        Some((&default_remote, name)),
                        prefix.map(|s| s.as_str()),
                    )
                } else {
                    worktree.set_remote_tracking_branch(None, prefix.map(|s| s.as_str()))
                }
            }
        }
    };
    worktree.create(directory)?;
    // Collapse an empty warning list into `None` so callers can use a simple
    // `if let Some(warnings)` check.
    Ok(if warnings.is_empty() {
        None
    } else {
        Some(warnings)
    })
}

View File

@@ -8,13 +8,13 @@ use helpers::*;
fn open_empty_repo() {
let tmpdir = init_tmpdir();
assert!(matches!(
Repo::open(tmpdir.path(), true),
RepoHandle::open(tmpdir.path(), true),
Err(RepoError {
kind: RepoErrorKind::NotFound
})
));
assert!(matches!(
Repo::open(tmpdir.path(), false),
RepoHandle::open(tmpdir.path(), false),
Err(RepoError {
kind: RepoErrorKind::NotFound
})
@@ -25,7 +25,7 @@ fn open_empty_repo() {
#[test]
fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
let tmpdir = init_tmpdir();
let repo = Repo::init(tmpdir.path(), false)?;
let repo = RepoHandle::init(tmpdir.path(), false)?;
assert!(!repo.is_bare());
assert!(repo.is_empty()?);
cleanup_tmpdir(tmpdir);
@@ -35,7 +35,7 @@ fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
#[test]
fn create_repo_with_worktree() -> Result<(), Box<dyn std::error::Error>> {
let tmpdir = init_tmpdir();
let repo = Repo::init(tmpdir.path(), true)?;
let repo = RepoHandle::init(tmpdir.path(), true)?;
assert!(repo.is_bare());
assert!(repo.is_empty()?);
cleanup_tmpdir(tmpdir);