32 Commits

Author SHA1 Message Date
31b9757ef3 Merge branch 'develop' 2022-06-14 00:32:08 +02:00
defb3d1b7d Release v0.7.2 2022-06-14 00:32:08 +02:00
e6b654e990 Cargo.lock: Updating libz-sys v1.1.6 -> v1.1.8 2022-06-14 00:15:15 +02:00
29ddc647e3 dependencies: Update comfy-table to 6.0.0 2022-06-14 00:15:15 +02:00
67c3e40108 just: Update check target to be pre-commit ready 2022-06-14 00:15:15 +02:00
7363ed48b4 Add clippy suggestions 2022-06-14 00:15:15 +02:00
96943c1483 Use new cargo fmt 2022-06-14 00:15:15 +02:00
9f7195282f Enable output in rust unit tests 2022-06-14 00:15:15 +02:00
30480fb568 Update handling of branches on worktree setup 2022-06-14 00:15:15 +02:00
c3aaea3332 Quote branch name on output 2022-06-14 00:15:15 +02:00
fad6f71876 Improve default branch guessing 2022-06-14 00:15:15 +02:00
73158e3d47 Print ok-ish stuff to stdout 2022-06-14 00:15:15 +02:00
6f4ae88260 Add some comments about repo syncing 2022-06-14 00:15:15 +02:00
a8f8803a92 Do not fail on empty clone target 2022-06-14 00:15:15 +02:00
581a513ebd Initialize local branches on clone 2022-06-14 00:15:15 +02:00
f1e212ead9 Add function to get all remote branches 2022-06-14 00:15:15 +02:00
bc3001a4e6 Add function to get basename of branch 2022-06-14 00:15:15 +02:00
c4fd1d0452 Refactor default_branch() for readability 2022-06-14 00:15:15 +02:00
1a65a163a1 Use opaque type for auth token
So we cannot accidentially output it, as it does not implement
`Display`.
2022-06-14 00:15:15 +02:00
4f68a563c6 providers: Use references for field access 2022-06-14 00:15:15 +02:00
e04e8ceeeb Use opaque type for auth token
So we cannot accidentially output it, as it does not implement
`Display`.
2022-06-14 00:15:15 +02:00
Max Volk
b2542b341e Reword some of the documentation and spelling fixes 2022-06-14 00:15:15 +02:00
d402c1f8ce Remove accidentially added file 2022-05-28 22:06:52 +02:00
e75aead3a8 Release v0.7.1 2022-05-27 23:37:54 +02:00
dca2b3c9b4 Justfile: Add build targets 2022-05-27 23:37:54 +02:00
a71711978e Make sure we do not expose secrets in output
This is using the RFC-8959 URI scheme to detect secrets. Thanks
hackernews for the idea ;)
2022-05-27 23:37:54 +02:00
90d188e01e Back to pure docker for testing 2022-05-27 23:37:54 +02:00
2e6166e807 Link binary statically with musl 2022-05-27 23:37:54 +02:00
8aaaa55d45 gitlab: Add alternate error field in JSON response 2022-05-27 23:37:54 +02:00
df39bb3076 gitlab: Fix detection of private repositories 2022-05-27 23:37:54 +02:00
bc3d4e1c49 Properly escape URL parameters 2022-05-27 23:37:54 +02:00
32eb4676ee Restructure into smaller modules 2022-05-27 23:37:54 +02:00
30 changed files with 1146 additions and 884 deletions

View File

@@ -37,13 +37,13 @@ a separate e2e test suite in python (`just test-e2e`).
To run all tests, run `just test`.
When contributing, consider whether it makes sense to add tests that to prevent
regressions in the future. When fixing bugs, it makes sense to add tests that
expose the wrong behaviour beforehand.
When contributing, consider whether it makes sense to add tests which could
prevent regressions in the future. When fixing bugs, it makes sense to add
tests that expose the wrong behaviour beforehand.
## Documentation
The documentation lives in `docs` and uses
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
features here!

67
Cargo.lock generated
View File

@@ -101,7 +101,7 @@ version = "3.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c"
dependencies = [
"heck 0.4.0",
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
@@ -119,9 +119,9 @@ dependencies = [
[[package]]
name = "comfy-table"
version = "5.0.1"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b103d85ca6e209388771bfb7aa6b68a7aeec4afbf6f0a0264bfbf50360e5212e"
checksum = "121d8a5b0346092c18a4b2fd6f620d7a06f0eb7ac0a45860939a0884bc579c56"
dependencies = [
"crossterm",
"strum",
@@ -332,7 +332,7 @@ dependencies = [
[[package]]
name = "git-repo-manager"
version = "0.7.0"
version = "0.7.2"
dependencies = [
"clap",
"comfy-table",
@@ -347,6 +347,7 @@ dependencies = [
"shellexpand",
"tempdir",
"toml",
"url-escape",
]
[[package]]
@@ -370,15 +371,6 @@ version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "heck"
version = "0.4.0"
@@ -531,9 +523,9 @@ dependencies = [
[[package]]
name = "libz-sys"
version = "1.1.6"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e7e15d7610cce1d9752e137625f14e61a28cd45929b6e12e47b50fe154ee2e"
checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
dependencies = [
"cc",
"libc",
@@ -609,14 +601,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
version = "0.9.73"
name = "openssl-src"
version = "111.20.0+1.1.1o"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5fd19fb3e0a8191c1e34935718976a3e70c112ab9a24af6d7cadccd9d90bc0"
checksum = "92892c4f87d56e376e469ace79f1128fdaded07646ddf73aa0be4706ff712dec"
dependencies = [
"cc",
]
[[package]]
name = "openssl-sys"
version = "0.9.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "835363342df5fba8354c5b453325b110ffd54044e588c539cf2f20a8014e4cb1"
dependencies = [
"autocfg",
"cc",
"libc",
"openssl-src",
"pkg-config",
"vcpkg",
]
@@ -635,9 +637,9 @@ checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
[[package]]
name = "parking_lot"
version = "0.12.0"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
"parking_lot_core",
@@ -994,17 +996,17 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strum"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cae14b91c7d11c9a851d3fbc80a963198998c2a64eec840477fa92d8ce9b70bb"
checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f"
[[package]]
name = "strum_macros"
version = "0.23.1"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb0dc7ee9c15cea6199cde9a127fa16a4c5819af85395457ad72d68edc85a38"
checksum = "9550962e7cf70d9980392878dfaf1dcc3ece024f4cf3bf3c46b978d0bad61d6c"
dependencies = [
"heck 0.3.3",
"heck",
"proc-macro2",
"quote",
"rustversion",
@@ -1165,12 +1167,6 @@ dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
[[package]]
name = "unicode-width"
version = "0.1.9"
@@ -1189,6 +1185,15 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "url-escape"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44e0ce4d1246d075ca5abec4b41d33e87a6054d08e2366b63205665e950db218"
dependencies = [
"percent-encoding",
]
[[package]]
name = "vcpkg"
version = "0.2.15"

View File

@@ -1,7 +1,8 @@
[package]
name = "git-repo-manager"
version = "0.7.0"
version = "0.7.2"
edition = "2021"
authors = [
"Hannes Körber <hannes@hkoerber.de>",
]
@@ -63,7 +64,7 @@ version = "=0.15.0"
version = "=1.5.6"
[dependencies.comfy-table]
version = "=5.0.1"
version = "=6.0.0"
[dependencies.serde_yaml]
version = "=0.8.24"
@@ -73,10 +74,22 @@ version = "=1.0.81"
[dependencies.isahc]
version = "=1.7.2"
features = ["json"]
default-features = false
features = ["json", "http2", "text-decoding"]
[dependencies.parse_link_header]
version = "=0.3.2"
[dependencies.url-escape]
version = "=0.1.1"
[dev-dependencies.tempdir]
version = "=0.3.7"
[features]
static-build = [
"git2/vendored-openssl",
"git2/vendored-libgit2",
"isahc/static-curl",
"isahc/static-ssl",
]

View File

@@ -1,6 +1,8 @@
set positional-arguments
check: test
target := "x86_64-unknown-linux-musl"
check: fmt-check lint test
cargo check
cargo fmt --check
cargo clippy --no-deps -- -Dwarnings
@@ -9,55 +11,54 @@ fmt:
cargo fmt
git ls-files | grep '\.py$' | xargs black
fmt-check:
cargo fmt --check
git ls-files | grep '\.py$' | xargs black --check
lint:
cargo clippy --no-deps
cargo clippy --no-deps -- -Dwarnings
lint-fix:
cargo clippy --no-deps --fix
release:
cargo build --release
test-binary-docker:
env \
GITHUB_API_BASEURL=http://rest:5000/github \
GITLAB_API_BASEURL=http://rest:5000/gitlab \
cargo build --profile e2e-tests
cargo build --release --target {{target}}
test-binary:
env \
GITHUB_API_BASEURL=http://localhost:5000/github \
GITLAB_API_BASEURL=http://localhost:5000/gitlab \
cargo build --profile e2e-tests
GITHUB_API_BASEURL=http://rest:5000/github \
GITLAB_API_BASEURL=http://rest:5000/gitlab \
cargo build --target {{target}} --profile e2e-tests --features=static-build
install:
cargo install --path .
install-static:
cargo install --target {{target}} --features=static-build --path .
build:
cargo build
build-static:
cargo build --target {{target}} --features=static-build
test: test-unit test-integration test-e2e
test-unit:
cargo test --lib --bins
test-unit +tests="":
cargo test --lib --bins -- --show-output {{tests}}
test-integration:
cargo test --test "*"
test-e2e-docker +tests=".": test-binary-docker
cd ./e2e_tests \
&& docker-compose rm --stop -f \
&& docker-compose build \
&& docker-compose run \
--rm \
-v $PWD/../target/e2e-tests/grm:/grm \
pytest \
"GRM_BINARY=/grm python3 ALTERNATE_DOMAIN=alternate-rest -m pytest -p no:cacheprovider --color=yes "$@"" \
&& docker-compose rm --stop -f
test-e2e +tests=".": test-binary
cd ./e2e_tests \
&& docker-compose rm --stop -f \
&& docker-compose build \
&& docker-compose up -d rest \
&& GRM_BINARY={{justfile_directory()}}/target/e2e-tests/grm ALTERNATE_DOMAIN=127.0.0.1 python3 -m pytest -p no:cacheprovider --color=yes {{tests}} \
&& docker-compose run \
--rm \
-v $PWD/../target/{{target}}/e2e-tests/grm:/grm \
pytest \
"GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest -p no:cacheprovider --color=yes "$@"" \
&& docker-compose rm --stop -f
update-dependencies: update-cargo-dependencies

View File

@@ -1,7 +1,7 @@
# Summary
- [Overview](./overview.md)
- [Getting started](./getting_started.md)
- [Installation](./installation.md)
- [Repository trees](./repos.md)
- [Git Worktrees](./worktrees.md)
- [Forge Integrations](./forge_integration.md)

View File

@@ -17,7 +17,7 @@ You will end up with your projects cloned into `~/projects/{your_github_username
## Authentication
The only currently supported authentication option is using personal access
The only currently supported authentication option is using a personal access
token.
### GitHub
@@ -27,7 +27,7 @@ See the GitHub documentation for personal access tokens:
The only required permission is the "repo" scope.
### GitHub
### GitLab
See the GitLab documentation for personal access tokens:
[Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html).

View File

@@ -1,22 +0,0 @@
# Quickstart
## Installation
Building GRM currently requires the nightly Rust toolchain. The easiest way
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
Make sure that the nightly toolchain is installed:
```
$ rustup toolchain install nightly
```
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch master
```
If you're brave, you can also run the development build:
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
```

56
docs/src/installation.md Normal file
View File

@@ -0,0 +1,56 @@
# Installation
## Installation
Building GRM currently requires the nightly Rust toolchain. The easiest way
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
Make sure that the nightly toolchain is installed:
```
$ rustup toolchain install nightly
```
Then, install the build dependencies:
| Distribution | Command |
| ------------- | ------------------------------------------------------------------------------ |
| Archlinux | `pacman -S --needed gcc openssl pkg-config` |
| Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` |
Then, it's a simple command to install the latest stable version:
```bash
$ cargo +nightly install git-repo-manager
```
If you're brave, you can also run the development build:
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
```
## Static build
Note that by default, you will get a dynamically linked executable.
Alternatively, you can also build a statically linked binary. For this, you
will need `musl` and a few other build dependencies installed:
| Distribution | Command |
| ------------- | --------------------------------------------------------------------------- |
| Archlinux | `pacman -S --needed gcc musl perl make` |
| Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` |
(`perl` and `make` are required for the OpenSSL build script)
Then, add the musl target via `rustup`:
```
$ rustup +nightly target add x86_64-unknown-linux-musl
```
Then, use a modified build command to get a statically linked binary:
```
$ cargo +nightly install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build
```

View File

@@ -77,6 +77,6 @@ $ grm repos status
## YAML
By default, the repo configuration uses TOML. If you prefer YAML, just give it
a YAML file instead (file ending does not matter, `grm` will figure out the format
itself). For generating a configuration, pass `--format yaml` to `grm repo find`
to generate YAML instead of TOML.
a YAML file instead (file ending does not matter, `grm` will figure out the format).
For generating a configuration, pass `--format yaml` to `grm repo find`
which generates a YAML config instead of a TOML configuration.

View File

@@ -5,11 +5,11 @@
The default workflow when using git is having your repository in a single directory.
Then, you can check out a certain reference (usually a branch), which will update
the files in the directory to match the state of that reference. Most of the time,
this is exactly what you need and works perfectly. But especially when you're using
this is exactly what you need and works perfectly. But especially when you're working
with branches a lot, you may notice that there is a lot of work required to make
everything run smootly.
everything run smoothly.
Maybe you experienced the following: You're working on a feature branch. Then,
Maybe you have experienced the following: You're working on a feature branch. Then,
for some reason, you have to change branches (maybe to investigate some issue).
But you get the following:
@@ -20,7 +20,7 @@ error: Your local changes to the following files would be overwritten by checkou
Now you can create a temporary commit or stash your changes. In any case, you have
some mental overhead before you can work on something else. Especially with stashes,
you'll have to remember to do a `git stash pop` before resuming your work (I
cannot count the number of times where is "rediscovered" some code hidden in some
cannot count the number of times where I "rediscovered" some code hidden in some
old stash I forgot about.
And even worse: If you're currently in the process of resolving merge conflicts or an
@@ -40,7 +40,7 @@ In any case, Git Worktrees are here for the rescue:
independent checkouts of your repository on different directories. You can have
multiple directories that correspond to different references in your repository.
Each worktree has it's independent working tree (duh) and index, so there is no
to run into conflicts. Changing to a different branch is just a `cd` away (if
way to run into conflicts. Changing to a different branch is just a `cd` away (if
the worktree is already set up).
## Worktrees in GRM
@@ -210,7 +210,7 @@ your changes to. I'd rather not delete work that you cannot recover."
Note that `grm` is very cautious here. As your repository will not be deleted,
you could still recover the commits via [`git-reflog`](https://git-scm.com/docs/git-reflog).
But better safe then sorry! Note that you'd get a similar error message if your
But better safe than sorry! Note that you'd get a similar error message if your
worktree had any uncommitted files, for the same reason. Now you can either
commit & push your changes, or your tell `grm` that you know what you're doing:
@@ -241,7 +241,7 @@ calls them "persistent branches" and treats them a bit differently:
`grm wt delete`, which will not require a `--force` flag. Note that of
course, actual changes in the worktree will still block an automatic cleanup!
* As soon as you enable persistent branches, non-persistent branches will only
ever cleaned up when merged into a persistent branch.
ever be cleaned up when merged into a persistent branch.
To elaborate: This is mostly relevant for a feature-branch workflow. Whenever a
feature branch is merged, it can usually be thrown away. As merging is usually
@@ -340,7 +340,7 @@ $ grm wt rebase --pull --rebase
hell is there a `--rebase` flag in the `rebase` command?"
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree
to their remote branch, if possible. `rebase` rabases onto the **default** branch
to their remote branch, if possible. `rebase` rebases onto the **default** branch
instead. The switches to `rebase` are just convenience, so you do not have to
run two commands.

View File

@@ -23,8 +23,6 @@ services:
build: ./docker-rest/
expose:
- "5000"
ports:
- "5000:5000"
networks:
main:
aliases:

View File

@@ -12,7 +12,7 @@ def check_headers():
app.logger.error("Invalid accept header")
abort(500)
auth_header = request.headers.get("authorization")
if auth_header != "token authtoken":
if auth_header != "token secret-token:myauthtoken":
app.logger.error("Invalid authorization header: %s", auth_header)
abort(
make_response(

View File

@@ -12,7 +12,7 @@ def check_headers():
app.logger.error("Invalid accept header")
abort(500)
auth_header = request.headers.get("authorization")
if auth_header != "bearer authtoken":
if auth_header != "bearer secret-token:myauthtoken":
app.logger.error("Invalid authorization header: %s", auth_header)
abort(
make_response(

View File

@@ -18,6 +18,8 @@ def grm(args, cwd=None, is_invalid=False):
print(f"grmcmd: {args}")
print(f"stdout:\n{cmd.stdout}")
print(f"stderr:\n{cmd.stderr}")
assert "secret-token:" not in cmd.stdout
assert "secret-token:" not in cmd.stderr
assert "panicked" not in cmd.stderr
return cmd

View File

@@ -141,7 +141,7 @@ def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
f.write(
f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
)
@@ -157,7 +157,7 @@ def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
]
@@ -193,7 +193,7 @@ def test_repos_find_remote_user_empty(
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
[filters]
@@ -213,7 +213,7 @@ def test_repos_find_remote_user_empty(
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--user",
@@ -264,7 +264,7 @@ def test_repos_find_remote_user(
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
@@ -300,7 +300,7 @@ def test_repos_find_remote_user(
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
]
@@ -378,7 +378,7 @@ def test_repos_find_remote_group_empty(
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
@@ -403,7 +403,7 @@ def test_repos_find_remote_group_empty(
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--group",
@@ -459,7 +459,7 @@ def test_repos_find_remote_group(
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
@@ -488,7 +488,7 @@ def test_repos_find_remote_group(
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--group",
@@ -575,7 +575,7 @@ def test_repos_find_remote_user_and_group(
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
@@ -609,7 +609,7 @@ def test_repos_find_remote_user_and_group(
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--group",
@@ -726,7 +726,7 @@ def test_repos_find_remote_owner(
with open(config.name, "w") as f:
cfg = f"""
provider = "{provider}"
token_command = "echo authtoken"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
@@ -761,7 +761,7 @@ def test_repos_find_remote_owner(
"--provider",
provider,
"--token-command",
"echo authtoken",
"echo secret-token:myauthtoken",
"--root",
"/myroot",
"--access",

View File

@@ -303,7 +303,6 @@ def test_repos_sync_root_is_file(configtype):
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "not a directory" in cmd.stderr.lower()

45
src/auth.rs Normal file
View File

@@ -0,0 +1,45 @@
use std::process;
/// An opaque wrapper around an authentication token.
///
/// Deliberately implements neither `Display` nor `Debug`, so the secret
/// cannot accidentally end up in program output; callers must explicitly
/// go through [`AuthToken::access`] to read it.
#[derive(Clone)]
pub struct AuthToken(String);

impl AuthToken {
    /// Expose the raw token string for the places that genuinely need it
    /// (e.g. building an `Authorization` header).
    pub fn access(&self) -> &str {
        &self.0
    }
}

/// Runs `command` via `sh -c` and interprets the first line of its standard
/// output as an authentication token.
///
/// Only the first line is used, so the command may print further diagnostics
/// afterwards. A trailing `\r` (CRLF output) is stripped, as is surrounding
/// whitespace.
///
/// # Errors
///
/// Returns a human-readable error message when the command cannot be
/// spawned, exits unsuccessfully, writes anything to stderr, produces
/// non-UTF-8 output, or produces no usable (non-empty) token line.
pub fn get_token_from_command(command: &str) -> Result<AuthToken, String> {
    // Go through `env sh` so the token command gets normal shell word
    // splitting, pipes etc.
    let output = process::Command::new("/usr/bin/env")
        .arg("sh")
        .arg("-c")
        .arg(command)
        .output()
        .map_err(|error| format!("Failed to run token-command: {}", error))?;

    let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?;
    let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?;

    if !output.status.success() {
        if stderr.is_empty() {
            return Err(String::from("Token command failed."));
        }
        return Err(format!("Token command failed: {}", stderr));
    }

    // Any stderr output is treated as an error even on exit code 0, as it
    // likely indicates a misconfigured token command.
    if !stderr.is_empty() {
        return Err(format!("Token command produced stderr: {}", stderr));
    }

    // `lines()` (unlike `split('\n')`) also strips a trailing `\r`, so
    // commands emitting CRLF line endings work correctly. `split('\n').next()`
    // can never return `None`, so the old "no newline" error branch was dead
    // code; the real failure mode is an empty first line, checked below.
    let token = stdout.lines().next().unwrap_or("").trim();

    if token.is_empty() {
        return Err(String::from("Token command did not produce output"));
    }

    Ok(AuthToken(token.to_string()))
}

View File

@@ -1,18 +1,19 @@
use serde::{Deserialize, Serialize};
use std::process;
use crate::output::*;
use std::path::Path;
use crate::{get_token_from_command, path_as_string, Remote, Repo, Tree};
use super::auth;
use super::output::*;
use super::path;
use super::provider;
use super::provider::Filter;
use super::provider::Provider;
use super::repo;
use super::tree;
use crate::provider;
use crate::provider::Filter;
use crate::provider::Provider;
pub type RemoteProvider = crate::provider::RemoteProvider;
pub type RemoteType = crate::repo::RemoteType;
pub type RemoteProvider = provider::RemoteProvider;
pub type RemoteType = repo::RemoteType;
fn worktree_setup_default() -> bool {
false
@@ -64,7 +65,7 @@ pub struct RemoteConfig {
}
impl RemoteConfig {
pub fn from_remote(remote: Remote) -> Self {
pub fn from_remote(remote: repo::Remote) -> Self {
Self {
name: remote.name,
url: remote.url,
@@ -72,8 +73,8 @@ impl RemoteConfig {
}
}
pub fn into_remote(self) -> Remote {
Remote {
pub fn into_remote(self) -> repo::Remote {
repo::Remote {
name: self.name,
url: self.url,
remote_type: self.remote_type,
@@ -93,7 +94,7 @@ pub struct RepoConfig {
}
impl RepoConfig {
pub fn from_repo(repo: Repo) -> Self {
pub fn from_repo(repo: repo::Repo) -> Self {
Self {
name: repo.name,
worktree_setup: repo.worktree_setup,
@@ -103,14 +104,14 @@ impl RepoConfig {
}
}
pub fn into_repo(self) -> Repo {
pub fn into_repo(self) -> repo::Repo {
let (namespace, name) = if let Some((namespace, name)) = self.name.rsplit_once('/') {
(Some(namespace.to_string()), name.to_string())
} else {
(None, self.name)
};
Repo {
repo::Repo {
name,
namespace,
worktree_setup: self.worktree_setup,
@@ -133,7 +134,7 @@ impl ConfigTrees {
ConfigTrees { trees: vec }
}
pub fn from_trees(vec: Vec<Tree>) -> Self {
pub fn from_trees(vec: Vec<tree::Tree>) -> Self {
ConfigTrees {
trees: vec.into_iter().map(ConfigTree::from_tree).collect(),
}
@@ -157,7 +158,7 @@ impl Config {
match self {
Config::ConfigTrees(config) => Ok(config.trees),
Config::ConfigProvider(config) => {
let token = match get_token_from_command(&config.token_command) {
let token = match auth::get_token_from_command(&config.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
@@ -217,9 +218,9 @@ impl Config {
.collect();
let tree = ConfigTree {
root: if let Some(namespace) = namespace {
path_as_string(&Path::new(&config.root).join(namespace))
path::path_as_string(&Path::new(&config.root).join(namespace))
} else {
path_as_string(Path::new(&config.root))
path::path_as_string(Path::new(&config.root))
},
repos: Some(repos),
};
@@ -236,7 +237,7 @@ impl Config {
pub fn normalize(&mut self) {
if let Config::ConfigTrees(config) = self {
let home = super::env_home().display().to_string();
let home = path::env_home().display().to_string();
for tree in &mut config.trees_mut().iter_mut() {
if tree.root.starts_with(&home) {
// The tilde is not handled differently, it's just a normal path component for `Path`.
@@ -275,14 +276,14 @@ pub struct ConfigTree {
}
impl ConfigTree {
pub fn from_repos(root: String, repos: Vec<Repo>) -> Self {
pub fn from_repos(root: String, repos: Vec<repo::Repo>) -> Self {
Self {
root,
repos: Some(repos.into_iter().map(RepoConfig::from_repo).collect()),
}
}
pub fn from_tree(tree: Tree) -> Self {
pub fn from_tree(tree: tree::Tree) -> Self {
Self {
root: tree.root,
repos: Some(tree.repos.into_iter().map(RepoConfig::from_repo).collect()),

View File

@@ -181,7 +181,7 @@ pub struct Config {
pub init_worktree: String,
}
pub type RemoteProvider = grm::provider::RemoteProvider;
pub type RemoteProvider = super::provider::RemoteProvider;
#[derive(Parser)]
#[clap()]

View File

@@ -3,12 +3,17 @@ use std::process;
mod cmd;
use grm::auth;
use grm::config;
use grm::find_in_tree;
use grm::output::*;
use grm::path_as_string;
use grm::path;
use grm::provider;
use grm::provider::Provider;
use grm::repo;
use grm::table;
use grm::tree;
use grm::worktree;
fn main() {
let opts = cmd::parse();
@@ -24,7 +29,7 @@ fn main() {
process::exit(1);
}
};
match grm::sync_trees(config, args.init_worktree == "true") {
match tree::sync_trees(config, args.init_worktree == "true") {
Ok(success) => {
if !success {
process::exit(1)
@@ -37,7 +42,7 @@ fn main() {
}
}
cmd::SyncAction::Remote(args) => {
let token = match grm::get_token_from_command(&args.token_command) {
let token = match auth::get_token_from_command(&args.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
@@ -45,18 +50,14 @@ fn main() {
}
};
let filter = grm::provider::Filter::new(
args.users,
args.groups,
args.owner,
args.access,
);
let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access);
let worktree = args.worktree == "true";
let repos = match args.provider {
cmd::RemoteProvider::Github => {
match grm::provider::Github::new(filter, token, args.api_url) {
match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
@@ -66,7 +67,7 @@ fn main() {
.get_repos(worktree, args.force_ssh)
}
cmd::RemoteProvider::Gitlab => {
match grm::provider::Gitlab::new(filter, token, args.api_url) {
match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
@@ -83,9 +84,9 @@ fn main() {
for (namespace, repolist) in repos {
let root = if let Some(namespace) = namespace {
path_as_string(&Path::new(&args.root).join(namespace))
path::path_as_string(&Path::new(&args.root).join(namespace))
} else {
path_as_string(Path::new(&args.root))
path::path_as_string(Path::new(&args.root))
};
let tree = config::ConfigTree::from_repos(root, repolist);
@@ -94,7 +95,7 @@ fn main() {
let config = config::Config::from_trees(trees);
match grm::sync_trees(config, args.init_worktree == "true") {
match tree::sync_trees(config, args.init_worktree == "true") {
Ok(success) => {
if !success {
process::exit(1)
@@ -122,7 +123,7 @@ fn main() {
process::exit(1);
}
};
match grm::table::get_status_table(config) {
match table::get_status_table(config) {
Ok((tables, errors)) => {
for table in tables {
println!("{}", table);
@@ -146,7 +147,7 @@ fn main() {
}
};
match grm::table::show_single_repo_status(&dir) {
match table::show_single_repo_status(&dir) {
Ok((table, warnings)) => {
println!("{}", table);
for warning in warnings {
@@ -184,7 +185,7 @@ fn main() {
}
};
let (found_repos, warnings) = match grm::find_in_tree(&path) {
let (found_repos, warnings) = match find_in_tree(&path) {
Ok((repos, warnings)) => (repos, warnings),
Err(error) => {
print_error(&error);
@@ -192,7 +193,7 @@ fn main() {
}
};
let trees = grm::config::ConfigTrees::from_trees(vec![found_repos]);
let trees = config::ConfigTrees::from_trees(vec![found_repos]);
if trees.trees_ref().iter().all(|t| match &t.repos {
None => false,
Some(r) => r.is_empty(),
@@ -237,16 +238,15 @@ fn main() {
}
}
cmd::FindAction::Config(args) => {
let config: crate::config::ConfigProvider =
match config::read_config(&args.config) {
Ok(config) => config,
Err(error) => {
print_error(&error);
process::exit(1);
}
};
let config: config::ConfigProvider = match config::read_config(&args.config) {
Ok(config) => config,
Err(error) => {
print_error(&error);
process::exit(1);
}
};
let token = match grm::get_token_from_command(&config.token_command) {
let token = match auth::get_token_from_command(&config.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
@@ -254,7 +254,7 @@ fn main() {
}
};
let filters = config.filters.unwrap_or(grm::config::ConfigProviderFilter {
let filters = config.filters.unwrap_or(config::ConfigProviderFilter {
access: Some(false),
owner: Some(false),
users: Some(vec![]),
@@ -314,14 +314,14 @@ fn main() {
for (namespace, namespace_repos) in repos {
let tree = config::ConfigTree {
root: if let Some(namespace) = namespace {
path_as_string(&Path::new(&config.root).join(namespace))
path::path_as_string(&Path::new(&config.root).join(namespace))
} else {
path_as_string(Path::new(&config.root))
path::path_as_string(Path::new(&config.root))
},
repos: Some(
namespace_repos
.into_iter()
.map(grm::config::RepoConfig::from_repo)
.map(config::RepoConfig::from_repo)
.collect(),
),
};
@@ -360,7 +360,7 @@ fn main() {
}
}
cmd::FindAction::Remote(args) => {
let token = match grm::get_token_from_command(&args.token_command) {
let token = match auth::get_token_from_command(&args.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
@@ -368,18 +368,14 @@ fn main() {
}
};
let filter = grm::provider::Filter::new(
args.users,
args.groups,
args.owner,
args.access,
);
let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access);
let worktree = args.worktree == "true";
let repos = match args.provider {
cmd::RemoteProvider::Github => {
match grm::provider::Github::new(filter, token, args.api_url) {
match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
@@ -389,7 +385,7 @@ fn main() {
.get_repos(worktree, args.force_ssh)
}
cmd::RemoteProvider::Gitlab => {
match grm::provider::Gitlab::new(filter, token, args.api_url) {
match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
@@ -410,14 +406,14 @@ fn main() {
for (namespace, repolist) in repos {
let tree = config::ConfigTree {
root: if let Some(namespace) = namespace {
path_as_string(&Path::new(&args.root).join(namespace))
path::path_as_string(&Path::new(&args.root).join(namespace))
} else {
path_as_string(Path::new(&args.root))
path::path_as_string(Path::new(&args.root))
},
repos: Some(
repolist
.into_iter()
.map(grm::config::RepoConfig::from_repo)
.map(config::RepoConfig::from_repo)
.collect(),
),
};
@@ -503,7 +499,13 @@ fn main() {
}
}
match grm::add_worktree(&cwd, name, subdirectory, track, action_args.no_track) {
match worktree::add_worktree(
&cwd,
name,
subdirectory,
track,
action_args.no_track,
) {
Ok(_) => print_success(&format!("Worktree {} created", &action_args.name)),
Err(error) => {
print_error(&format!("Error creating worktree: {}", error));
@@ -525,7 +527,7 @@ fn main() {
}
};
let repo = grm::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
print_error(&format!("Error opening repository: {}", error));
process::exit(1);
});
@@ -539,17 +541,17 @@ fn main() {
Ok(_) => print_success(&format!("Worktree {} deleted", &action_args.name)),
Err(error) => {
match error {
grm::WorktreeRemoveFailureReason::Error(msg) => {
repo::WorktreeRemoveFailureReason::Error(msg) => {
print_error(&msg);
process::exit(1);
}
grm::WorktreeRemoveFailureReason::Changes(changes) => {
repo::WorktreeRemoveFailureReason::Changes(changes) => {
print_warning(&format!(
"Changes in worktree: {}. Refusing to delete",
changes
));
}
grm::WorktreeRemoveFailureReason::NotMerged(message) => {
repo::WorktreeRemoveFailureReason::NotMerged(message) => {
print_warning(&message);
}
}
@@ -558,12 +560,12 @@ fn main() {
}
}
cmd::WorktreeAction::Status(_args) => {
let repo = grm::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
print_error(&format!("Error opening repository: {}", error));
process::exit(1);
});
match grm::table::get_worktree_status_table(&repo, &cwd) {
match table::get_worktree_status_table(&repo, &cwd) {
Ok((table, errors)) => {
println!("{}", table);
for error in errors {
@@ -583,8 +585,8 @@ fn main() {
// * Remove all files
// * Set `core.bare` to `true`
let repo = grm::RepoHandle::open(&cwd, false).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, false).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -611,8 +613,8 @@ fn main() {
}
}
cmd::WorktreeAction::Clean(_args) => {
let repo = grm::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -645,8 +647,8 @@ fn main() {
}
}
cmd::WorktreeAction::Fetch(_args) => {
let repo = grm::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -661,8 +663,8 @@ fn main() {
print_success("Fetched from all remotes");
}
cmd::WorktreeAction::Pull(args) => {
let repo = grm::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -702,8 +704,8 @@ fn main() {
print_error("There is no point in using --rebase without --pull");
process::exit(1);
}
let repo = grm::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound {
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository");
} else {
print_error(&format!("Opening repository failed: {}", error));
@@ -718,14 +720,10 @@ fn main() {
});
}
let config =
grm::repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
print_error(&format!(
"Failed to read worktree configuration: {}",
error
));
process::exit(1);
});
let config = repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
print_error(&format!("Failed to read worktree configuration: {}", error));
process::exit(1);
});
let worktrees = repo.get_worktrees().unwrap_or_else(|error| {
print_error(&format!("Error getting worktrees: {}", error));

View File

@@ -1,423 +1,37 @@
#![feature(io_error_more)]
#![feature(const_option_ext)]
use std::fs;
use std::path::{Path, PathBuf};
use std::process;
use std::path::Path;
pub mod auth;
pub mod config;
pub mod output;
pub mod path;
pub mod provider;
pub mod repo;
pub mod table;
pub mod tree;
pub mod worktree;
use config::Config;
use output::*;
use repo::{clone_repo, detect_remote_type, Remote, RemoteType};
pub use repo::{
RemoteTrackingStatus, Repo, RepoErrorKind, RepoHandle, WorktreeRemoveFailureReason,
};
const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
const BRANCH_NAMESPACE_SEPARATOR: &str = "/";
const GIT_CONFIG_BARE_KEY: &str = "core.bare";
const GIT_CONFIG_PUSH_DEFAULT: &str = "push.default";
/// A set of repositories sharing a common filesystem root.
pub struct Tree {
    // Root directory the repositories live under. May contain `~` or
    // `$HOME`; callers run it through `expand_path()` before use.
    root: String,
    // The repositories managed below `root`.
    repos: Vec<Repo>,
}
#[cfg(test)]
mod tests {
use super::*;
fn setup() {
std::env::set_var("HOME", "/home/test");
}
#[test]
fn check_expand_tilde() {
setup();
assert_eq!(
expand_path(Path::new("~/file")),
Path::new("/home/test/file")
);
}
#[test]
fn check_expand_invalid_tilde() {
setup();
assert_eq!(
expand_path(Path::new("/home/~/file")),
Path::new("/home/~/file")
);
}
#[test]
fn check_expand_home() {
setup();
assert_eq!(
expand_path(Path::new("$HOME/file")),
Path::new("/home/test/file")
);
assert_eq!(
expand_path(Path::new("${HOME}/file")),
Path::new("/home/test/file")
);
}
}
pub fn path_as_string(path: &Path) -> String {
path.to_path_buf().into_os_string().into_string().unwrap()
}
pub fn env_home() -> PathBuf {
match std::env::var("HOME") {
Ok(path) => Path::new(&path).to_path_buf(),
Err(e) => {
print_error(&format!("Unable to read HOME: {}", e));
process::exit(1);
}
}
}
fn expand_path(path: &Path) -> PathBuf {
fn home_dir() -> Option<PathBuf> {
Some(env_home())
}
let expanded_path = match shellexpand::full_with_context(
&path_as_string(path),
home_dir,
|name| -> Result<Option<String>, &'static str> {
match name {
"HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
_ => Ok(None),
}
},
) {
Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
Ok(std::borrow::Cow::Owned(path)) => path,
Err(e) => {
print_error(&format!("Unable to expand root: {}", e));
process::exit(1);
}
};
Path::new(&expanded_path).to_path_buf()
}
/// Runs `command` through `sh -c` and returns the first line of its stdout.
///
/// This is used to obtain an auth token from a user-configured command
/// (e.g. a password manager CLI), so the token never has to be stored in
/// the configuration file itself.
///
/// # Errors
///
/// Returns a human-readable message if the command cannot be spawned, exits
/// unsuccessfully, writes anything to stderr, produces no stdout, or
/// produces non-UTF-8 output.
pub fn get_token_from_command(command: &str) -> Result<String, String> {
    let output = std::process::Command::new("/usr/bin/env")
        .arg("sh")
        .arg("-c")
        .arg(command)
        .output()
        .map_err(|error| format!("Failed to run token-command: {}", error))?;

    let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?;
    let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?;

    if !output.status.success() {
        return if stderr.is_empty() {
            Err(String::from("Token command failed."))
        } else {
            Err(format!("Token command failed: {}", stderr))
        };
    }

    // Even on success, any stderr output is treated as an error so problems
    // in the token command do not go unnoticed.
    if !stderr.is_empty() {
        return Err(format!("Token command produced stderr: {}", stderr));
    }

    if stdout.is_empty() {
        return Err(String::from("Token command did not produce output"));
    }

    // `split('\n').next()` always yields at least one element for the
    // (already verified) non-empty string, so the previous
    // `ok_or_else("Output did not contain any newline")` branch was
    // unreachable and its message misleading. Take the first line directly.
    let token = stdout.split('\n').next().unwrap_or(&stdout);

    Ok(token.to_string())
}
/// Brings a single repository on disk in sync with its configuration.
///
/// Creates the repository if it is missing (clone when remotes are
/// configured, plain `init` otherwise), optionally checks out the default
/// branch as the first worktree for freshly created worktree setups, and
/// reconciles the configured remotes (add missing ones, update diverging
/// URLs, delete unconfigured ones).
///
/// # Errors
///
/// Returns a human-readable message when a git operation fails or when the
/// on-disk layout (worktree vs. regular) does not match the configuration.
fn sync_repo(root_path: &Path, repo: &Repo, init_worktree: bool) -> Result<(), String> {
    let repo_path = root_path.join(&repo.fullname());
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut newly_created = false;

    if repo_path.exists() {
        // An existing directory must match the configured layout: a worktree
        // setup keeps its git directory in a dedicated subdirectory.
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        };
    } else if repo.remotes.is_none() || repo.remotes.as_ref().unwrap().is_empty() {
        // No remotes configured: nothing to clone from, initialize an empty
        // repository instead.
        print_repo_action(
            &repo.name,
            "Repository does not have remotes configured, initializing new",
        );
        match RepoHandle::init(&repo_path, repo.worktree_setup) {
            // The handle itself is not needed here; the repo is reopened
            // below. (The previous `Some(r)` wrapper was discarded anyway.)
            Ok(_) => {
                print_repo_success(&repo.name, "Repository created");
            }
            Err(e) => {
                return Err(format!("Repository failed during init: {}", e));
            }
        };
    } else {
        // Clone from the first configured remote; the remaining remotes are
        // added in the reconciliation step below.
        let first = repo.remotes.as_ref().unwrap().first().unwrap();
        match clone_repo(first, &repo_path, repo.worktree_setup) {
            Ok(_) => {
                print_repo_success(&repo.name, "Repository successfully cloned");
            }
            Err(e) => {
                return Err(format!("Repository failed during clone: {}", e));
            }
        };
        newly_created = true;
    }

    let repo_handle = match RepoHandle::open(&repo_path, repo.worktree_setup) {
        Ok(repo) => repo,
        Err(error) => {
            // Give a more specific error when the layout mismatch is the
            // other way around (configured regular, but worktree on disk).
            if !repo.worktree_setup && RepoHandle::open(&repo_path, true).is_ok() {
                return Err(String::from(
                    "Repo already exists, but is using a worktree setup",
                ));
            } else {
                return Err(format!("Opening repository failed: {}", error));
            }
        }
    };

    // For a freshly cloned worktree setup, optionally check out the default
    // branch as the first worktree.
    if newly_created && repo.worktree_setup && init_worktree {
        match repo_handle.default_branch() {
            Ok(branch) => {
                add_worktree(&repo_path, &branch.name()?, None, None, false)?;
            }
            // Fixed typo in user-facing message ("initializtion").
            Err(_error) => print_repo_error(
                &repo.name,
                "Could not determine default branch, skipping worktree initialization",
            ),
        }
    }

    if let Some(remotes) = &repo.remotes {
        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Add missing remotes and fix up URLs that diverge from the config.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;
            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();
                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        };
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }

        // Delete remotes that exist locally but are no longer configured.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }

    Ok(())
}
/// Returns the paths of all repositories found below `root_path` that are
/// not part of `managed_repos`.
pub fn find_unmanaged_repos(
    root_path: &Path,
    managed_repos: &[Repo],
) -> Result<Vec<PathBuf>, String> {
    // A candidate is managed when some configured repo resolves to the same
    // absolute path below the root.
    let is_managed = |candidate: &PathBuf| {
        managed_repos
            .iter()
            .any(|managed| Path::new(root_path).join(managed.fullname()) == *candidate)
    };

    Ok(find_repo_paths(root_path)?
        .into_iter()
        .filter(|repo_path| !is_managed(repo_path))
        .collect())
}
/// Synchronizes all configured trees with the filesystem.
///
/// Every configured repository is created/updated via `sync_repo`, and a
/// warning is printed for each repository found on disk that is not part of
/// any tree's configuration.
///
/// Returns `Ok(true)` when everything succeeded and `Ok(false)` when at
/// least one repository failed to sync; individual failures are reported
/// but do not abort the run.
pub fn sync_trees(config: Config, init_worktree: bool) -> Result<bool, String> {
    let mut failures = false;

    let mut unmanaged_repos_absolute_paths = vec![];
    let mut managed_repos_absolute_paths = vec![];

    let trees = config.trees()?;

    for tree in trees {
        let repos: Vec<Repo> = tree
            .repos
            .unwrap_or_default()
            .into_iter()
            .map(|repo| repo.into_repo())
            .collect();

        // Tree roots may contain `~` or `$HOME`.
        let root_path = expand_path(Path::new(&tree.root));

        for repo in &repos {
            managed_repos_absolute_paths.push(root_path.join(repo.fullname()));
            match sync_repo(&root_path, repo, init_worktree) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        match find_unmanaged_repos(&root_path, &repos) {
            Ok(repos) => {
                unmanaged_repos_absolute_paths.extend(repos);
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    // Warn about on-disk repos no tree manages. A path that looks unmanaged
    // within one tree may still be managed by another, so check the paths
    // collected across *all* trees before warning.
    for unmanaged_repo_absolute_path in &unmanaged_repos_absolute_paths {
        if managed_repos_absolute_paths
            .iter()
            .any(|managed_repo_absolute_path| {
                managed_repo_absolute_path == unmanaged_repo_absolute_path
            })
        {
            continue;
        }
        print_warning(&format!(
            "Found unmanaged repository: \"{}\"",
            path_as_string(unmanaged_repo_absolute_path)
        ));
    }

    Ok(!failures)
}
/// Finds repositories recursively, returning their path
///
/// A directory counts as a repository when it contains either a `.git`
/// entry or the main-worktree directory used by worktree setups. Once a
/// repository is found, its subdirectories are not descended into, and
/// symlinks are never followed.
fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
    let mut repos = Vec::new();

    let git_dir = path.join(".git");
    let git_worktree = path.join(GIT_MAIN_WORKTREE_DIRECTORY);

    if git_dir.exists() || git_worktree.exists() {
        repos.push(path.to_path_buf());
    } else {
        match fs::read_dir(path) {
            Ok(contents) => {
                for content in contents {
                    match content {
                        Ok(entry) => {
                            let path = entry.path();
                            // Skip symlinks to avoid cycles and repositories
                            // that live outside the tree.
                            if path.is_symlink() {
                                continue;
                            }
                            if path.is_dir() {
                                match find_repo_paths(&path) {
                                    Ok(ref mut r) => repos.append(r),
                                    Err(error) => return Err(error),
                                }
                            }
                        }
                        Err(e) => {
                            return Err(format!("Error accessing directory: {}", e));
                        }
                    };
                }
            }
            Err(e) => {
                // Translate the most common error kinds into friendlier
                // messages. `NotADirectory` requires the nightly
                // `io_error_more` feature enabled at the top of this file.
                return Err(format!(
                    "Failed to open \"{}\": {}",
                    &path.display(),
                    match e.kind() {
                        std::io::ErrorKind::NotADirectory =>
                            String::from("directory expected, but path is not a directory"),
                        std::io::ErrorKind::NotFound => String::from("not found"),
                        _ => format!("{:?}", e.kind()),
                    }
                ));
            }
        };
    }

    Ok(repos)
}
/// Returns the directory that actually holds the git data for `path`.
///
/// For a regular repository that is the repository path itself; for a
/// worktree setup the git directory lives in a dedicated subdirectory.
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
    // `if` over `match`-on-bool: same behavior, idiomatic form
    // (clippy::match_bool).
    if is_worktree {
        path.join(GIT_MAIN_WORKTREE_DIRECTORY)
    } else {
        path.to_path_buf()
    }
}
/// Find all git repositories under root, recursively
///
/// The bool in the return value specifies whether there is a repository
/// in root itself.
#[allow(clippy::type_complexity)]
fn find_repos(root: &Path) -> Result<Option<(Vec<Repo>, Vec<String>, bool)>, String> {
let mut repos: Vec<Repo> = Vec::new();
fn find_repos(root: &Path) -> Result<Option<(Vec<repo::Repo>, Vec<String>, bool)>, String> {
let mut repos: Vec<repo::Repo> = Vec::new();
let mut repo_in_root = false;
let mut warnings = Vec::new();
for path in find_repo_paths(root)? {
let is_worktree = RepoHandle::detect_worktree(&path);
for path in tree::find_repo_paths(root)? {
let is_worktree = repo::RepoHandle::detect_worktree(&path);
if path == root {
repo_in_root = true;
}
match RepoHandle::open(&path, is_worktree) {
match repo::RepoHandle::open(&path, is_worktree) {
Err(error) => {
warnings.push(format!(
"Error opening repo {}{}: {}",
@@ -436,32 +50,32 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<Repo>, Vec<String>, bool)>, Str
Err(error) => {
warnings.push(format!(
"{}: Error getting remotes: {}",
&path_as_string(&path),
&path::path_as_string(&path),
error
));
continue;
}
};
let mut results: Vec<Remote> = Vec::new();
let mut results: Vec<repo::Remote> = Vec::new();
for remote_name in remotes.iter() {
match repo.find_remote(remote_name)? {
Some(remote) => {
let name = remote.name();
let url = remote.url();
let remote_type = match detect_remote_type(&url) {
let remote_type = match repo::detect_remote_type(&url) {
Some(t) => t,
None => {
warnings.push(format!(
"{}: Could not detect remote type of \"{}\"",
&path_as_string(&path),
&path::path_as_string(&path),
&url
));
continue;
}
};
results.push(Remote {
results.push(repo::Remote {
name,
url,
remote_type,
@@ -470,7 +84,7 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<Repo>, Vec<String>, bool)>, Str
None => {
warnings.push(format!(
"{}: Remote {} not found",
&path_as_string(&path),
&path::path_as_string(&path),
remote_name
));
continue;
@@ -483,7 +97,9 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<Repo>, Vec<String>, bool)>, Str
(
None,
match &root.parent() {
Some(parent) => path_as_string(path.strip_prefix(parent).unwrap()),
Some(parent) => {
path::path_as_string(path.strip_prefix(parent).unwrap())
}
None => {
warnings.push(String::from("Getting name of the search root failed. Do you have a git repository in \"/\"?"));
continue;
@@ -495,15 +111,15 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<Repo>, Vec<String>, bool)>, Str
let namespace = name.parent().unwrap();
(
if namespace != Path::new("") {
Some(path_as_string(namespace).to_string())
Some(path::path_as_string(namespace).to_string())
} else {
None
},
path_as_string(name),
path::path_as_string(name),
)
};
repos.push(Repo {
repos.push(repo::Repo {
name,
namespace,
remotes: Some(remotes),
@@ -515,10 +131,10 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<Repo>, Vec<String>, bool)>, Str
Ok(Some((repos, warnings, repo_in_root)))
}
pub fn find_in_tree(path: &Path) -> Result<(Tree, Vec<String>), String> {
pub fn find_in_tree(path: &Path) -> Result<(tree::Tree, Vec<String>), String> {
let mut warnings = Vec::new();
let (repos, repo_in_root): (Vec<Repo>, bool) = match find_repos(path)? {
let (repos, repo_in_root): (Vec<repo::Repo>, bool) = match find_repos(path)? {
Some((vec, mut repo_warnings, repo_in_root)) => {
warnings.append(&mut repo_warnings);
(vec, repo_in_root)
@@ -539,171 +155,10 @@ pub fn find_in_tree(path: &Path) -> Result<(Tree, Vec<String>), String> {
}
Ok((
Tree {
tree::Tree {
root: root.into_os_string().into_string().unwrap(),
repos,
},
warnings,
))
}
pub fn add_worktree(
directory: &Path,
name: &str,
subdirectory: Option<&Path>,
track: Option<(&str, &str)>,
no_track: bool,
) -> Result<(), String> {
let repo = RepoHandle::open(directory, true).map_err(|error| match error.kind {
RepoErrorKind::NotFound => {
String::from("Current directory does not contain a worktree setup")
}
_ => format!("Error opening repo: {}", error),
})?;
let config = repo::read_worktree_root_config(directory)?;
if repo.find_worktree(name).is_ok() {
return Err(format!("Worktree {} already exists", &name));
}
let path = match subdirectory {
Some(dir) => directory.join(dir).join(name),
None => directory.join(Path::new(name)),
};
let mut remote_branch_exists = false;
let default_checkout = || repo.default_branch()?.to_commit();
let checkout_commit;
if no_track {
checkout_commit = default_checkout()?;
} else {
match track {
Some((remote_name, remote_branch_name)) => {
let remote_branch = repo.find_remote_branch(remote_name, remote_branch_name);
match remote_branch {
Ok(branch) => {
remote_branch_exists = true;
checkout_commit = branch.to_commit()?;
}
Err(_) => {
remote_branch_exists = false;
checkout_commit = default_checkout()?;
}
}
}
None => match &config {
None => checkout_commit = default_checkout()?,
Some(config) => match &config.track {
None => checkout_commit = default_checkout()?,
Some(track_config) => {
if track_config.default {
let remote_branch =
repo.find_remote_branch(&track_config.default_remote, name);
match remote_branch {
Ok(branch) => {
remote_branch_exists = true;
checkout_commit = branch.to_commit()?;
}
Err(_) => {
checkout_commit = default_checkout()?;
}
}
} else {
checkout_commit = default_checkout()?;
}
}
},
},
};
}
let mut target_branch = match repo.find_local_branch(name) {
Ok(branchref) => branchref,
Err(_) => repo.create_branch(name, &checkout_commit)?,
};
fn push(
remote: &mut repo::RemoteHandle,
branch_name: &str,
remote_branch_name: &str,
repo: &repo::RepoHandle,
) -> Result<(), String> {
if !remote.is_pushable()? {
return Err(format!(
"Cannot push to non-pushable remote {}",
remote.url()
));
}
remote.push(branch_name, remote_branch_name, repo)
}
if !no_track {
if let Some((remote_name, remote_branch_name)) = track {
if remote_branch_exists {
target_branch.set_upstream(remote_name, remote_branch_name)?;
} else {
let mut remote = repo
.find_remote(remote_name)
.map_err(|error| format!("Error getting remote {}: {}", remote_name, error))?
.ok_or_else(|| format!("Remote {} not found", remote_name))?;
push(
&mut remote,
&target_branch.name()?,
remote_branch_name,
&repo,
)?;
target_branch.set_upstream(remote_name, remote_branch_name)?;
}
} else if let Some(config) = config {
if let Some(track_config) = config.track {
if track_config.default {
let remote_name = track_config.default_remote;
if remote_branch_exists {
target_branch.set_upstream(&remote_name, name)?;
} else {
let remote_branch_name = match track_config.default_remote_prefix {
Some(prefix) => {
format!("{}{}{}", &prefix, BRANCH_NAMESPACE_SEPARATOR, &name)
}
None => name.to_string(),
};
let mut remote = repo
.find_remote(&remote_name)
.map_err(|error| {
format!("Error getting remote {}: {}", remote_name, error)
})?
.ok_or_else(|| format!("Remote {} not found", remote_name))?;
if !remote.is_pushable()? {
return Err(format!(
"Cannot push to non-pushable remote {}",
remote.url()
));
}
push(
&mut remote,
&target_branch.name()?,
&remote_branch_name,
&repo,
)?;
target_branch.set_upstream(&remote_name, &remote_branch_name)?;
}
}
}
}
}
if let Some(subdirectory) = subdirectory {
std::fs::create_dir_all(subdirectory).map_err(|error| error.to_string())?;
}
repo.new_worktree(name, &path, &target_branch)?;
Ok(())
}

View File

@@ -20,12 +20,12 @@ pub fn print_repo_action(repo: &str, message: &str) {
}
pub fn print_action(message: &str) {
let stderr = Term::stderr();
let stdout = Term::stdout();
let mut style = Style::new().yellow();
if stderr.is_term() {
if stdout.is_term() {
style = style.force_styling(true);
}
stderr
stdout
.write_line(&format!("[{}] {}", style.apply_to('\u{2699}'), &message))
.unwrap();
}
@@ -46,13 +46,13 @@ pub fn print_repo_success(repo: &str, message: &str) {
}
pub fn print_success(message: &str) {
let stderr = Term::stderr();
let stdout = Term::stdout();
let mut style = Style::new().green();
if stderr.is_term() {
if stdout.is_term() {
style = style.force_styling(true);
}
stderr
stdout
.write_line(&format!("[{}] {}", style.apply_to('\u{2714}'), &message))
.unwrap();
}

84
src/path.rs Normal file
View File

@@ -0,0 +1,84 @@
use std::path::{Path, PathBuf};
use std::process;
use super::output::*;
#[cfg(test)]
mod tests {
    use super::*;

    // Point HOME at a fixed location so expansion results are deterministic
    // regardless of the environment running the tests.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    #[test]
    fn check_expand_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("~/file")),
            Path::new("/home/test/file")
        );
    }

    // A tilde that is not at the start of the path must be left untouched.
    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("/home/~/file")),
            Path::new("/home/~/file")
        );
    }

    // Both `$HOME` and `${HOME}` spellings must expand.
    #[test]
    fn check_expand_home() {
        setup();
        assert_eq!(
            expand_path(Path::new("$HOME/file")),
            Path::new("/home/test/file")
        );
        assert_eq!(
            expand_path(Path::new("${HOME}/file")),
            Path::new("/home/test/file")
        );
    }
}
/// Converts a path into an owned `String`.
///
/// Panics if the path is not valid unicode.
pub fn path_as_string(path: &Path) -> String {
    path.as_os_str().to_os_string().into_string().unwrap()
}
/// Returns the current user's home directory as read from `$HOME`.
///
/// Exits the whole process with an error message when `$HOME` is unset or
/// not valid unicode, as path expansion cannot work without it.
pub fn env_home() -> PathBuf {
    match std::env::var("HOME") {
        Ok(path) => Path::new(&path).to_path_buf(),
        Err(e) => {
            print_error(&format!("Unable to read HOME: {}", e));
            process::exit(1);
        }
    }
}
/// Expands a leading `~` and the `$HOME` / `${HOME}` variables in `path`.
///
/// Exits the process with an error message when expansion fails. Panics if
/// the path is not valid unicode (via `path_as_string`).
pub fn expand_path(path: &Path) -> PathBuf {
    // Adapter so shellexpand gets its expected `Option<PathBuf>` signature.
    fn home_dir() -> Option<PathBuf> {
        Some(env_home())
    }

    let expanded_path = match shellexpand::full_with_context(
        &path_as_string(path),
        home_dir,
        |name| -> Result<Option<String>, &'static str> {
            match name {
                // Only HOME is substituted explicitly; other variables
                // return `None` here. NOTE(review): presumably shellexpand
                // then applies its own unknown-variable handling — confirm
                // against the shellexpand docs.
                "HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
                _ => Ok(None),
            }
        },
    ) {
        // `full_with_context` may return the input unchanged (Borrowed) or
        // an expanded copy (Owned); both become an owned String.
        Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
        Ok(std::borrow::Cow::Owned(path)) => path,
        Err(e) => {
            print_error(&format!("Unable to expand root: {}", e));
            process::exit(1);
        }
    };

    Path::new(&expanded_path).to_path_buf()
}

View File

@@ -1,11 +1,12 @@
use serde::Deserialize;
use super::auth;
use super::escape;
use super::ApiErrorResponse;
use super::Filter;
use super::JsonError;
use super::Project;
use super::Provider;
use super::SecretToken;
const PROVIDER_NAME: &str = "github";
const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json";
@@ -66,7 +67,7 @@ impl JsonError for GithubApiErrorResponse {
pub struct Github {
filter: Filter,
secret_token: SecretToken,
secret_token: auth::AuthToken,
}
impl Provider for Github {
@@ -75,7 +76,7 @@ impl Provider for Github {
fn new(
filter: Filter,
secret_token: SecretToken,
secret_token: auth::AuthToken,
api_url_override: Option<String>,
) -> Result<Self, String> {
if api_url_override.is_some() {
@@ -87,20 +88,20 @@ impl Provider for Github {
})
}
fn name(&self) -> String {
String::from(PROVIDER_NAME)
fn name(&self) -> &str {
PROVIDER_NAME
}
fn filter(&self) -> Filter {
self.filter.clone()
fn filter(&self) -> &Filter {
&self.filter
}
fn secret_token(&self) -> SecretToken {
self.secret_token.clone()
fn secret_token(&self) -> &auth::AuthToken {
&self.secret_token
}
fn auth_header_key() -> String {
"token".to_string()
fn auth_header_key() -> &'static str {
"token"
}
fn get_user_projects(
@@ -108,7 +109,7 @@ impl Provider for Github {
user: &str,
) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
self.call_list(
&format!("{GITHUB_API_BASEURL}/users/{user}/repos"),
&format!("{GITHUB_API_BASEURL}/users/{}/repos", escape(user)),
Some(ACCEPT_HEADER_JSON),
)
}
@@ -118,7 +119,7 @@ impl Provider for Github {
group: &str,
) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
self.call_list(
&format!("{GITHUB_API_BASEURL}/orgs/{group}/repos?type=all"),
&format!("{GITHUB_API_BASEURL}/orgs/{}/repos?type=all", escape(group)),
Some(ACCEPT_HEADER_JSON),
)
}
@@ -135,8 +136,8 @@ impl Provider for Github {
fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> {
Ok(super::call::<GithubUser, GithubApiErrorResponse>(
&format!("{GITHUB_API_BASEURL}/user"),
&Self::auth_header_key(),
&self.secret_token(),
Self::auth_header_key(),
self.secret_token(),
Some(ACCEPT_HEADER_JSON),
)?
.username)

View File

@@ -1,11 +1,12 @@
use serde::Deserialize;
use super::auth;
use super::escape;
use super::ApiErrorResponse;
use super::Filter;
use super::JsonError;
use super::Project;
use super::Provider;
use super::SecretToken;
const PROVIDER_NAME: &str = "gitlab";
const ACCEPT_HEADER_JSON: &str = "application/json";
@@ -56,13 +57,13 @@ impl Project for GitlabProject {
}
fn private(&self) -> bool {
matches!(self.visibility, GitlabVisibility::Private)
!matches!(self.visibility, GitlabVisibility::Public)
}
}
#[derive(Deserialize)]
pub struct GitlabApiErrorResponse {
#[serde(alias = "error_description")]
#[serde(alias = "error_description", alias = "error")]
pub message: String,
}
@@ -74,7 +75,7 @@ impl JsonError for GitlabApiErrorResponse {
pub struct Gitlab {
filter: Filter,
secret_token: SecretToken,
secret_token: auth::AuthToken,
api_url_override: Option<String>,
}
@@ -94,7 +95,7 @@ impl Provider for Gitlab {
fn new(
filter: Filter,
secret_token: SecretToken,
secret_token: auth::AuthToken,
api_url_override: Option<String>,
) -> Result<Self, String> {
Ok(Self {
@@ -104,20 +105,20 @@ impl Provider for Gitlab {
})
}
fn name(&self) -> String {
String::from(PROVIDER_NAME)
fn name(&self) -> &str {
PROVIDER_NAME
}
fn filter(&self) -> Filter {
self.filter.clone()
fn filter(&self) -> &Filter {
&self.filter
}
fn secret_token(&self) -> SecretToken {
self.secret_token.clone()
fn secret_token(&self) -> &auth::AuthToken {
&self.secret_token
}
fn auth_header_key() -> String {
"bearer".to_string()
fn auth_header_key() -> &'static str {
"bearer"
}
fn get_user_projects(
@@ -125,7 +126,7 @@ impl Provider for Gitlab {
user: &str,
) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
self.call_list(
&format!("{}/api/v4/users/{}/projects", self.api_url(), user),
&format!("{}/api/v4/users/{}/projects", self.api_url(), escape(user)),
Some(ACCEPT_HEADER_JSON),
)
}
@@ -138,7 +139,7 @@ impl Provider for Gitlab {
&format!(
"{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false",
self.api_url(),
group
escape(group),
),
Some(ACCEPT_HEADER_JSON),
)
@@ -156,8 +157,8 @@ impl Provider for Gitlab {
fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> {
Ok(super::call::<GitlabUser, GitlabApiErrorResponse>(
&format!("{}/api/v4/user", self.api_url()),
&Self::auth_header_key(),
&self.secret_token(),
Self::auth_header_key(),
self.secret_token(),
Some(ACCEPT_HEADER_JSON),
)?
.username)

View File

@@ -9,7 +9,8 @@ pub mod gitlab;
pub use github::Github;
pub use gitlab::Gitlab;
use crate::{Remote, RemoteType, Repo};
use super::auth;
use super::repo;
use std::collections::HashMap;
@@ -28,16 +29,25 @@ enum ProjectResponse<T, U> {
Failure(U),
}
/// Percent-encodes a string so it can be safely embedded as a single URL
/// path or query component (e.g. user and group names in provider API URLs).
pub fn escape(s: &str) -> String {
    url_escape::encode_component(s).to_string()
}
pub trait Project {
fn into_repo_config(self, provider_name: &str, worktree_setup: bool, force_ssh: bool) -> Repo
fn into_repo_config(
self,
provider_name: &str,
worktree_setup: bool,
force_ssh: bool,
) -> repo::Repo
where
Self: Sized,
{
Repo {
repo::Repo {
name: self.name(),
namespace: self.namespace(),
worktree_setup,
remotes: Some(vec![Remote {
remotes: Some(vec![repo::Remote {
name: String::from(provider_name),
url: if force_ssh || self.private() {
self.ssh_url()
@@ -45,9 +55,9 @@ pub trait Project {
self.http_url()
},
remote_type: if force_ssh || self.private() {
RemoteType::Ssh
repo::RemoteType::Ssh
} else {
RemoteType::Https
repo::RemoteType::Https
},
}]),
}
@@ -60,8 +70,6 @@ pub trait Project {
fn private(&self) -> bool;
}
type SecretToken = String;
#[derive(Clone)]
pub struct Filter {
users: Vec<String>,
@@ -108,16 +116,16 @@ pub trait Provider {
fn new(
filter: Filter,
secret_token: SecretToken,
secret_token: auth::AuthToken,
api_url_override: Option<String>,
) -> Result<Self, String>
where
Self: Sized;
fn name(&self) -> String;
fn filter(&self) -> Filter;
fn secret_token(&self) -> SecretToken;
fn auth_header_key() -> String;
fn name(&self) -> &str;
fn filter(&self) -> &Filter;
fn secret_token(&self) -> &auth::AuthToken;
fn auth_header_key() -> &'static str;
fn get_user_projects(
&self,
@@ -158,7 +166,11 @@ pub trait Provider {
.header("accept", accept_header.unwrap_or("application/json"))
.header(
"authorization",
format!("{} {}", Self::auth_header_key(), &self.secret_token()),
format!(
"{} {}",
Self::auth_header_key(),
&self.secret_token().access()
),
)
.body(())
.map_err(|error| error.to_string())?;
@@ -201,7 +213,7 @@ pub trait Provider {
&self,
worktree_setup: bool,
force_ssh: bool,
) -> Result<HashMap<Option<String>, Vec<Repo>>, String> {
) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> {
let mut repos = vec![];
if self.filter().owner {
@@ -278,12 +290,12 @@ pub trait Provider {
}
}
let mut ret: HashMap<Option<String>, Vec<Repo>> = HashMap::new();
let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new();
for repo in repos {
let namespace = repo.namespace();
let mut repo = repo.into_repo_config(&self.name(), worktree_setup, force_ssh);
let mut repo = repo.into_repo_config(self.name(), worktree_setup, force_ssh);
// Namespace is already part of the hashmap key. I'm not too happy
// about the data exchange format here.
@@ -299,7 +311,7 @@ pub trait Provider {
fn call<T, U>(
uri: &str,
auth_header_key: &str,
secret_token: &str,
secret_token: &auth::AuthToken,
accept_header: Option<&str>,
) -> Result<T, ApiErrorResponse<U>>
where
@@ -313,7 +325,7 @@ where
.header("accept", accept_header.unwrap_or("application/json"))
.header(
"authorization",
format!("{} {}", &auth_header_key, &secret_token),
format!("{} {}", &auth_header_key, &secret_token.access()),
)
.body(())
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;

View File

@@ -3,9 +3,13 @@ use std::path::Path;
use git2::Repository;
use crate::output::*;
use super::output::*;
use super::path;
use super::worktree;
const WORKTREE_CONFIG_FILE_NAME: &str = "grm.toml";
const GIT_CONFIG_BARE_KEY: &str = "core.bare";
const GIT_CONFIG_PUSH_DEFAULT: &str = "push.default";
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
@@ -506,7 +510,7 @@ impl RepoHandle {
false => Repository::open,
};
let path = match is_worktree {
true => path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY),
true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
false => path.to_path_buf(),
};
match open_func(path) {
@@ -655,6 +659,14 @@ impl RepoHandle {
.collect::<Result<Vec<Branch>, String>>()
}
    /// Returns all remote-tracking branches of the repository.
    ///
    /// Errors from libgit2 are converted to `String` via
    /// `convert_libgit2_error`; the first failing branch aborts the whole
    /// collection.
    pub fn remote_branches(&self) -> Result<Vec<Branch>, String> {
        self.0
            .branches(Some(git2::BranchType::Remote))
            .map_err(convert_libgit2_error)?
            // Each item is a (git2::Branch, BranchType) pair; only the branch
            // itself is kept, wrapped in our own `Branch` newtype.
            .map(|branch| Ok(Branch(branch.map_err(convert_libgit2_error)?.0)))
            .collect::<Result<Vec<Branch>, String>>()
    }
pub fn fetch(&self, remote_name: &str) -> Result<(), String> {
let mut remote = self
.0
@@ -679,7 +691,7 @@ impl RepoHandle {
pub fn init(path: &Path, is_worktree: bool) -> Result<Self, String> {
let repo = match is_worktree {
false => Repository::init(path).map_err(convert_libgit2_error)?,
true => Repository::init_bare(path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY))
true => Repository::init_bare(path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY))
.map_err(convert_libgit2_error)?,
};
@@ -742,8 +754,8 @@ impl RepoHandle {
let mut config = self.config()?;
config
.set_bool(crate::GIT_CONFIG_BARE_KEY, value)
.map_err(|error| format!("Could not set {}: {}", crate::GIT_CONFIG_BARE_KEY, error))
.set_bool(GIT_CONFIG_BARE_KEY, value)
.map_err(|error| format!("Could not set {}: {}", GIT_CONFIG_BARE_KEY, error))
}
pub fn convert_to_worktree(
@@ -766,7 +778,7 @@ impl RepoHandle {
return Err(WorktreeConversionFailureReason::Ignored);
}
std::fs::rename(".git", crate::GIT_MAIN_WORKTREE_DIRECTORY).map_err(|error| {
std::fs::rename(".git", worktree::GIT_MAIN_WORKTREE_DIRECTORY).map_err(|error| {
WorktreeConversionFailureReason::Error(format!(
"Error moving .git directory: {}",
error
@@ -786,7 +798,7 @@ impl RepoHandle {
Ok(entry) => {
let path = entry.path();
// unwrap is safe here, the path will ALWAYS have a file component
if path.file_name().unwrap() == crate::GIT_MAIN_WORKTREE_DIRECTORY {
if path.file_name().unwrap() == worktree::GIT_MAIN_WORKTREE_DIRECTORY {
continue;
}
if path.is_file() || path.is_symlink() {
@@ -835,18 +847,12 @@ impl RepoHandle {
config
.set_str(
crate::GIT_CONFIG_PUSH_DEFAULT,
GIT_CONFIG_PUSH_DEFAULT,
match value {
GitPushDefaultSetting::Upstream => "upstream",
},
)
.map_err(|error| {
format!(
"Could not set {}: {}",
crate::GIT_CONFIG_PUSH_DEFAULT,
error
)
})
.map_err(|error| format!("Could not set {}: {}", GIT_CONFIG_PUSH_DEFAULT, error))
}
pub fn has_untracked_files(&self, is_worktree: bool) -> Result<bool, String> {
@@ -1036,14 +1042,80 @@ impl RepoHandle {
})
}
pub fn default_branch(&self) -> Result<Branch, String> {
match self.0.find_branch("main", git2::BranchType::Local) {
Ok(branch) => Ok(Branch(branch)),
Err(_) => match self.0.find_branch("master", git2::BranchType::Local) {
Ok(branch) => Ok(Branch(branch)),
Err(_) => Err(String::from("Could not determine default branch")),
},
    /// Tries to determine the default branch of remote `remote_name`,
    /// returning the corresponding *local* branch.
    ///
    /// Returns `Ok(None)` when the default branch cannot be determined;
    /// errors only on repository-access failures.
    pub fn get_remote_default_branch(&self, remote_name: &str) -> Result<Option<Branch>, String> {
        // libgit2's `git_remote_default_branch()` and `Remote::default_branch()`
        // need an actual connection to the remote, so they may fail.
        if let Some(mut remote) = self.find_remote(remote_name)? {
            if remote.connected() {
                let remote = remote; // unmut
                if let Ok(remote_default_branch) = remote.default_branch() {
                    return Ok(Some(self.find_local_branch(&remote_default_branch)?));
                };
            }
        }

        // Note that <remote>/HEAD only exists after a normal clone, there is no way to get the
        // remote HEAD afterwards. So this is a "best effort" approach.
        if let Ok(remote_head) = self.find_remote_branch(remote_name, "HEAD") {
            if let Some(pointer_name) = remote_head.as_reference().symbolic_target() {
                // <remote>/HEAD is expected to point at
                // "refs/remotes/<remote>/<branch>"; strip the prefix to get
                // the plain branch name.
                if let Some(local_branch_name) =
                    pointer_name.strip_prefix(&format!("refs/remotes/{}/", remote_name))
                {
                    return Ok(Some(self.find_local_branch(local_branch_name)?));
                } else {
                    eprintln!("Remote HEAD ({}) pointer is invalid", pointer_name);
                }
            } else {
                eprintln!("Remote HEAD does not point to a symbolic target");
            }
        }

        Ok(None)
    }
pub fn default_branch(&self) -> Result<Branch, String> {
// This is a bit of a guessing game.
//
// In the best case, there is only one remote. Then, we can check <remote>/HEAD to get the
// default remote branch.
//
// If there are multiple remotes, we first check whether they all have the same
// <remote>/HEAD branch. If yes, good! If not, we use whatever "origin" uses, if that
// exists. If it does not, there is no way to reliably get a remote default branch.
//
// In this case, we just try to guess a local branch from a list. If even that does not
// work, well, bad luck.
let remotes = self.remotes()?;
if remotes.len() == 1 {
let remote_name = &remotes[0];
if let Some(default_branch) = self.get_remote_default_branch(remote_name)? {
return Ok(default_branch);
}
} else {
let mut default_branches: Vec<Branch> = vec![];
for remote_name in remotes {
if let Some(default_branch) = self.get_remote_default_branch(&remote_name)? {
default_branches.push(default_branch)
}
}
if !default_branches.is_empty()
&& (default_branches.len() == 1
|| default_branches
.windows(2)
.all(|w| w[0].name() == w[1].name()))
{
return Ok(default_branches.remove(0));
}
}
for branch_name in &vec!["main", "master"] {
if let Ok(branch) = self.0.find_branch(branch_name, git2::BranchType::Local) {
return Ok(Branch(branch));
}
}
Err(String::from("Could not determine default branch"))
}
// Looks like there is no distinguishing between the error cases
@@ -1105,10 +1177,10 @@ impl RepoHandle {
})?;
if branch_name != name
&& !branch_name.ends_with(&format!("{}{}", crate::BRANCH_NAMESPACE_SEPARATOR, name))
&& !branch_name.ends_with(&format!("{}{}", super::BRANCH_NAMESPACE_SEPARATOR, name))
{
return Err(WorktreeRemoveFailureReason::Error(format!(
"Branch {} is checked out in worktree, this does not look correct",
"Branch \"{}\" is checked out in worktree, this does not look correct",
&branch_name
)));
}
@@ -1275,7 +1347,7 @@ impl RepoHandle {
let mut unmanaged_worktrees = Vec::new();
for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? {
let dirname = crate::path_as_string(
let dirname = path::path_as_string(
entry
.map_err(|error| error.to_string())?
.path()
@@ -1308,7 +1380,7 @@ impl RepoHandle {
},
};
if dirname == crate::GIT_MAIN_WORKTREE_DIRECTORY {
if dirname == worktree::GIT_MAIN_WORKTREE_DIRECTORY {
continue;
}
if dirname == WORKTREE_CONFIG_FILE_NAME {
@@ -1327,7 +1399,7 @@ impl RepoHandle {
}
pub fn detect_worktree(path: &Path) -> bool {
path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY).exists()
path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY).exists()
}
}
@@ -1396,6 +1468,15 @@ impl Branch<'_> {
self.0.delete().map_err(convert_libgit2_error)
}
pub fn basename(&self) -> Result<String, String> {
let name = self.name()?;
if let Some((_prefix, basename)) = name.split_once('/') {
Ok(basename.to_string())
} else {
Ok(name)
}
}
// only used internally in this module, exposes libgit2 details
fn as_reference(&self) -> &git2::Reference {
self.0.get()
@@ -1441,6 +1522,20 @@ impl RemoteHandle<'_> {
.to_string()
}
    /// Whether the underlying libgit2 remote currently has an open
    /// connection. Takes `&mut self` because `git2::Remote::connected()`
    /// requires it.
    pub fn connected(&mut self) -> bool {
        self.0.connected()
    }
    /// Asks the remote for its default branch, returning the full reference
    /// name as a `String`.
    ///
    /// Requires an established connection to the remote (see the caller in
    /// `get_remote_default_branch`), otherwise libgit2 reports an error.
    ///
    /// # Panics
    ///
    /// Panics if the branch name reported by the remote is not valid UTF-8.
    pub fn default_branch(&self) -> Result<String, String> {
        Ok(self
            .0
            .default_branch()
            .map_err(convert_libgit2_error)?
            .as_str()
            .expect("Remote branch name is not valid utf-8")
            .to_string())
    }
pub fn is_pushable(&self) -> Result<bool, String> {
let remote_type = detect_remote_type(self.0.url().expect("Remote name is not valid utf-8"))
.ok_or_else(|| String::from("Could not detect remote type"))?;
@@ -1486,7 +1581,7 @@ pub fn clone_repo(
) -> Result<(), Box<dyn std::error::Error>> {
let clone_target = match is_worktree {
false => path.to_path_buf(),
true => path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY),
true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
};
print_action(&format!(
@@ -1531,6 +1626,24 @@ pub fn clone_repo(
repo.rename_remote(&origin, &remote.name)?;
}
// Initialize local branches. For all remote branches, we set up local
// tracking branches with the same name (just without the remote prefix).
for remote_branch in repo.remote_branches()? {
let local_branch_name = remote_branch.basename()?;
if repo.find_local_branch(&local_branch_name).is_ok() {
continue;
}
// Ignore <remote>/HEAD, as this is not something we can check out
if local_branch_name == "HEAD" {
continue;
}
let mut local_branch = repo.create_branch(&local_branch_name, &remote_branch.commit()?)?;
local_branch.set_upstream(&remote.name, &local_branch_name)?;
}
// If there is no head_branch, we most likely cloned an empty repository and
// there is no point in setting any upstreams.
if let Ok(mut active_branch) = repo.head_branch() {

View File

@@ -1,4 +1,6 @@
use crate::RepoHandle;
use super::config;
use super::path;
use super::repo;
use comfy_table::{Cell, Table};
@@ -21,7 +23,7 @@ fn add_table_header(table: &mut Table) {
fn add_repo_status(
table: &mut Table,
repo_name: &str,
repo_handle: &crate::RepoHandle,
repo_handle: &repo::RepoHandle,
is_worktree: bool,
) -> Result<(), String> {
let repo_status = repo_handle.status(is_worktree)?;
@@ -65,11 +67,11 @@ fn add_repo_status(
" <{}>{}",
remote_branch_name,
&match remote_tracking_status {
crate::RemoteTrackingStatus::UpToDate =>
repo::RemoteTrackingStatus::UpToDate =>
String::from(" \u{2714}"),
crate::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
crate::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
crate::RemoteTrackingStatus::Diverged(d1, d2) =>
repo::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
repo::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
repo::RemoteTrackingStatus::Diverged(d1, d2) =>
format!(" [+{}/-{}]", &d1, &d2),
}
)
@@ -99,7 +101,7 @@ fn add_repo_status(
// Don't return table, return a type that implements Display(?)
pub fn get_worktree_status_table(
repo: &crate::RepoHandle,
repo: &repo::RepoHandle,
directory: &Path,
) -> Result<(impl std::fmt::Display, Vec<String>), String> {
let worktrees = repo.get_worktrees()?;
@@ -111,7 +113,7 @@ pub fn get_worktree_status_table(
for worktree in &worktrees {
let worktree_dir = &directory.join(&worktree.name());
if worktree_dir.exists() {
let repo = match crate::RepoHandle::open(worktree_dir, false) {
let repo = match repo::RepoHandle::open(worktree_dir, false) {
Ok(repo) => repo,
Err(error) => {
errors.push(format!(
@@ -132,7 +134,7 @@ pub fn get_worktree_status_table(
));
}
}
for worktree in RepoHandle::find_unmanaged_worktrees(repo, directory)? {
for worktree in repo::RepoHandle::find_unmanaged_worktrees(repo, directory)? {
errors.push(format!(
"Found {}, which is not a valid worktree directory!",
&worktree
@@ -141,13 +143,13 @@ pub fn get_worktree_status_table(
Ok((table, errors))
}
pub fn get_status_table(config: crate::Config) -> Result<(Vec<Table>, Vec<String>), String> {
pub fn get_status_table(config: config::Config) -> Result<(Vec<Table>, Vec<String>), String> {
let mut errors = Vec::new();
let mut tables = Vec::new();
for tree in config.trees()? {
let repos = tree.repos.unwrap_or_default();
let root_path = crate::expand_path(Path::new(&tree.root));
let root_path = path::expand_path(Path::new(&tree.root));
let mut table = Table::new();
add_table_header(&mut table);
@@ -163,12 +165,12 @@ pub fn get_status_table(config: crate::Config) -> Result<(Vec<Table>, Vec<String
continue;
}
let repo_handle = crate::RepoHandle::open(&repo_path, repo.worktree_setup);
let repo_handle = repo::RepoHandle::open(&repo_path, repo.worktree_setup);
let repo_handle = match repo_handle {
Ok(repo) => repo,
Err(error) => {
if error.kind == crate::RepoErrorKind::NotFound {
if error.kind == repo::RepoErrorKind::NotFound {
errors.push(format!(
"{}: No git repository found. Run sync?",
&repo.name
@@ -206,8 +208,8 @@ fn add_worktree_table_header(table: &mut Table) {
fn add_worktree_status(
table: &mut Table,
worktree: &crate::repo::Worktree,
repo: &crate::RepoHandle,
worktree: &repo::Worktree,
repo: &repo::RepoHandle,
) -> Result<(), String> {
let repo_status = repo.status(false)?;
@@ -272,13 +274,13 @@ pub fn show_single_repo_status(
let mut table = Table::new();
let mut warnings = Vec::new();
let is_worktree = crate::RepoHandle::detect_worktree(path);
let is_worktree = repo::RepoHandle::detect_worktree(path);
add_table_header(&mut table);
let repo_handle = crate::RepoHandle::open(path, is_worktree);
let repo_handle = repo::RepoHandle::open(path, is_worktree);
if let Err(error) = repo_handle {
if error.kind == crate::RepoErrorKind::NotFound {
if error.kind == repo::RepoErrorKind::NotFound {
return Err(String::from("Directory is not a git directory"));
} else {
return Err(format!("Opening repository failed: {}", error));

296
src/tree.rs Normal file
View File

@@ -0,0 +1,296 @@
use std::fs;
use std::path::{Path, PathBuf};
use super::config;
use super::output::*;
use super::path;
use super::repo;
use super::worktree;
/// A tree of repositories: a root directory together with the repositories
/// that are supposed to live underneath it.
pub struct Tree {
    /// Root directory of the tree.
    pub root: String,
    /// Repositories expected under the root.
    pub repos: Vec<repo::Repo>,
}
/// Returns the paths of all git repositories found under `root_path` that
/// are not part of `managed_repos`.
///
/// A repository counts as managed when its on-disk location equals the
/// location derived from the configuration (`root_path` joined with the
/// repo's full name).
pub fn find_unmanaged_repos(
    root_path: &Path,
    managed_repos: &[repo::Repo],
) -> Result<Vec<PathBuf>, String> {
    let mut unmanaged_repos = Vec::new();

    for repo_path in find_repo_paths(root_path)? {
        // `root_path` is already a `&Path`; the original `Path::new(root_path)`
        // wrapper was a no-op.
        if !managed_repos
            .iter()
            .any(|r| root_path.join(r.fullname()) == repo_path)
        {
            unmanaged_repos.push(repo_path);
        }
    }
    Ok(unmanaged_repos)
}
/// Synchronizes all configured trees: clones/initializes missing
/// repositories, reconciles remotes, and warns about repositories on disk
/// that no tree manages.
///
/// Returns `Ok(true)` when everything succeeded, `Ok(false)` when at least
/// one repository failed (errors are printed, not propagated), and `Err`
/// only for configuration-level failures.
pub fn sync_trees(config: config::Config, init_worktree: bool) -> Result<bool, String> {
    let mut failures = false;

    let mut unmanaged_repos_absolute_paths = vec![];
    let mut managed_repos_absolute_paths = vec![];

    let trees = config.trees()?;

    for tree in trees {
        let repos: Vec<repo::Repo> = tree
            .repos
            .unwrap_or_default()
            .into_iter()
            .map(|repo| repo.into_repo())
            .collect();

        let root_path = path::expand_path(Path::new(&tree.root));

        for repo in &repos {
            managed_repos_absolute_paths.push(root_path.join(repo.fullname()));
            match sync_repo(&root_path, repo, init_worktree) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        match find_unmanaged_repos(&root_path, &repos) {
            Ok(repos) => {
                unmanaged_repos_absolute_paths.extend(repos);
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    // A path can be "unmanaged" from one tree's perspective while being
    // managed by another tree, so only warn about paths no tree manages.
    // `Vec::contains` replaces the hand-rolled `iter().any(..)` comparison.
    for unmanaged_repo_absolute_path in &unmanaged_repos_absolute_paths {
        if managed_repos_absolute_paths.contains(unmanaged_repo_absolute_path) {
            continue;
        }
        print_warning(&format!(
            "Found unmanaged repository: \"{}\"",
            path::path_as_string(unmanaged_repo_absolute_path)
        ));
    }

    Ok(!failures)
}
/// Finds repositories recursively, returning their path
pub fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
let mut repos = Vec::new();
let git_dir = path.join(".git");
let git_worktree = path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY);
if git_dir.exists() || git_worktree.exists() {
repos.push(path.to_path_buf());
} else {
match fs::read_dir(path) {
Ok(contents) => {
for content in contents {
match content {
Ok(entry) => {
let path = entry.path();
if path.is_symlink() {
continue;
}
if path.is_dir() {
match find_repo_paths(&path) {
Ok(ref mut r) => repos.append(r),
Err(error) => return Err(error),
}
}
}
Err(e) => {
return Err(format!("Error accessing directory: {}", e));
}
};
}
}
Err(e) => {
return Err(format!(
"Failed to open \"{}\": {}",
&path.display(),
match e.kind() {
std::io::ErrorKind::NotADirectory =>
String::from("directory expected, but path is not a directory"),
std::io::ErrorKind::NotFound => String::from("not found"),
_ => format!("{:?}", e.kind()),
}
));
}
};
}
Ok(repos)
}
/// Brings a single repository on disk in sync with its configuration.
///
/// Clones or initializes the repository when missing, optionally creates an
/// initial worktree for worktree setups, and reconciles the configured
/// remotes (add missing, fix URLs, delete stale ones).
fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result<(), String> {
    let repo_path = root_path.join(&repo.fullname());
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut newly_created = false;

    // Syncing a repository can have a few different flows, depending on the repository
    // that is to be cloned and the local directory:
    //
    // * If the local directory already exists, we have to make sure that it matches the
    //   worktree configuration, as there is no way to convert. If the sync is supposed
    //   to be worktree-aware, but the local directory is not, we abort. Note that we could
    //   also automatically convert here. In any case, the other direction (converting a
    //   worktree repository to non-worktree) cannot work, as we'd have to throw away the
    //   worktrees.
    //
    // * If the local directory does not yet exist, we have to actually do something ;). If
    //   no remote is specified, we just initialize a new repository (git init) and are done.
    //
    //   If there are (potentially multiple) remotes configured, we have to clone. We assume
    //   that the first remote is the canonical one that we do the first clone from. After
    //   cloning, we just add the other remotes as usual (as if they were added to the config
    //   afterwards)
    //
    // Branch handling:
    //
    // Handling the branches on checkout is a bit magic. For minimum surprises, we just set
    // up local tracking branches for all remote branches.
    if repo_path.exists()
        && repo_path
            .read_dir()
            .map_err(|error| error.to_string())?
            .next()
            .is_some()
    {
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        };
    } else if repo.remotes.as_ref().map_or(true, |r| r.is_empty()) {
        // No remotes configured (was `matches!(&repo.remotes, None) || ..unwrap()..`):
        // just `git init` a fresh repository.
        print_repo_action(
            &repo.name,
            "Repository does not have remotes configured, initializing new",
        );
        repo::RepoHandle::init(&repo_path, repo.worktree_setup)
            .map_err(|e| format!("Repository failed during init: {}", e))?;
        print_repo_success(&repo.name, "Repository created");
    } else {
        // unwrap is safe: the previous branch handled `None` and empty lists.
        let first = repo.remotes.as_ref().unwrap().first().unwrap();

        repo::clone_repo(first, &repo_path, repo.worktree_setup)
            .map_err(|e| format!("Repository failed during clone: {}", e))?;
        print_repo_success(&repo.name, "Repository successfully cloned");

        newly_created = true;
    }

    let repo_handle = match repo::RepoHandle::open(&repo_path, repo.worktree_setup) {
        Ok(repo) => repo,
        Err(error) => {
            // Give a more helpful message when the mismatch is only about the
            // worktree setting.
            if !repo.worktree_setup && repo::RepoHandle::open(&repo_path, true).is_ok() {
                return Err(String::from(
                    "Repo already exists, but is using a worktree setup",
                ));
            } else {
                return Err(format!("Opening repository failed: {}", error));
            }
        }
    };

    if newly_created && repo.worktree_setup && init_worktree {
        match repo_handle.default_branch() {
            Ok(branch) => {
                worktree::add_worktree(&repo_path, &branch.name()?, None, None, false)?;
            }
            Err(_error) => print_repo_error(
                &repo.name,
                "Could not determine default branch, skipping worktree initialization",
            ),
        }
    }

    if let Some(remotes) = &repo.remotes {
        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Add configured remotes that are missing, fix mismatching URLs.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;

            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();

                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        };
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }

        // Delete remotes that exist locally but are no longer configured.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }

    Ok(())
}
/// Returns the directory that actually holds the git data: the path itself
/// for a regular checkout, or the main-worktree directory below it for a
/// worktree setup.
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
    // `if` instead of `match` on a bool (clippy::match_bool).
    if is_worktree {
        path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY)
    } else {
        path.to_path_buf()
    }
}

202
src/worktree.rs Normal file
View File

@@ -0,0 +1,202 @@
use std::path::Path;
use super::output::*;
use super::repo;
pub const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
// The logic about the base branch and the tracking branch is as follows:
//
// * If a branch with the same name does not exist and no track is given, use the default
// branch
//
// * If a branch with the same name exists and no track is given, use that
//
// * If a branch with the same name does not exist and track is given, use the
// local branch that tracks that branch
//
// * If a branch with the same name exists and track is given, use the locally
// existing branch. If the locally existing branch is not the local branch to
// the remote tracking branch, issue a warning
/// Creates a new worktree called `name` under `directory`.
///
/// * `subdirectory` — optional subdirectory of `directory` to place the
///   worktree in.
/// * `track` — optional `(remote, branch)` pair the worktree's branch
///   should track.
/// * `no_track` — disables all upstream handling, overriding both `track`
///   and the per-repository worktree configuration.
///
/// The rules for choosing the base branch and the tracking branch are
/// described in the comment block above this function.
pub fn add_worktree(
    directory: &Path,
    name: &str,
    subdirectory: Option<&Path>,
    track: Option<(&str, &str)>,
    no_track: bool,
) -> Result<(), String> {
    let repo = repo::RepoHandle::open(directory, true).map_err(|error| match error.kind {
        repo::RepoErrorKind::NotFound => {
            String::from("Current directory does not contain a worktree setup")
        }
        _ => format!("Error opening repo: {}", error),
    })?;

    // Optional per-repository worktree configuration (grm.toml); may carry
    // default tracking settings used below.
    let config = repo::read_worktree_root_config(directory)?;

    if repo.find_worktree(name).is_ok() {
        return Err(format!("Worktree {} already exists", &name));
    }

    let path = match subdirectory {
        Some(dir) => directory.join(dir).join(name),
        None => directory.join(Path::new(name)),
    };

    // Tracks whether the remote branch already exists; when it does not, the
    // local branch is pushed (and then set as upstream) further down.
    let mut remote_branch_exists = false;

    let mut target_branch = match repo.find_local_branch(name) {
        // A local branch with the same name already exists: reuse it. If the
        // requested tracking branch differs from its current upstream, only
        // warn — the existing upstream is kept.
        Ok(branchref) => {
            if !no_track {
                if let Some((remote_name, remote_branch_name)) = track {
                    let remote_branch = repo.find_remote_branch(remote_name, remote_branch_name);
                    if let Ok(remote_branch) = remote_branch {
                        remote_branch_exists = true;
                        if let Ok(local_upstream_branch) = branchref.upstream() {
                            if remote_branch.name()? != local_upstream_branch.name()? {
                                print_warning(&format!(
                                    "You specified a tracking branch ({}/{}) for an existing branch ({}), but \
                                    it differs from the current upstream ({}). Will keep current upstream"
                                    , remote_name, remote_branch_name, branchref.name()?, local_upstream_branch.name()?))
                            }
                        }
                    }
                }
            }
            branchref
        }
        // No local branch yet: pick a commit to branch off and create it.
        Err(_) => {
            // Fallback base: the repository's default branch.
            let default_checkout = || repo.default_branch()?.to_commit();

            let checkout_commit;
            if no_track {
                checkout_commit = default_checkout()?;
            } else {
                match track {
                    // Explicit tracking branch requested: branch off it when
                    // it exists, otherwise fall back to the default branch.
                    Some((remote_name, remote_branch_name)) => {
                        let remote_branch =
                            repo.find_remote_branch(remote_name, remote_branch_name);
                        match remote_branch {
                            Ok(branch) => {
                                remote_branch_exists = true;
                                checkout_commit = branch.to_commit()?;
                            }
                            Err(_) => {
                                remote_branch_exists = false;
                                checkout_commit = default_checkout()?;
                            }
                        }
                    }
                    // No explicit tracking: consult the worktree config for a
                    // default remote, otherwise use the default branch.
                    None => match &config {
                        None => checkout_commit = default_checkout()?,
                        Some(config) => match &config.track {
                            None => checkout_commit = default_checkout()?,
                            Some(track_config) => {
                                if track_config.default {
                                    let remote_branch =
                                        repo.find_remote_branch(&track_config.default_remote, name);
                                    match remote_branch {
                                        Ok(branch) => {
                                            remote_branch_exists = true;
                                            checkout_commit = branch.to_commit()?;
                                        }
                                        Err(_) => {
                                            checkout_commit = default_checkout()?;
                                        }
                                    }
                                } else {
                                    checkout_commit = default_checkout()?;
                                }
                            }
                        },
                    },
                };
            }
            repo.create_branch(name, &checkout_commit)?
        }
    };

    // Pushes `branch_name` to `remote_branch_name` on `remote`, refusing
    // remotes that are not pushable.
    fn push(
        remote: &mut repo::RemoteHandle,
        branch_name: &str,
        remote_branch_name: &str,
        repo: &repo::RepoHandle,
    ) -> Result<(), String> {
        if !remote.is_pushable()? {
            return Err(format!(
                "Cannot push to non-pushable remote {}",
                remote.url()
            ));
        }
        remote.push(branch_name, remote_branch_name, repo)
    }

    if !no_track {
        if let Some((remote_name, remote_branch_name)) = track {
            // Explicit tracking: push first when the remote branch does not
            // exist yet, then set the upstream.
            if remote_branch_exists {
                target_branch.set_upstream(remote_name, remote_branch_name)?;
            } else {
                let mut remote = repo
                    .find_remote(remote_name)
                    .map_err(|error| format!("Error getting remote {}: {}", remote_name, error))?
                    .ok_or_else(|| format!("Remote {} not found", remote_name))?;

                push(
                    &mut remote,
                    &target_branch.name()?,
                    remote_branch_name,
                    &repo,
                )?;

                target_branch.set_upstream(remote_name, remote_branch_name)?;
            }
        } else if let Some(config) = config {
            // No explicit tracking given: apply the configured default
            // tracking, if any.
            if let Some(track_config) = config.track {
                if track_config.default {
                    let remote_name = track_config.default_remote;

                    if remote_branch_exists {
                        target_branch.set_upstream(&remote_name, name)?;
                    } else {
                        // The remote branch name may get a configured prefix
                        // ("<prefix><separator><name>").
                        let remote_branch_name = match track_config.default_remote_prefix {
                            Some(prefix) => {
                                format!("{}{}{}", &prefix, super::BRANCH_NAMESPACE_SEPARATOR, &name)
                            }
                            None => name.to_string(),
                        };

                        let mut remote = repo
                            .find_remote(&remote_name)
                            .map_err(|error| {
                                format!("Error getting remote {}: {}", remote_name, error)
                            })?
                            .ok_or_else(|| format!("Remote {} not found", remote_name))?;

                        if !remote.is_pushable()? {
                            return Err(format!(
                                "Cannot push to non-pushable remote {}",
                                remote.url()
                            ));
                        }

                        push(
                            &mut remote,
                            &target_branch.name()?,
                            &remote_branch_name,
                            &repo,
                        )?;

                        target_branch.set_upstream(&remote_name, &remote_branch_name)?;
                    }
                }
            }
        }
    }

    if let Some(subdirectory) = subdirectory {
        // NOTE(review): this creates `subdirectory` relative to the current
        // working directory, while `path` above is relative to `directory` —
        // confirm callers always run with `directory` as the CWD.
        std::fs::create_dir_all(subdirectory).map_err(|error| error.to_string())?;
    }

    repo.new_worktree(name, &path, &target_branch)?;

    Ok(())
}