17 Commits

Author SHA1 Message Date
d73314cefb tmp: tests 2022-06-13 22:38:43 +02:00
239e587cbc Use new cargo fmt 2022-06-13 22:38:43 +02:00
742c8587ce Enable output in rust unit tests 2022-06-13 22:38:27 +02:00
Hannes Körber
fb45087477 Update handling of branches on worktree setup 2022-06-13 22:37:58 +02:00
Hannes Körber
a3dc2df37d Quote branch name on output 2022-06-13 22:37:44 +02:00
Hannes Körber
0f45708e81 Improve default branch guessing 2022-06-13 22:37:36 +02:00
Hannes Körber
d036b53037 Print ok-ish stuff to stdout 2022-06-13 22:37:27 +02:00
2be1dec818 Add some comments about repo syncing 2022-06-13 22:37:19 +02:00
f89c9c2ca5 Do not fail on empty clone target 2022-06-13 22:37:11 +02:00
62c2fc24cd Initialize local branches on clone 2022-06-13 22:37:06 +02:00
d0425e2fdb Add function to get all remote branches 2022-06-13 22:37:03 +02:00
a8665ae741 Add function to get basename of branch 2022-06-13 22:36:54 +02:00
d26b6e799c Refactor default_branch() for readability 2022-06-13 22:36:47 +02:00
60eb059f60 providers: Use references for field access 2022-06-13 22:32:31 +02:00
d9f416018d Use opaque type for auth token
So we cannot accidentally output it, as it does not implement
`Display`.
2022-06-13 22:32:31 +02:00
Max Volk
461c69dacb Reword some of the documentation and spelling fixes 2022-06-13 22:32:26 +02:00
ef21f9cad4 Remove accidentally added file 2022-06-13 22:32:26 +02:00
60 changed files with 2036 additions and 3833 deletions

1
.github/FUNDING.yml vendored
View File

@@ -1 +0,0 @@
github: hakoerber

View File

@@ -1,3 +1,49 @@
Check out [the developer # Contributing
documentation](https://hakoerber.github.io/git-repo-manager/developing.html) if
you want to contribute! GRM is still in very early development. I started GRM mainly to scratch my own
itches (and am heavily dogfooding it). If you have a new use case for GRM, go
for it!
The branching strategy is a simplified
[git-flow](https://nvie.com/posts/a-successful-git-branching-model/).
* `master` is the "production" branch. Each commit is a new release.
* `develop` is the branch where new stuff is coming in.
* feature branches branch off of `develop` and merge back into it.
So to contribute, just fork the repo and create a pull request against
`develop`. If you plan bigger changes, please consider opening an issue first,
so we can discuss it.
If you want, add yourself to the `CONTRIBUTORS` file in your pull request.
## Code formatting
For Rust, just use `cargo fmt`. For Python, use
[black](https://github.com/psf/black). I'd rather not spend any effort in
configuring the formatters (not possible for black anyway).
## Tooling
GRM uses [`just`](https://github.com/casey/just) as a command runner. See
[here](https://github.com/casey/just#installation) for installation
instructions (it's most likely just a simple `cargo install just`).
## Testing
There are two distinct test suites: One for unit test (`just test-unit`) and
integration tests (`just test-integration`) that is part of the rust crate, and
a separate e2e test suite in python (`just test-e2e`).
To run all tests, run `just test`.
When contributing, consider whether it makes sense to add tests which could
prevent regressions in the future. When fixing bugs, it makes sense to add
tests that expose the wrong behaviour beforehand.
## Documentation
The documentation lives in `docs` and uses
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
features here!

View File

@@ -1,3 +1 @@
nonnominandus nonnominandus
Maximilian Volk
Baptiste (@BapRx)

894
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "git-repo-manager" name = "git-repo-manager"
version = "0.7.15" version = "0.7.1"
edition = "2021" edition = "2021"
authors = [ authors = [
@@ -23,12 +23,12 @@ repository = "https://github.com/hakoerber/git-repo-manager"
readme = "README.md" readme = "README.md"
# Required for `std::path::Path::is_symlink()`. Will be released with 1.57. # Required for `std::path::Path::is_symlink()`. Will be released with 1.57.
rust-version = "1.58" rust-version = "1.57"
license = "GPL-3.0-only" license = "GPL-3.0-only"
[profile.e2e-tests] [profile.e2e-tests]
inherits = "dev" inherits = "release"
[lib] [lib]
name = "grm" name = "grm"
@@ -41,36 +41,36 @@ path = "src/grm/main.rs"
[dependencies] [dependencies]
[dependencies.toml] [dependencies.toml]
version = "=0.8.6" version = "=0.5.9"
[dependencies.serde] [dependencies.serde]
version = "=1.0.190" version = "=1.0.137"
features = ["derive"] features = ["derive"]
[dependencies.git2] [dependencies.git2]
version = "=0.18.1" version = "=0.14.4"
[dependencies.shellexpand] [dependencies.shellexpand]
version = "=3.1.0" version = "=2.1.0"
[dependencies.clap] [dependencies.clap]
version = "=4.4.7" version = "=3.1.18"
features = ["derive", "cargo"] features = ["derive", "cargo"]
[dependencies.console] [dependencies.console]
version = "=0.15.7" version = "=0.15.0"
[dependencies.regex] [dependencies.regex]
version = "=1.10.2" version = "=1.5.6"
[dependencies.comfy-table] [dependencies.comfy-table]
version = "=7.1.0" version = "=5.0.1"
[dependencies.serde_yaml] [dependencies.serde_yaml]
version = "=0.9.27" version = "=0.8.24"
[dependencies.serde_json] [dependencies.serde_json]
version = "=1.0.108" version = "=1.0.81"
[dependencies.isahc] [dependencies.isahc]
version = "=1.7.2" version = "=1.7.2"
@@ -78,7 +78,7 @@ default-features = false
features = ["json", "http2", "text-decoding"] features = ["json", "http2", "text-decoding"]
[dependencies.parse_link_header] [dependencies.parse_link_header]
version = "=0.3.3" version = "=0.3.2"
[dependencies.url-escape] [dependencies.url-escape]
version = "=0.1.1" version = "=0.1.1"

View File

@@ -1,89 +1,61 @@
set positional-arguments set positional-arguments
set shell := ["/bin/bash", "-c"] target := "x86_64-unknown-linux-musl"
static_target := "x86_64-unknown-linux-musl" check: test
cargo check
cargo := "cargo" cargo fmt --check
cargo clippy --no-deps -- -Dwarnings
check: fmt-check lint test
{{cargo}} check
clean:
{{cargo}} clean
git clean -f -d -X
fmt: fmt:
{{cargo}} fmt cargo fmt
git ls-files | grep '\.py$' | xargs isort
git ls-files | grep '\.py$' | xargs black git ls-files | grep '\.py$' | xargs black
git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --write
fmt-check:
{{cargo}} fmt --check
git ls-files | grep '\.py$' | xargs black --check
git ls-files | grep '\.sh$' | xargs -L 1 shfmt --indent 4 --diff
lint: lint:
{{cargo}} clippy --no-deps -- -Dwarnings cargo clippy --no-deps
git ls-files | grep '\.py$' | xargs ruff --ignore E501
git ls-files | grep '\.sh$' | xargs -L 1 shellcheck --norc
lint-fix: lint-fix:
{{cargo}} clippy --no-deps --fix cargo clippy --no-deps --fix
build-release: release:
{{cargo}} build --release cargo build --release --target {{target}}
build-release-static:
{{cargo}} build --release --target {{static_target}} --features=static-build
pushall:
for r in $(git remote) ; do \
for branch in develop master ; do \
git push $r $branch ; \
done ; \
done
release-patch:
./release.sh patch
test-binary: test-binary:
env \ env \
GITHUB_API_BASEURL=http://rest:5000/github \ GITHUB_API_BASEURL=http://rest:5000/github \
GITLAB_API_BASEURL=http://rest:5000/gitlab \ GITLAB_API_BASEURL=http://rest:5000/gitlab \
{{cargo}} build --profile e2e-tests --target {{static_target}} --features=static-build cargo build --target {{target}} --profile e2e-tests --features=static-build
install: install:
{{cargo}} install --path . cargo install --path .
install-static: install-static:
{{cargo}} install --target {{static_target}} --features=static-build --path . cargo install --target {{target}} --features=static-build --path .
build: build:
{{cargo}} build cargo build
build-static: build-static:
{{cargo}} build --target {{static_target}} --features=static-build cargo build --target {{target}} --features=static-build
test: test-unit test-integration test-e2e test: test-unit test-integration test-e2e
test-unit +tests="": test-unit +tests="":
{{cargo}} test --lib --bins -- --show-output {{tests}} cargo test --lib --bins -- --show-output {{tests}}
test-integration: test-integration:
{{cargo}} test --test "*" cargo test --test "*"
test-e2e +tests=".": test-binary test-e2e +tests=".": test-binary
cd ./e2e_tests \ cd ./e2e_tests \
&& docker compose rm --stop -f \ && docker-compose rm --stop -f \
&& docker compose build \ && docker-compose build \
&& docker compose run \ && docker-compose run \
--rm \ --rm \
-v $PWD/../target/x86_64-unknown-linux-musl/e2e-tests/grm:/grm \ -v $PWD/../target/{{target}}/e2e-tests/grm:/grm \
pytest \ pytest \
"GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest --exitfirst -p no:cacheprovider --color=yes "$@"" \ "GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest -p no:cacheprovider --color=yes "$@"" \
&& docker compose rm --stop -f && docker-compose rm --stop -f
update-dependencies: update-cargo-dependencies update-dependencies: update-cargo-dependencies
@@ -93,6 +65,3 @@ update-cargo-dependencies:
&& . ./venv/bin/activate \ && . ./venv/bin/activate \
&& pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \ && pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \
&& ./update-cargo-dependencies.py && ./update-cargo-dependencies.py
wait:
read -p "[ENTER] to continue "

View File

@@ -1,10 +1,7 @@
# GRM — Git Repository Manager # GRM — Git Repository Manager
GRM helps you manage git repositories in a declarative way. Configure your GRM helps you manage git repositories in a declarative way. Configure your
repositories in a [TOML](https://toml.io/) or YAML file, GRM does the rest. repositories in a [TOML](https://toml.io/) file, GRM does the rest.
Also, GRM can be used to work with git worktrees in an opinionated,
straightforward fashion.
**Take a look at the [official documentation](https://hakoerber.github.io/git-repo-manager/) **Take a look at the [official documentation](https://hakoerber.github.io/git-repo-manager/)
for installation & quickstart.** for installation & quickstart.**
@@ -34,29 +31,26 @@ in once place?
This is how GRM came to be. I'm a fan of infrastructure-as-code, and GRM is a bit This is how GRM came to be. I'm a fan of infrastructure-as-code, and GRM is a bit
like Terraform for your local git repositories. Write a config, run the tool, and like Terraform for your local git repositories. Write a config, run the tool, and
your repos are ready. The only thing that is tracked by git is the list of your repos are ready. The only thing that is tracked by git it the list of
repositories itself. repositories itself.
# Future & Ideas
* Operations over all repos (e.g. pull)
* Show status of managed repositories (dirty, compare to remotes, ...)
# Optional Features
* Support multiple file formats (YAML, JSON).
* Add systemd timer unit to run regular syncs
# Crates # Crates
* [`toml`](https://docs.rs/toml/) for the configuration file. * [`toml`](https://docs.rs/toml/) for the configuration file
* [`serde`](https://docs.rs/serde/), together with * [`serde`](https://docs.rs/serde/) because we're using Rust, after all
[`serde_yaml`](https://docs.rs/serde_yaml/) and * [`git2`](https://docs.rs/git2/), a safe wrapper around `libgit2`, for all git operations
[`serde_json`](https://docs.rs/serde_json/). Because we're using Rust, after * [`clap`](https://docs.rs/clap/), [`console`](https://docs.rs/console/) and [`shellexpand`](https://docs.rs/shellexpand) for good UX
all.
* [`git2`](https://docs.rs/git2/), a safe wrapper around `libgit2`, for all git operations.
* [`clap`](https://docs.rs/clap/), [`console`](https://docs.rs/console/), [`comfy_table`](https://docs.rs/comfy-table/) and [`shellexpand`](https://docs.rs/shellexpand) for good UX.
* [`isahc`](https://docs.rs/isahc/) as the HTTP client for forge integrations.
# Links # Links
* [crates.io](https://crates.io/crates/git-repo-manager) * [crates.io](https://crates.io/crates/git-repo-manager)
# Mirrors
This repository can be found on multiple forges:
* https://github.com/hakoerber/git-repo-manager
* https://code.hkoerber.de/hannes/git-repo-manager/
* https://codeberg.org/hakoerber/git-repo-manager
* https://git.sr.ht/~hkoerber/git-repo-manager

View File

@@ -1,8 +1,9 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import json
import os
import subprocess import subprocess
import os
import json
import sys
import semver import semver
import tomlkit import tomlkit
@@ -93,7 +94,15 @@ for tier in ["dependencies", "dev-dependencies"]:
try: try:
cmd = subprocess.run( cmd = subprocess.run(
["cargo", "update", "--offline", "--aggressive", "--package", name], [
"cargo",
"update",
"-Z",
"no-index-update",
"--aggressive",
"--package",
name,
],
check=True, check=True,
capture_output=True, capture_output=True,
text=True, text=True,
@@ -127,7 +136,15 @@ while True:
spec = f"{package['name']}:{package['version']}" spec = f"{package['name']}:{package['version']}"
try: try:
cmd = subprocess.run( cmd = subprocess.run(
["cargo", "update", "--offline", "--aggressive", "--package", spec], [
"cargo",
"update",
"-Z",
"no-index-update",
"--aggressive",
"--package",
spec,
],
check=True, check=True,
capture_output=True, capture_output=True,
text=True, text=True,

View File

@@ -7,8 +7,3 @@ title = "Git Repo Manager"
[output.html] [output.html]
mathjax-support = true mathjax-support = true
# [output.linkcheck]
# follow-web-links = true
# traverse-parent-directories = false
# warning-policy = "error"

View File

@@ -1,20 +1,9 @@
# Summary # Summary
[Overview](./overview.md) - [Overview](./overview.md)
- [Installation](./installation.md) - [Installation](./installation.md)
- [Tutorial](./tutorial.md) - [Repository trees](./repos.md)
- [Managing Repositories](./repos.md)
- [Local Configuration](./local_configuration.md)
- [Forge Integrations](./forge_integration.md)
- [Git Worktrees](./worktrees.md) - [Git Worktrees](./worktrees.md)
- [Working with Worktrees](./worktree_working.md) - [Forge Integrations](./forge_integration.md)
- [Worktrees and Remotes](./worktree_remotes.md)
- [Behavior Details](./worktree_behavior.md)
- [FAQ](./faq.md) - [FAQ](./faq.md)
- [Developer Documentation](./developing.md) - [Contributing](./contributing.md)
- [Testing](./testing.md)
- [Dependency updates](./dependency_updates.md)
- [Releases](./releases.md)
- [Formatting & Style](./formatting_and_style.md)
- [The Docs Themselves](./documentation.md)

1
docs/src/contributing.md Symbolic link
View File

@@ -0,0 +1 @@
../../CONTRIBUTING.md

View File

@@ -1,10 +0,0 @@
# Dependency updates
Rust has the same problem as the node ecosystem, just a few magnitudes smaller:
Dependency sprawl. GRM has a dozen direct dependencies, but over 150 transitive
ones.
To keep them up to date, there is a script:
`depcheck/update-cargo-dependencies.py`. It updates direct dependencies to the
latest stable version and updates transitive dependencies where possible. To run
it, use `just update-dependencies`, which will create commits for each update.

View File

@@ -1,54 +0,0 @@
# Overview
GRM is still in very early development. I started GRM mainly to scratch my own
itches (and am heavily dogfooding it). If you have a new use case for GRM, go
for it!
## Contributing
To contribute, just fork the repo and create a pull request against `develop`.
If you plan bigger changes, please consider opening an issue first, so we can
discuss it.
If you want, add yourself to the `CONTRIBUTORS` file in your pull request.
## Branching strategy
The branching strategy is a simplified
[git-flow](https://nvie.com/posts/a-successful-git-branching-model/).
* `master` is the "production" branch. Each commit is a new release.
* `develop` is the branch where new stuff is coming in.
* feature branches branch off of `develop` and merge back into it.
Feature branches are not required, there are also changes happening directly on
`develop`.
## Required tooling
You will need the following tools:
* Rust (obviously) (easiest via `rustup`)
* Python3
* [`just`](https://github.com/casey/just), a command runner like `make`. See
[here](https://github.com/casey/just#installation) for installation
instructions (it's most likely just a simple `cargo install just`).
* Docker & docker-compose for the e2e tests
* `isort`, `black` and `shfmt` for formatting.
* `ruff` and `shellcheck` for linting.
* `mdbook` for the documentation
Here are the tools:
| Distribution | Command |
| ------------- | --------------------------------------------------------------------------------------------------- |
| Arch Linux | `pacman -S --needed python3 rustup just docker docker-compose python-black shfmt shellcheck mdbook` |
| Ubuntu/Debian | `apt-get install --no-install-recommends python3 docker.io docker-compose black shellcheck` |
Note that you will have to install `just` and `mdbook` manually on Ubuntu (e.g.
via `cargo install just mdbook` if your rust build environment is set up
correctly). Same for `shfmt`, which may just be a `go install
mvdan.cc/sh/v3/cmd/shfmt@latest`, depending on your go build environment.
For details about rustup and the toolchains, see [the installation
section](./installation.md).

View File

@@ -1,11 +0,0 @@
# Documentation
The documentation lives in the `docs` folder and uses
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
features here!
Using [GitHub actions](https://github.com/features/actions), the documentation
on `master` is automatically published to [the project
homepage](https://hakoerber.github.io/git-repo-manager/) via GitHub pages. See
`.github/workflows/gh-pages.yml` for the configuration of GitHub Actions.

View File

@@ -1,3 +1,10 @@
# FAQ # FAQ
Currently empty, as there are no questions that are asked frequently :D ## Why is the nightly toolchain required?
Building GRM currently requires nightly features due to the usage of
[`std::path::Path::is_symlink()`](https://doc.rust-lang.org/std/fs/struct.FileType.html#method.is_symlink).
See the [tracking issue](https://github.com/rust-lang/rust/issues/85748).
`is_symlink()` is actually available in rustc 1.57, so it will be on stable in
the near future. This would mean that GRM can be built using the stable toolchain!

View File

@@ -1,20 +1,19 @@
# Forge Integrations # Forge Integrations
In addition to managing repositories locally, `grm` also integrates with source In addition to manging repositories locally, `grm` also integrates with source
code hosting platforms. Right now, the following platforms are supported: code hosting platforms. Right now, the following platforms are supported:
* [GitHub](https://github.com/) * [GitHub](https://github.com/)
* [GitLab](https://gitlab.com/) * [GitLab](https://gitlab.com/)
Imagine you are just starting out with `grm` and want to clone all your Imagine you are just starting out with `grm` and want to clone all your repositories
repositories from GitHub. This is as simple as: from GitHub. This is as simple as:
```bash ```bash
$ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects $ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects
``` ```
You will end up with your projects cloned into You will end up with your projects cloned into `~/projects/{your_github_username}/`
`~/projects/{your_github_username}/`
## Authentication ## Authentication
@@ -35,8 +34,8 @@ See the GitLab documentation for personal access tokens:
The required scopes are a bit weird. Actually, the following should suffice: The required scopes are a bit weird. Actually, the following should suffice:
* `read_user` to get user information (required to get the current * * `read_user` to get user information (required to get the current authenticated
authenticated user name for the `--owner` filter. user name for the `--owner` filter.
* A scope that allows reading private repositories. (`read_repository` is just * A scope that allows reading private repositories. (`read_repository` is just
for *cloning* private repos). This unfortunately does not exist. for *cloning* private repos). This unfortunately does not exist.
@@ -107,7 +106,7 @@ The options in the file map to the command line options of the `grm repos sync
remote` command. remote` command.
You'd then run the `grm repos sync` command the same way as with a list of You'd then run the `grm repos sync` command the same way as with a list of
repositories in a configuration: repositories in a config:
```bash ```bash
$ grm repos sync --config example.config.toml $ grm repos sync --config example.config.toml
@@ -121,11 +120,11 @@ $ grm repos find config --config example.config.toml > repos.toml
$ grm repos sync config --config repos.toml $ grm repos sync config --config repos.toml
``` ```
## Using with self-hosted GitLab ## Using with selfhosted GitLab
By default, `grm` uses the default GitLab API endpoint By default, `grm` uses the default GitLab API endpoint
([https://gitlab.com](https://gitlab.com)). You can override the endpoint by ([https://gitlab.com](https://gitlab.com)). You can override the
specifying the `--api-url` parameter. Like this: endpoint by specifying the `--api-url` parameter. Like this:
```bash ```bash
$ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...] $ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...]
@@ -139,28 +138,26 @@ can be overridden with the `--force-ssh` switch.
## About the token command ## About the token command
To ensure maximum flexibility, `grm` has a single way to get the token it uses To ensure maximum flexibility, `grm` has a single way to get the token it uses
to authenticate: Specify a command that returns the token via stdout. This to authenticate: Specify a command that returns the token via stdout. This easily
easily integrates with password managers like integrates with password managers like [`pass`](https://www.passwordstore.org/).
[`pass`](https://www.passwordstore.org/).
Of course, you are also free to specify something like `echo mytoken` as the Of course, you are also free to specify something like `echo mytoken` as the
command, as long as you are OK with the security implications (like having the command, as long as you are ok with the security implications (like having the
token in clear text in your shell history). It may be better to have the token token in cleartext in your shell history). It may be better to have the token
in a file instead and read it: `cat ~/.gitlab_token`. in a file instead and read it: `cat ~/.gitlab_token`.
Generally, use whatever you want. The command just has to return successfully Generally, use whatever you want. The command just has to return sucessfully and
and return the token as the first line of stdout. return the token as the first line of stdout.
## Examples ## Examples
Maybe you just want to locally clone all repos from your GitHub user? Maybe you just want to locally clone all repos from your github user?
```bash ```bash
$ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token" $ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token"
``` ```
This will clone all repositories into This will clone all repositories into `~/github_projects/{your_github_username}`.
`~/github_projects/{your_github_username}`.
If instead you want to clone **all** repositories you have access to (e.g. via If instead you want to clone **all** repositories you have access to (e.g. via
organizations or other users' private repos you have access to), just change the organizations or other users' private repos you have access to), just change the
@@ -175,13 +172,12 @@ $ grm repos sync remote --provider github --access --root ~/github_projects --to
### GitHub ### GitHub
Unfortunately, GitHub does not have a nice API endpoint to get **private** Unfortunately, GitHub does not have a nice API endpoint to get **private**
repositories for a certain user repositories for a certain user ([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user) only returns public
([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user) repositories).
only returns public repositories).
Therefore, using `--user {user}` will only show public repositories for GitHub. Therefore, using `--user {user}` will only show public repositories for GitHub.
Note that this does not apply to `--access`: If you have access to another Note that this does not apply to `--access`: If you have access to another user's
user's private repository, it will be listed. private repository, it will be listed.
## Adding integrations ## Adding integrations
@@ -201,9 +197,9 @@ Each repo has to have the following properties:
* A name (which also acts as the identifier for diff between local and remote * A name (which also acts as the identifier for diff between local and remote
repositories) repositories)
* An SSH URL to push to * An SSH url to push to
* An HTTPS URL to clone and fetch from * An HTTPS url to clone and fetch from
* A flag that marks the repository as private * A flag that marks the repository as private
If you plan to implement another forge, please first open an issue so we can go If you plan to implement another forge, please first open an issue so we can
through the required setup. I'm happy to help! go through the required setup. I'm happy to help!

View File

@@ -1,45 +0,0 @@
# Formatting & Style
## Code formatting
I'm allergic to discussions about formatting. I'd rather make the computer do it
for me.
For Rust, just use `cargo fmt`. For Python, use
[black](https://github.com/psf/black). I'd rather not spend any effort in
configuring the formatters (not possible for black anyway). For shell scripts,
use [`shfmt`](https://github.com/mvdan/sh).
To autoformat all code, use `just fmt`
## Style
Honestly, no idea about style. I'm still learning Rust, so I'm trying to find a
good style. Just try to keep it consistent when you add code.
## Linting
You can use `just lint` to run all lints.
### Rust
Clippy is the guard that prevents shitty code from getting into the code base.
When running `just check`, any clippy suggestions will make the command fail.
So make clippy happy! The easiest way:
* Commit your changes (so clippy can change safely).
* Run `cargo clippy --fix` to do the easy changes automatically.
* Run `cargo clippy` and take a look at the messages.
Until now, I had no need to override or silence any clippy suggestions.
### Shell
`shellcheck` lints all shell scripts. As they change very rarely, this is not
too important.
## Unsafe code
Any `unsafe` code is forbidden for now globally via `#![forbid(unsafe_code)]`.
I cannot think of any reason GRM may need `unsafe`. If it comes up, it needs to
be discussed.

View File

@@ -2,44 +2,43 @@
## Installation ## Installation
Building GRM requires the Rust toolchain to be installed. The easiest way is Building GRM currently requires the nightly Rust toolchain. The easiest way
using [`rustup`](https://rustup.rs/). Make sure that rustup is properly is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
installed.
Make sure that the stable toolchain is installed: Make sure that the nightly toolchain is installed:
``` ```
$ rustup toolchain install stable $ rustup toolchain install nightly
``` ```
Then, install the build dependencies: Then, install the build dependencies:
| Distribution | Command | | Distribution | Command |
| ------------- | ------------------------------------------------------------------------------ | | ------------- | ------------------------------------------------------------------------------ |
| Arch Linux | `pacman -S --needed gcc openssl pkg-config` | | Archlinux | `pacman -S --needed gcc openssl pkg-config` |
| Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` | | Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` |
Then, it's a simple command to install the latest stable version: Then, it's a simple command to install the latest stable version:
```bash ```bash
$ cargo install git-repo-manager $ cargo +nightly install git-repo-manager
``` ```
If you're brave, you can also run the development build: If you're brave, you can also run the development build:
```bash ```bash
$ cargo install --git https://github.com/hakoerber/git-repo-manager.git --branch develop $ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
``` ```
## Static build ## Static build
Note that by default, you will get a dynamically linked executable. Note that by default, you will get a dynamically linked executable.
Alternatively, you can also build a statically linked binary. For this, you will Alternatively, you can also build a statically linked binary. For this, you
need `musl` and a few other build dependencies installed: will need `musl` and a few other build dependencies installed:
| Distribution | Command | | Distribution | Command |
| ------------- | --------------------------------------------------------------------------- | | ------------- | --------------------------------------------------------------------------- |
| Arch Linux | `pacman -S --needed gcc musl perl make` | | Archlinux | `pacman -S --needed gcc musl perl make` |
| Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` | | Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` |
(`perl` and `make` are required for the OpenSSL build script) (`perl` and `make` are required for the OpenSSL build script)
@@ -47,11 +46,11 @@ need `musl` and a few other build dependencies installed installed:
The, add the musl target via `rustup`: The, add the musl target via `rustup`:
``` ```
$ rustup target add x86_64-unknown-linux-musl $ rustup +nightly target add x86_64-unknown-linux-musl
``` ```
Then, use a modified build command to get a statically linked binary: Then, use a modified build command to get a statically linked binary:
``` ```
$ cargo install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build $ cargo +nightly install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build
``` ```

View File

@@ -1,90 +0,0 @@
# Local Configuration
When managing multiple git repositories with GRM, you'll generally have a
configuration file containing information about all the repos you have. GRM then
makes sure that your repositories match that configuration. If they don't exist
yet, it will clone them. It will also make sure that all remotes are configured
properly.
Let's try it out:
## Get the example configuration
```bash
curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml
```
Then, you're ready to run the first sync. This will clone all configured
repositories and set up the remotes.
```bash
$ grm repos sync config --config example.config.toml
[] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: OK
[] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git"
[] dotfiles: Repository successfully cloned
[] dotfiles: OK
```
If you run it again, it will report no changes:
```bash
$ grm repos sync config -c example.config.toml
[] git-repo-manager: OK
[] dotfiles: OK
```
### Generate your own configuration
Now, if you already have a few repositories, it would be quite laborious to
write a configuration from scratch. Luckily, GRM has a way to generate a
configuration from an existing file tree:
```bash
grm repos find local ~/your/project/root > config.toml
```
This will detect all repositories and remotes and write them to `config.toml`.
You can exclude repositories from the generated configuration by providing
a regex that will be tested against the path of each discovered repository:
```bash
grm repos find local ~/your/project/root --exclude "^.*/subdir/match-(foo|bar)/.*$" > config.toml
```
### Show the state of your projects
```bash
$ grm repos status --config example.config.toml
╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡
│ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │
│ ┆ ┆ ┆ <origin/master> ✔ ┆ ┆ origin │
├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯
```
You can also use `status` without `--config` to check the repository you're
currently in:
```bash
$ cd ~/example-projects/dotfiles
$ grm repos status
╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
```
## YAML
By default, the repo configuration uses TOML. If you prefer YAML, just give it a
YAML file instead (file ending does not matter, `grm` will figure out the
format). For generating a configuration, pass `--format yaml` to `grm repo
find` which generates a YAML configuration instead of a TOML configuration.

View File

@@ -1,8 +1,8 @@
# Overview # Overview
Welcome! This is the documentation for [Git Repo Welcome! This is the documentation for [Git Repo
Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a tool Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a
that helps you manage git repositories in a declarative way. tool that helps you manage git repositories.
GRM helps you manage git repositories in a declarative way. Configure your GRM helps you manage git repositories in a declarative way. Configure your
repositories in a TOML or YAML file, GRM does the rest. Take a look at [the repositories in a TOML or YAML file, GRM does the rest. Take a look at [the
@@ -12,12 +12,12 @@ to get a feel for the way you configure your repositories. See the [repository
tree chapter](./repos.md) for details. tree chapter](./repos.md) for details.
GRM also provides some tooling to work with single git repositories using GRM also provides some tooling to work with single git repositories using
`git-worktree`. See [the worktree chapter](./worktrees.md) for more details. `git-worktree`. See [the worktree chapter](./worktree.md) for more details.
## Why use GRM? ## Why use GRM?
If you're working with a lot of git repositories, GRM can help you to manage If you're working with a lot of git repositories, GRM can help you to manage them
them in an easy way: in an easy way:
* You want to easily clone many repositories to a new machine. * You want to easily clone many repositories to a new machine.
* You want to change remotes for multiple repositories (e.g. because your GitLab * You want to change remotes for multiple repositories (e.g. because your GitLab

View File

@@ -1,27 +0,0 @@
# Releases
To make a release, make sure you are on a clean `develop` branch, sync your
remotes and then run `./release (major|minor|patch)`. It will handle a
git-flow-y release, meaning that it will perform a merge from `develop` to
`master`, create a git tag, sync all remotes and run `cargo publish`.
Make sure to run `just check` before releasing to make sure that nothing is
broken.
As GRM is still `v0.x`, there is not much consideration for backwards
compatibility. Generally, update the patch version for small stuff and the minor
version for bigger / backwards incompatible changes.
Generally, it's good to regularly release a new patch release with [updated
dependencies](./dependency_updates.md). As `./release.sh patch` is exposed as a
Justfile target (`release-patch`), it's possible to do both in one step:
```bash
$ just update-dependencies check release-patch
```
## Release notes
There are currently no release notes. Things are changing quite quickly and
there is simply no need for a record of changes (except the git history of
course).

View File

@@ -1,13 +1,82 @@
# Managing Repositories # Managing tree of git repositories
GRM helps you manage a bunch of git repositories easily. There are generally two When managing multiple git repositories with GRM, you'll generally have a
ways to go about that: configuration file containing information about all the repos you have. GRM then
makes sure that your repositories match that config. If they don't exist yet, it
will clone them. It will also make sure that all remotes are configured properly.
You can either manage a list of repositories in a TOML or YAML file, and use GRM Let's try it out:
to sync the configuration with the state of the repository.
Or, you can pull repository information from a forge (e.g. GitHub, GitLab) and ## Get the example configuration
clone the repositories.
There are also hybrid modes where you pull information from a forge and create a ```bash
configuration file that you can use later. $ curl --proto '=https' --tlsv1.2 -sSfO https://raw.githubusercontent.com/hakoerber/git-repo-manager/master/example.config.toml
```
Then, you're ready to run the first sync. This will clone all configured repositories
and set up the remotes.
```bash
$ grm repos sync config --config example.config.toml
[] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: OK
[] Cloning into "/home/me/projects/dotfiles" from "https://github.com/hakoerber/dotfiles.git"
[] dotfiles: Repository successfully cloned
[] dotfiles: OK
```
If you run it again, it will report no changes:
```
$ grm repos sync config -c example.config.toml
[✔] git-repo-manager: OK
[✔] dotfiles: OK
```
### Generate your own configuration
Now, if you already have a few repositories, it would be quite laborious to write
a configuration from scratch. Luckily, GRM has a way to generate a configuration
from an existing file tree:
```bash
$ grm repos find local ~/your/project/root > config.toml
```
This will detect all repositories and remotes and write them to `config.toml`.
### Show the state of your projects
```bash
$ grm repos status --config example.config.toml
╭──────────────────┬──────────┬────────┬───────────────────┬────────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════════════╪══════════╪════════╪═══════════════════╪════════╪═════════╡
│ git-repo-manager ┆ ┆ ✔ ┆ branch: master ┆ master ┆ github │
│ ┆ ┆ ┆ <origin/master> ✔ ┆ ┆ origin │
├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌┤
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────────────┴──────────┴────────┴───────────────────┴────────┴─────────╯
```
You can also use `status` without `--config` to check the repository you're currently
in:
```
$ cd ~/example-projects/dotfiles
$ grm repos status
╭──────────┬──────────┬────────┬──────────┬───────┬─────────╮
│ Repo ┆ Worktree ┆ Status ┆ Branches ┆ HEAD ┆ Remotes │
╞══════════╪══════════╪════════╪══════════╪═══════╪═════════╡
│ dotfiles ┆ ┆ ✔ ┆ ┆ Empty ┆ origin │
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
```
## YAML
By default, the repo configuration uses TOML. If you prefer YAML, just give it
a YAML file instead (file ending does not matter, `grm` will figure out the format).
For generating a configuration, pass `--format yaml` to `grm repo find`
which generates a YAML config instead of a TOML configuration.

View File

@@ -1,124 +0,0 @@
# Testing
There are two distinct test suites: One for unit test (`just test-unit`) and
integration tests (`just test-integration`) that is part of the rust crate, and
a separate e2e test suite in python (`just test-e2e`).
To run all tests, run `just test`.
When contributing, consider whether it makes sense to add tests which could
prevent regressions in the future. When fixing bugs, it makes sense to add tests
that expose the wrong behavior beforehand.
The unit and integration tests are very small and only test a few self-contained
functions (like validation of certain input).
## E2E tests
The main focus of the testing setup lays on the e2e tests. Each user-facing
behavior *should* have a corresponding e2e test. These are the most important
tests, as they test functionality the user will use in the end.
The test suite is written in python and uses
[pytest](https://docs.pytest.org/en/stable/). There are helper functions that
set up temporary git repositories and remotes in a `tmpfs`.
Effectively, each tests works like this:
* Set up some prerequisites (e.g. different git repositories or configuration
files)
* Run `grm`
* Check that everything is according to expected behavior (e.g. that `grm` had
certain output and exit code, that the target repositories have certain
branches, heads and remotes, ...)
As there are many different scenarios, the tests make heavy use of the
[`@pytest.mark.parametrize`](https://docs.pytest.org/en/stable/how-to/parametrize.html#pytest-mark-parametrize)
decorator to get all permutations of input parameters (e.g. whether a
configuration exists, what a config value is set to, how the repository looks
like, ...)
Whenever you write a new test, think about the different circumstances that can
happen. What are the failure modes? What affects the behavior? Parametrize each
of these behaviors.
### Optimization
Note: You will most likely not need to read this.
Each test parameter will exponentially increase the number of tests that will be
run. As a general rule, comprehensiveness is more important than test suite
runtime (so if in doubt, better to add another parameter to catch every edge
case). But try to keep the total runtime sane. Currently, the whole `just test-e2e`
target runs ~8'000 tests and takes around 5 minutes on my machine, excluding
binary and docker build time. I'd say that keeping it under 10 minutes is a good
idea.
To optimize tests, look out for two patterns: Dependency and Orthogonality
#### Dependency
If a parameter depends on another one, it makes little sense to handle them
independently. Example: You have a parameter that specifies whether a
configuration is used, and another parameter that sets a certain value in that
configuration file. It might look something like this:
```python
@pytest.mark.parametrize("use_config", [True, False])
@pytest.mark.parametrize("use_value", ["0", "1"])
def test(...):
```
This leads to 4 tests being instantiated. But there is little point in setting a
configuration value when no config is used, so the combinations `(False, "0")`
and `(False, "1")` are redundant. To remedy this, spell out the optimized
permutation manually:
```python
@pytest.mark.parametrize("config", ((True, "0"), (True, "1"), (False, None)))
def test(...):
(use_config, use_value) = config
```
This cuts down the number of tests by 25%. If you have more dependent parameters
(e.g. additional configuration values), this gets even better. Generally, this
will cut down the number of tests to
\\[ \frac{1}{o \cdot c} + \frac{1}{(o \cdot c) ^ {(n + 1)}} \\]
with \\( o \\) being the number of values of the parent parameter that a
parameter is dependent on, \\( c \\) being the cardinality of the test input (so you can
assume \\( o = 1 \\) and \\( c = 2 \\) for boolean parameters), and \\( n \\)
being the number of parameters that are optimized, i.e. folded into their
dependent parameter.
As an example: Folding down two boolean parameters into one dependent parent
boolean parameter will cut down the number of tests to 62.5%!
#### Orthogonality
If different test parameters are independent of each other, there is little
point in testing their combinations. Instead, split them up into different test
functions. For boolean parameters, this will cut the number of tests in half.
So instead of this:
```python
@pytest.mark.parametrize("param1", [True, False])
@pytest.mark.parametrize("param2", [True, False])
def test(...):
```
Rather do this:
```python
@pytest.mark.parametrize("param1", [True, False])
def test_param1(...):
@pytest.mark.parametrize("param2", [True, False])
def test_param2(...):
```
The tests are running in Docker via docker-compose. This is mainly needed to
test networking functionality like GitLab integration, with the GitLab API being
mocked by a simple flask container.

View File

@@ -1,183 +0,0 @@
# Tutorial
Here, you'll find a quick overview over the most common functionality of GRM.
## Managing existing repositories
Let's say you have your git repositories at `~/code`. To start managing them via
GRM, first create a configuration:
```bash
grm repos find local ~/code --format yaml > ~/code/config.yml
```
The result may look something like this:
```yaml
---
trees:
- root: ~/code
repos:
- name: git-repo-manager
worktree_setup: true
remotes:
- name: origin
url: "https://github.com/hakoerber/git-repo-manager.git"
type: https
```
To apply the configuration and check whether all repositories are in sync, run
the following:
```bash
$ grm repos sync config --config ~/code/config.yml
[] git-repo-manager: OK
```
Well, obviously there are no changes. To check how changes would be applied,
let's change the name of the remote (currently `origin`):
```bash
$ sed -i 's/name: origin/name: github/' ~/code/config.yml
$ grm repos sync config --config ~/code/config.yml
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: Deleting remote "origin"
[] git-repo-manager: OK
```
GRM replaced the `origin` remote with `github`.
The configuration (`~/code/config.yml` in this example) would usually be
something you'd track in git or synchronize between machines via some other
means. Then, on every machine, all your repositories are a single `grm repos
sync` away!
## Getting repositories from a forge
Let's say you have a bunch of repositories on GitHub and you'd like to clone
them all to your local machine.
To authenticate, you'll need to get a personal access token, as described in
[the forge documentation](./forge_integration.md#github). Let's assume you put
your token into `~/.github_token` (please don't if you're doing this "for
real"!)
Let's first see what kind of repos we can find:
```bash
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml
---
trees: []
$
```
Ummm, ok? No repos? This is because you have to *tell* GRM what to look for (if
you don't, GRM will just relax, as it's lazy).
There are different filters (see [the forge
documentation](./forge_integration.md#filters) for more info). In our case,
we'll just use the `--owner` filter to get all repos that belong to us:
```bash
$ grm repos find remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/ --format yaml
---
trees:
- root: ~/code/github.com
repos:
- name: git-repo-manager
worktree_setup: false
remotes:
- name: origin
url: "https://github.com/hakoerber/git-repo-manager.git"
type: https
```
Nice! The format is the same as we got from `grm repos find local` above. So if
we wanted, we could save this file and use it with `grm repos sync config` as
above. But there is an even easier way: We can directly clone the repositories!
```bash
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/
[] Cloning into "~/code/github.com/git-repo-manager" from "https://github.com/hakoerber/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned
[] git-repo-manager: OK
```
Nice! Just to make sure, let's run the same command again:
```bash
$ grm repos sync remote --provider github --token-command "cat ~/.github_token" --root ~/code/github.com/
[] git-repo-manager: OK
```
GRM saw that the repository is already there and did nothing (remember, it's
lazy).
## Using worktrees
Worktrees are something that make it easier to work with multiple branches at
the same time in a repository. Let's say we wanted to hack on the codebase of
GRM:
```bash
$ cd ~/code/github.com/git-repo-manager
$ ls
.gitignore
Cargo.toml
...
```
Well, this is just a normal git repository. But let's try worktrees! First, we
have to convert the existing repository to use the special worktree setup. For
all worktree operations, we will use `grm worktree` (or `grm wt` for short):
```bash
$ grm wt convert
[] Conversion done
$ ls
$
```
So, the code is gone? Not really, there is just no active worktree right now. So
let's add one for `master`:
```bash
$ grm wt add master --track origin/master
[] Conversion done
$ ls
master
$ (cd ./master && git status)
On branch master
nothing to commit, working tree clean
```
Now, a single worktree is kind of pointless (if we only have one, we could also
just use the normal setup, without worktrees). So let's add another one for
`develop`:
```bash
$ grm wt add develop --track origin/develop
[] Conversion done
$ ls
develop
master
$ (cd ./develop && git status)
On branch develop
nothing to commit, working tree clean
```
What's the point? The cool thing is that we can now start working in the
`develop` worktree, without affecting the `master` worktree at all. If you're
working on `develop` and want to quickly see what a certain file looks like in
`master`, just look inside `./master`, it's all there!
This becomes especially interesting when you have many feature branches and are
working on multiple features at the same time.
There are a lot of options that influence how worktrees are handled. Maybe you
want to automatically track `origin/master` when you add a worktree called
`master`? Maybe you want your feature branches to have a prefix, so when you're
working on the `feature1` worktree, the remote branch will be
`origin/awesomefeatures/feature1`? Check out [the chapter on
worktrees](./worktrees.md) for all the things that are possible.

View File

@@ -1,32 +0,0 @@
# Behavior Details
When working with worktrees and GRM, there is a lot going on under the hood.
Each time you create a new worktree, GRM has to figure out what commit to set
your new branch to and how to configure any potential remote branches.
To state again, the most important guideline is the following:
**The branch inside the worktree is always the same as the directory name of the
worktree.**
The second set of guidelines relates to the commit to check out, and the remote
branches to use:
* When a branch already exists, you will get a worktree for that branch
* Existing local branches are never changed
* Only do remote operations if specifically requested (via configuration file or
command line parameters)
* When you specify `--track`, you will get that exact branch as the tracking
branch
* When you specify `--no-track`, you will get no tracking branch
Apart from that, GRM tries to do The Right Thing<sup>TM</sup>. It should be as
little surprising as possible.
In 99% of the cases, you will not have to care about the details, as the normal
workflows are covered by the rules above. In case you want to know the exact
behavior "specification", take a look at the [module documentation for
`grm::worktree`](https://docs.rs/git-repo-manager/latest/grm/worktree/index.html).
If you think existing behavior is super-duper confusing and you have a better
idea, do not hesitate to open a GitHub issue to discuss this!

View File

@@ -1,75 +0,0 @@
# Worktrees and Remotes
To fetch all remote references from all remotes in a worktree setup, you can use
the following command:
```
$ grm wt fetch
[✔] Fetched from all remotes
```
This is equivalent to running `git fetch --all` in any of the worktrees.
Often, you may want to pull all remote changes into your worktrees. For this,
use the `git pull` equivalent:
```
$ grm wt pull
[✔] master: Done
[✔] my-cool-branch: Done
```
This will refuse when there are local changes, or if the branch cannot be fast
forwarded. If you want to rebase your local branches, use the `--rebase` switch:
```
$ grm wt pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
As noted, this will fail if there are any local changes in your worktree. If you
want to stash these changes automatically before the pull (and unstash them
afterwards), use the `--stash` option.
This will rebase your changes onto the upstream branch. This is mainly helpful
for persistent branches that change on the remote side.
There is a similar rebase feature that rebases onto the **default** branch
instead:
```
$ grm wt rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
This is super helpful for feature branches. If you want to incorporate changes
made on the remote branches, use `grm wt rebase` and all your branches will be
up to date. If you want to also update to remote tracking branches in one go,
use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
on non-fast-forwards:
```
$ grm wt rebase --pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
"So, what's the difference between `pull --rebase` and `rebase --pull`? Why the
hell is there a `--rebase` flag in the `rebase` command?"
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree to
their remote branch, if possible. `rebase` rebases onto the **default** branch
instead. The switches to `rebase` are just convenience, so you do not have to
run two commands.
* `rebase --pull` is the same as `pull` && `rebase`
* `rebase --pull --rebase` is the same as `pull --rebase` && `rebase`
I understand that the UX is not the most intuitive. If you can think of an
improvement, please let me know (e.g. via an GitHub issue)!
As with `pull`, `rebase` will also refuse to run when there are changes in your
worktree. And you can also use the `--stash` option to stash/unstash changes
automatically.

View File

@@ -1,173 +0,0 @@
# Working with Worktrees
## Creating a new worktree
To actually work, you'll first have to create a new worktree checkout. All
worktree-related commands are available as subcommands of `grm worktree` (or
`grm wt` for short):
```
$ grm wt add mybranch
[✔] Worktree mybranch created
```
You'll see that there is now a directory called `mybranch` that contains a
checkout of your repository, using the branch `mybranch`
```bash
$ cd ./mybranch && git status
On branch mybranch
nothing to commit, working tree clean
```
You can work in this repository as usual. Make changes, commit them, revert
them, whatever you're up to :)
Just note that you *should* not change the branch inside the worktree directory.
There is nothing preventing you from doing so, but you will notice that you'll
run into problems when trying to remove a worktree (more on that later). It may
also lead to confusing behavior, as there can be no two worktrees that have the
same branch checked out. So if you decide to use the worktree setup, go all in,
let `grm` manage your branches and bury `git branch` (and `git checkout -b`).
You will notice that there is no tracking branch set up for the new branch. You
can of course set up one manually after creating the worktree, but there is an
easier way, using the `--track` flag during creation. Let's create another
worktree. Go back to the root of the repository, and run:
```bash
$ grm wt add mybranch2 --track origin/mybranch2
[] Worktree mybranch2 created
```
You'll see that this branch is now tracking `mybranch2` on the `origin` remote:
```bash
$ cd ./mybranch2 && git status
On branch mybranch2
Your branch is up to date with 'origin/mybranch2'.
nothing to commit, working tree clean
```
The behavior of `--track` differs depending on the existence of the remote
branch:
* If the remote branch already exists, `grm` uses it as the base of the new
local branch.
* If the remote branch does not exist (as in our example), `grm` will create a
new remote tracking branch, using the default branch (either `main` or
`master`) as the base
Often, you'll have a workflow that uses tracking branches by default. It would
be quite tedious to add `--track` every single time. Luckily, the `grm.toml`
file supports defaults for the tracking behavior. See this for an example:
```toml
[track]
default = true
default_remote = "origin"
```
This will set up a tracking branch on `origin` that has the same name as the
local branch.
Sometimes, you might want to have a certain prefix for all your tracking
branches. Maybe to prevent collisions with other contributors. You can simply
set `default_remote_prefix` in `grm.toml`:
```toml
[track]
default = true
default_remote = "origin"
default_remote_prefix = "myname"
```
When using branch `my-feature-branch`, the remote tracking branch would be
`origin/myname/my-feature-branch` in this case.
Note that `--track` overrides any configuration in `grm.toml`. If you want to
disable tracking, use `--no-track`.
## Showing the status of your worktrees
There is a handy little command that will show you an overview over all
worktrees in a repository, including their status (i.e. changed files). Just run
the following in the root of your repository:
```
$ grm wt status
╭───────────┬────────┬──────────┬──────────────────╮
│ Worktree ┆ Status ┆ Branch ┆ Remote branch │
╞═══════════╪════════╪══════════╪══════════════════╡
│ mybranch ┆ ✔ ┆ mybranch ┆ │
│ mybranch2 ┆ ✔ ┆ mybranch ┆ origin/mybranch2 │
╰───────────┴────────┴──────────┴──────────────────╯
```
The "Status" column would show any uncommitted changes (new / modified / deleted
files) and the "Remote branch" would show differences to the remote branch (e.g.
if there are new pushes to the remote branch that are not yet incorporated into
your local branch).
## Deleting worktrees
If you're done with your worktrees, use `grm wt delete` to delete them. Let's
start with `mybranch2`:
```
$ grm wt delete mybranch2
[✔] Worktree mybranch2 deleted
```
Easy. On to `mybranch`:
```
$ grm wt delete mybranch
[!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
```
Hmmm. `grm` tells you:
"Hey, there is no remote branch that you could have pushed your changes to. I'd
rather not delete work that you cannot recover."
Note that `grm` is very cautious here. As your repository will not be deleted,
you could still recover the commits via
[`git-reflog`](https://git-scm.com/docs/git-reflog). But better safe than
sorry! Note that you'd get a similar error message if your worktree had any
uncommitted files, for the same reason. Now you can either commit & push your
changes, or your tell `grm` that you know what you're doing:
```
$ grm wt delete mybranch --force
[✔] Worktree mybranch deleted
```
If you just want to delete all worktrees that do not contain any changes, you
can also use the following:
```
$ grm wt clean
```
Note that this will not delete the default branch of the repository. It can of
course still be deleted with `grm wt delete` if necessary.
### Converting an existing repository
It is possible to convert an existing directory to a worktree setup, using `grm
wt convert`. This command has to be run in the root of the repository you want
to convert:
```
$ grm wt convert
[✔] Conversion successful
```
This command will refuse to run if you have any changes in your repository.
Commit them and try again!
Afterwards, the directory is empty, as there are no worktrees checked out yet.
Now you can use the usual commands to set up worktrees.

View File

@@ -1,60 +1,58 @@
# Git Worktrees # Git Worktrees
## Why? ## Why?
The default workflow when using git is having your repository in a single
directory. Then, you can check out a certain reference (usually a branch),
which will update the files in the directory to match the state of that
reference. Most of the time, this is exactly what you need and works perfectly.
But especially when you're working with branches a lot, you may notice that
there is a lot of work required to make everything run smoothly.
Maybe you have experienced the following: You're working on a feature branch. The default workflow when using git is having your repository in a single directory.
Then, for some reason, you have to change branches (maybe to investigate some Then, you can check out a certain reference (usually a branch), which will update
issue). But you get the following: the files in the directory to match the state of that reference. Most of the time,
this is exactly what you need and works perfectly. But especially when you're working
with branches a lot, you may notice that there is a lot of work required to make
everything run smoothly.
Maybe you have experienced the following: You're working on a feature branch. Then,
for some reason, you have to change branches (maybe to investigate some issue).
But you get the following:
``` ```
error: Your local changes to the following files would be overwritten by checkout error: Your local changes to the following files would be overwritten by checkout
``` ```
Now you can create a temporary commit or stash your changes. In any case, you Now you can create a temporary commit or stash your changes. In any case, you have
have some mental overhead before you can work on something else. Especially with some mental overhead before you can work on something else. Especially with stashes,
stashes, you'll have to remember to do a `git stash pop` before resuming your you'll have to remember to do a `git stash pop` before resuming your work (I
work (I cannot count the number of times where I "rediscovered" some code hidden cannot count the number of times where I "rediscovered" some code hidden in some
in some old stash I forgot about). Also, conflicts on a `git stash pop` are just old stash I forgot about).
horrible.
And even worse: If you're currently in the process of resolving merge conflicts And even worse: If you're currently in the process of resolving merge conflicts or an
or an interactive rebase, there is just no way to "pause" this work to check out interactive rebase, there is just no way to "pause" this work to check out a
a different branch. different branch.
Sometimes, it's crucial to have an unchanging state of your repository until Sometimes, it's crucial to have an unchanging state of your repository until some
some long-running process finishes. I'm thinking of Ansible and Terraform runs. long-running process finishes. I'm thinking of Ansible and Terraform runs. I'd
I'd rather not change to a different branch while ansible or Terraform are rather not change to a different branch while ansible or Terraform are running as
running as I have no idea how those tools would behave (and I'm not too eager to I have no idea how those tools would behave (and I'm not too eager to find out).
find out).
In any case, Git Worktrees are here for the rescue: In any case, Git Worktrees are here for the rescue:
## What are git worktrees? ## What are git worktrees?
[Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have [Git Worktrees](https://git-scm.com/docs/git-worktree) allow you to have multiple
multiple independent checkouts of your repository on different directories. You independent checkouts of your repository on different directories. You can have
can have multiple directories that correspond to different references in your multiple directories that correspond to different references in your repository.
repository. Each worktree has it's independent working tree (duh) and index, so Each worktree has it's independent working tree (duh) and index, so there is no
there is no way to run into conflicts. Changing to a different branch is just a way to run into conflicts. Changing to a different branch is just a `cd` away (if
`cd` away (if the worktree is already set up). the worktree is already set up).
## Worktrees in GRM ## Worktrees in GRM
GRM exposes an opinionated way to use worktrees in your repositories. GRM exposes an opinionated way to use worktrees in your repositories. Opinionated,
Opinionated, because there is a single invariant that makes reasoning about your because there is a single invariant that makes reasoning about your worktree
worktree setup quite easy: setup quite easy:
**The branch inside the worktree is always the same as the directory name of the **The branch inside the worktree is always the same as the directory name of the worktree.**
worktree.**
In other words: If you're checking out branch `mybranch` into a new worktree, In other words: If you're checking out branch `mybranch` into a new worktree, the
the worktree directory will be named `mybranch`. worktree directory will be named `mybranch`.
GRM can be used with both "normal" and worktree-enabled repositories. But note GRM can be used with both "normal" and worktree-enabled repositories. But note
that a single repository can be either the former or the latter. You'll have to that a single repository can be either the former or the latter. You'll have to
@@ -69,27 +67,303 @@ name = "git-repo-manager"
worktree_setup = true worktree_setup = true
``` ```
Now, when you run a `grm sync`, you'll notice that the directory of the Now, when you run a `grm sync`, you'll notice that the directory of the repository
repository is empty! Well, not totally, there is a hidden directory called is empty! Well, not totally, there is a hidden directory called `.git-main-working-tree`.
`.git-main-working-tree`. This is where the repository actually "lives" (it's a This is where the repository actually "lives" (it's a bare checkout).
bare checkout).
Note that there are few specific things you can configure for a certain Note that there are few specific things you can configure for a certain
workspace. This is all done in an optional `grm.toml` file right in the root of workspace. This is all done in an optional `grm.toml` file right in the root
the worktree. More on that later. of the worktree. More on that later.
### Creating a new worktree
To actually work, you'll first have to create a new worktree checkout. All
worktree-related commands are available as subcommands of `grm worktree` (or
`grm wt` for short):
```
$ grm wt add mybranch
[✔] Worktree mybranch created
```
You'll see that there is now a directory called `mybranch` that contains a checkout
of your repository, using the branch `mybranch`
```bash
$ cd ./mybranch && git status
On branch mybranch
nothing to commit, working tree clean
```
You can work in this repository as usual. Make changes, commit them, revert them,
whatever you're up to :)
Just note that you *should* not change the branch inside the worktree
directory. There is nothing preventing you from doing so, but you will notice
that you'll run into problems when trying to remove a worktree (more on that
later). It may also lead to confusing behaviour, as there can be no two
worktrees that have the same branch checked out. So if you decide to use the
worktree setup, go all in, let `grm` manage your branches and bury `git branch`
(and `git checkout -b`).
You will notice that there is no tracking branch set up for the new branch. You
can of course set up one manually after creating the worktree, but there is an
easier way, using the `--track` flag during creation. Let's create another
worktree. Go back to the root of the repository, and run:
```bash
$ grm wt add mybranch2 --track origin/mybranch2
[✔] Worktree mybranch2 created
```
You'll see that this branch is now tracking `mybranch2` on the `origin` remote:
```bash
$ cd ./mybranch2 && git status
On branch mybranch2
Your branch is up to date with 'origin/mybranch2'.
nothing to commit, working tree clean
```
The behaviour of `--track` differs depending on the existence of the remote branch:
* If the remote branch already exists, `grm` uses it as the base of the new
local branch.
* If the remote branch does not exist (as in our example), `grm` will create a
new remote tracking branch, using the default branch (either `main` or `master`)
as the base
Often, you'll have a workflow that uses tracking branches by default. It would
be quite tedious to add `--track` every single time. Luckily, the `grm.toml` file
supports defaults for the tracking behaviour. See this for an example:
```toml
[track]
default = true
default_remote = "origin"
```
This will set up a tracking branch on `origin` that has the same name as the local
branch.
Sometimes, you might want to have a certain prefix for all your tracking branches.
Maybe to prevent collisions with other contributors. You can simply set
`default_remote_prefix` in `grm.toml`:
```toml
[track]
default = true
default_remote = "origin"
default_remote_prefix = "myname"
```
When using branch `my-feature-branch`, the remote tracking branch would be
`origin/myname/my-feature-branch` in this case.
Note that `--track` overrides any configuration in `grm.toml`. If you want to
disable tracking, use `--no-track`.
### Showing the status of your worktrees
There is a handy little command that will show you an overview of all worktrees
in a repository, including their status (i.e. changed files). Just run the following
in the root of your repository:
```
$ grm wt status
╭───────────┬────────┬───────────┬──────────────────╮
│ Worktree  ┆ Status ┆ Branch    ┆ Remote branch    │
╞═══════════╪════════╪═══════════╪══════════════════╡
│ mybranch  ┆ ✔      ┆ mybranch  ┆                  │
│ mybranch2 ┆ ✔      ┆ mybranch2 ┆ origin/mybranch2 │
╰───────────┴────────┴───────────┴──────────────────╯
```
The "Status" column would show any uncommitted changes (new / modified / deleted
files) and the "Remote branch" would show differences to the remote branch (e.g.
if there are new pushes to the remote branch that are not yet incorporated into
your local branch).
## Manual access ### Deleting worktrees
GRM isn't doing any magic, it's just git under the hood. If you need to have If you're done with your worktrees, use `grm wt delete` to delete them. Let's
access to the underlying git repository, you can always do this: start with `mybranch2`:
```
$ grm wt delete mybranch2
[✔] Worktree mybranch2 deleted
```
Easy. On to `mybranch`:
```
$ grm wt delete mybranch
[!] Changes in worktree: No remote tracking branch for branch mybranch found. Refusing to delete
```
Hmmm. `grm` tells you:
"Hey, there is no remote branch that you could have pushed
your changes to. I'd rather not delete work that you cannot recover."
Note that `grm` is very cautious here. As your repository will not be deleted,
you could still recover the commits via [`git-reflog`](https://git-scm.com/docs/git-reflog).
But better safe than sorry! Note that you'd get a similar error message if your
worktree had any uncommitted files, for the same reason. Now you can either
commit & push your changes, or your tell `grm` that you know what you're doing:
```
$ grm wt delete mybranch --force
[✔] Worktree mybranch deleted
```
If you just want to delete all worktrees that do not contain any changes, you
can also use the following:
```
$ grm wt clean
```
Note that this will not delete the default branch of the repository. It can of
course still be deleted with `grm wt delete` if necessary.
### Persistent branches
You most likely have a few branches that are "special", that you don't want to
clean up and that are the usual target for feature branches to merge into. GRM
calls them "persistent branches" and treats them a bit differently:
* Their worktrees will never be deleted by `grm wt clean`
* If the branches in other worktrees are merged into them, they will be cleaned
up, even though they may not be in line with their upstream. Same goes for
`grm wt delete`, which will not require a `--force` flag. Note that of
course, actual changes in the worktree will still block an automatic cleanup!
* As soon as you enable persistent branches, non-persistent branches will only
ever be cleaned up when merged into a persistent branch.
To elaborate: This is mostly relevant for a feature-branch workflow. Whenever a
feature branch is merged, it can usually be thrown away. As merging is usually
done on some remote code management platform (GitHub, GitLab, ...), this means
that you usually keep a branch around until it is merged into one of the "main"
branches (`master`, `main`, `develop`, ...)
Enable persistent branches by setting the following in the `grm.toml` in the
worktree root:
```toml
persistent_branches = [
"master",
"develop",
]
```
Note that setting persistent branches will disable any detection of "default"
branches. The first entry will be considered your repository's default branch.
### Converting an existing repository
It is possible to convert an existing directory to a worktree setup, using `grm
wt convert`. This command has to be run in the root of the repository you want
to convert:
```
$ grm wt convert
[✔] Conversion successful
```
This command will refuse to run if you have any changes in your repository.
Commit them and try again!
Afterwards, the directory is empty, as there are no worktrees checked out yet.
Now you can use the usual commands to set up worktrees.
### Working with remotes
To fetch all remote references from all remotes in a worktree setup, you can
use the following command:
```
$ grm wt fetch
[✔] Fetched from all remotes
```
This is equivalent to running `git fetch --all` in any of the worktrees.
Often, you may want to pull all remote changes into your worktrees. For this,
use the `git pull` equivalent:
```
$ grm wt pull
[✔] master: Done
[✔] my-cool-branch: Done
```
This will refuse to run when there are local changes, or if the branch cannot be
fast-forwarded. If you want to rebase your local branches, use the `--rebase` switch:
```
$ grm wt pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
As noted, this will fail if there are any local changes in your worktree. If you
want to stash these changes automatically before the pull (and unstash them
afterwards), use the `--stash` option.
This will rebase your changes onto the upstream branch. This is mainly helpful
for persistent branches that change on the remote side.
There is a similar rebase feature that rebases onto the **default** branch instead:
```
$ grm wt rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
This is super helpful for feature branches. If you want to incorporate changes
made on the remote branches, use `grm wt rebase` and all your branches will
be up to date. If you want to also update to remote tracking branches in one go,
use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
on non-fast-forwards:
```
$ grm wt rebase --pull --rebase
[✔] master: Done
[✔] my-cool-branch: Done
```
"So, what's the difference between `pull --rebase` and `rebase --pull`? Why the
hell is there a `--rebase` flag in the `rebase` command?"
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree
to their remote branch, if possible. `rebase` rebases onto the **default** branch
instead. The switches to `rebase` are just convenience, so you do not have to
run two commands.
* `rebase --pull` is the same as `pull` && `rebase`
* `rebase --pull --rebase` is the same as `pull --rebase` && `rebase`
I understand that the UX is not the most intuitive. If you can think of an
improvement, please let me know (e.g. via a GitHub issue)!
As with `pull`, `rebase` will also refuse to run when there are changes in your
worktree. And you can also use the `--stash` option to stash/unstash changes
automatically.
### Manual access
GRM isn't doing any magic, it's just git under the hood. If you need to have access
to the underlying git repository, you can always do this:
``` ```
$ git --git-dir ./.git-main-working-tree [...] $ git --git-dir ./.git-main-working-tree [...]
``` ```
This should never be required (whenever you have to do this, you can consider This should never be required (whenever you have to do this, you can consider
this a bug in GRM and open an this a bug in GRM and open an [issue](https://github.com/hakoerber/git-repo-manager/issues/new),
[issue](https://github.com/hakoerber/git-repo-manager/issues/new), but it may but it may help in a pinch.
help in a pinch.

View File

@@ -6,7 +6,3 @@ def pytest_configure(config):
os.environ["GIT_AUTHOR_EMAIL"] = "user@example.com" os.environ["GIT_AUTHOR_EMAIL"] = "user@example.com"
os.environ["GIT_COMMITTER_NAME"] = "Example user" os.environ["GIT_COMMITTER_NAME"] = "Example user"
os.environ["GIT_COMMITTER_EMAIL"] = "user@example.com" os.environ["GIT_COMMITTER_EMAIL"] = "user@example.com"
def pytest_unconfigure(config):
pass

View File

@@ -3,5 +3,5 @@ from flask import Flask
app = Flask(__name__) app = Flask(__name__)
app.url_map.strict_slashes = False app.url_map.strict_slashes = False
import github # noqa: E402,F401 import github
import gitlab # noqa: E402,F401 import gitlab

View File

@@ -1,8 +1,10 @@
import os.path import os.path
import jinja2
from app import app from app import app
from flask import abort, jsonify, make_response, request
from flask import Flask, request, abort, jsonify, make_response
import jinja2
def check_headers(): def check_headers():
@@ -46,7 +48,7 @@ def add_pagination(response, page, last_page):
def read_project_files(namespaces=[]): def read_project_files(namespaces=[]):
last_page = 4 last_page = 4
page = int(request.args.get("page", "1")) page = username = int(request.args.get("page", "1"))
response_file = f"./github_api_page_{page}.json.j2" response_file = f"./github_api_page_{page}.json.j2"
if not os.path.exists(response_file): if not os.path.exists(response_file):
return jsonify([]) return jsonify([])

View File

@@ -1,8 +1,10 @@
import os.path import os.path
import jinja2
from app import app from app import app
from flask import abort, jsonify, make_response, request
from flask import Flask, request, abort, jsonify, make_response
import jinja2
def check_headers(): def check_headers():
@@ -46,7 +48,7 @@ def add_pagination(response, page, last_page):
def read_project_files(namespaces=[]): def read_project_files(namespaces=[]):
last_page = 4 last_page = 4
page = int(request.args.get("page", "1")) page = username = int(request.args.get("page", "1"))
response_file = f"./gitlab_api_page_{page}.json" response_file = f"./gitlab_api_page_{page}.json"
if not os.path.exists(response_file): if not os.path.exists(response_file):
return jsonify([]) return jsonify([])

View File

@@ -1,7 +1,7 @@
FROM docker.io/debian:11.3 FROM docker.io/debian:11.3
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y --no-install-recommends \ && apt-get install -y \
python3-pytest \ python3-pytest \
python3-toml \ python3-toml \
python3-git \ python3-git \

View File

@@ -1,34 +1,20 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import hashlib
import inspect
import os import os
import os.path import os.path
import shutil
import subprocess import subprocess
import tempfile import tempfile
import hashlib
import git import git
binary = os.environ["GRM_BINARY"] binary = os.environ["GRM_BINARY"]
def funcname():
return inspect.stack()[1][3]
def copytree(src, dest):
shutil.copytree(src, dest, dirs_exist_ok=True)
def get_temporary_directory(dir=None):
return tempfile.TemporaryDirectory(dir=dir)
def grm(args, cwd=None, is_invalid=False): def grm(args, cwd=None, is_invalid=False):
cmd = subprocess.run([binary] + args, cwd=cwd, capture_output=True, text=True) cmd = subprocess.run([binary] + args, cwd=cwd, capture_output=True, text=True)
if not is_invalid: if not is_invalid:
assert "usage" not in cmd.stderr.lower() assert "USAGE" not in cmd.stderr
print(f"grmcmd: {args}") print(f"grmcmd: {args}")
print(f"stdout:\n{cmd.stdout}") print(f"stdout:\n{cmd.stdout}")
print(f"stderr:\n{cmd.stderr}") print(f"stderr:\n{cmd.stderr}")
@@ -39,12 +25,8 @@ def grm(args, cwd=None, is_invalid=False):
def shell(script): def shell(script):
script = "set -o errexit\nset -o nounset\nset -o pipefail\n" + script script = "set -o errexit\nset -o nounset\n" + script
cmd = subprocess.run(["bash"], input=script, text=True, capture_output=True) subprocess.run(["bash"], input=script, text=True, check=True)
if cmd.returncode != 0:
print(cmd.stdout)
print(cmd.stderr)
cmd.check_returncode()
def checksum_directory(path): def checksum_directory(path):
@@ -130,203 +112,78 @@ def checksum_directory(path):
class TempGitRepository: class TempGitRepository:
def __init__(self, dir=None): def __init__(self, dir=None):
self.dir = dir self.dir = dir
pass
def __enter__(self): def __enter__(self):
self.tmpdir = get_temporary_directory(self.dir) self.tmpdir = tempfile.TemporaryDirectory(dir=self.dir)
self.remote_1 = get_temporary_directory() self.remote_1_dir = tempfile.TemporaryDirectory()
self.remote_2 = get_temporary_directory() self.remote_2_dir = tempfile.TemporaryDirectory()
cmd = f""" shell(
f"""
cd {self.tmpdir.name} cd {self.tmpdir.name}
git -c init.defaultBranch=master init git init
echo test > root-commit echo test > root-commit
git add root-commit git add root-commit
git commit -m "root-commit" git commit -m "root-commit"
git remote add origin file://{self.remote_1.name} git remote add origin file://{self.remote_1_dir.name}
git remote add otherremote file://{self.remote_2.name} git remote add otherremote file://{self.remote_2_dir.name}
""" """
)
shell(cmd)
return self.tmpdir.name return self.tmpdir.name
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
pass del self.tmpdir
del self.remote_1_dir
del self.remote_2_dir
class TempGitRemote:
    """Test fixture providing a bare git repository to act as a remote.

    Instances wrap a temporary directory containing a bare repository
    (``git init --bare`` with ``master`` as the default branch). Remotes
    can optionally be cached per ``cachekey`` at class level so that
    expensive ``initfunc`` setup runs only once per key; cached remotes
    are handed out as copies so tests cannot interfere with each other.
    """

    # Class-level cache of initialized remotes, keyed by the cachekey
    # passed to get(). Shared across all tests in the process.
    obj = {}

    def __init__(self, tmpdir, remoteid=None):
        # tmpdir: a TemporaryDirectory-like object holding the bare repo.
        # remoteid: opaque value returned by initfunc, used to detect
        # whether a clone matches a given setup (compared by callers).
        self.tmpdir = tmpdir
        self.remoteid = remoteid

    @classmethod
    def get(cls, cachekey=None, initfunc=None):
        """Return a ``(remote, remoteid)`` tuple.

        Without a cachekey, a fresh bare repository is created (and
        optionally initialized via ``initfunc``). With a cachekey, the
        initialized remote is built once, cached, and subsequent calls
        receive an independent copy of the cached directory.
        """
        if cachekey is None:
            # Uncached path: always build a brand-new bare repo.
            tmpdir = get_temporary_directory()
            shell(
                f"""
                cd {tmpdir.name}
                git -c init.defaultBranch=master init --bare
                """
            )
            newobj = cls(tmpdir)
            remoteid = None
            if initfunc is not None:
                # initfunc receives the repo path and may seed content;
                # its return value identifies this particular setup.
                remoteid = newobj.init(initfunc)
                newobj.remoteid = remoteid
            return newobj, remoteid
        else:
            if cachekey not in cls.obj:
                # First request for this key: build and initialize once.
                # NOTE(review): this branch calls initfunc unconditionally,
                # so a cachekey without an initfunc would fail — assumes
                # callers always pass initfunc together with cachekey.
                tmpdir = get_temporary_directory()
                shell(
                    f"""
                    cd {tmpdir.name}
                    git -c init.defaultBranch=master init --bare
                    """
                )
                newobj = cls(tmpdir)
                remoteid = newobj.init(initfunc)
                newobj.remoteid = remoteid
                cls.obj[cachekey] = newobj
            # Hand out a copy so the cached original stays pristine.
            return cls.clone(cls.obj[cachekey])

    @classmethod
    def clone(cls, source):
        """Copy ``source``'s directory into a new temp dir; return
        ``(new_remote, remoteid)`` carrying over the source's remoteid."""
        new_remote = get_temporary_directory()
        copytree(source.tmpdir.name, new_remote.name)
        return cls(new_remote, source.remoteid), source.remoteid

    def init(self, func):
        # Run a user-supplied setup function against the repo path and
        # propagate its return value (the remoteid).
        return func(self.tmpdir.name)

    def __enter__(self):
        # Context-manager support: expose the underlying temp directory.
        return self.tmpdir

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Cleanup is delegated to the TemporaryDirectory's own finalizer.
        pass
class TempGitRepositoryWorktree: class TempGitRepositoryWorktree:
obj = {} def __init__(self):
pass
def __init__(self, remotes, tmpdir, commit, remote1, remote2, remote1id, remote2id):
self.remotes = remotes
self.tmpdir = tmpdir
self.commit = commit
self.remote1 = remote1
self.remote2 = remote2
self.remote1id = remote1id
self.remote2id = remote2id
@classmethod
def get(cls, cachekey, branch=None, remotes=2, basedir=None, remote_setup=None):
if cachekey not in cls.obj:
tmpdir = get_temporary_directory()
shell(
f"""
cd {tmpdir.name}
git -c init.defaultBranch=master init
echo test > root-commit-in-worktree-1
git add root-commit-in-worktree-1
git commit -m "root-commit-in-worktree-1"
echo test > root-commit-in-worktree-2
git add root-commit-in-worktree-2
git commit -m "root-commit-in-worktree-2"
git ls-files | xargs rm -rf
mv .git .git-main-working-tree
git --git-dir .git-main-working-tree config core.bare true
"""
)
repo = git.Repo(f"{tmpdir.name}/.git-main-working-tree")
commit = repo.head.commit.hexsha
if branch is not None:
repo.create_head(branch)
remote1 = None
remote2 = None
remote1id = None
remote2id = None
if remotes >= 1:
cachekeyremote, initfunc = (remote_setup or ((None, None),))[0]
remote1, remote1id = TempGitRemote.get(
cachekey=cachekeyremote, initfunc=initfunc
)
remote1 = remote1
remote1id = remote1id
shell(
f"""
cd {tmpdir.name}
git --git-dir .git-main-working-tree remote add origin file://{remote1.tmpdir.name}
"""
)
repo.remotes.origin.fetch()
repo.remotes.origin.push("master")
if remotes >= 2:
cachekeyremote, initfunc = (remote_setup or (None, (None, None)))[1]
remote2, remote2id = TempGitRemote.get(
cachekey=cachekeyremote, initfunc=initfunc
)
remote2 = remote2
remote2id = remote2id
shell(
f"""
cd {tmpdir.name}
git --git-dir .git-main-working-tree remote add otherremote file://{remote2.tmpdir.name}
"""
)
repo.remotes.otherremote.fetch()
repo.remotes.otherremote.push("master")
cls.obj[cachekey] = cls(
remotes, tmpdir, commit, remote1, remote2, remote1id, remote2id
)
return cls.clone(cls.obj[cachekey], remote_setup=remote_setup)
@classmethod
def clone(cls, source, remote_setup):
newdir = get_temporary_directory()
copytree(source.tmpdir.name, newdir.name)
remote1 = None
remote2 = None
remote1id = None
remote2id = None
repo = git.Repo(os.path.join(newdir.name, ".git-main-working-tree"))
if source.remotes >= 1:
cachekey, initfunc = (remote_setup or ((None, None),))[0]
remote1, remote1id = TempGitRemote.get(cachekey=cachekey, initfunc=initfunc)
if remote1id != source.remote1id:
repo.remotes.origin.fetch()
repo.remotes.origin.push("master")
if source.remotes >= 2:
cachekey, initfunc = (remote_setup or (None, (None, None)))[1]
remote2, remote2id = TempGitRemote.get(cachekey=cachekey, initfunc=initfunc)
if remote2id != source.remote2id:
repo.remotes.otherremote.fetch()
repo.remotes.otherremote.push("master")
return cls(
source.remotes,
newdir,
source.commit,
remote1,
remote2,
remote1id,
remote2id,
)
def __enter__(self): def __enter__(self):
return (self.tmpdir.name, self.commit) self.tmpdir = tempfile.TemporaryDirectory()
self.remote_1_dir = tempfile.TemporaryDirectory()
self.remote_2_dir = tempfile.TemporaryDirectory()
shell(
f"""
cd {self.remote_1_dir.name}
git init --bare
"""
)
shell(
f"""
cd {self.remote_2_dir.name}
git init --bare
"""
)
shell(
f"""
cd {self.tmpdir.name}
git init
echo test > root-commit-in-worktree-1
git add root-commit-in-worktree-1
git commit -m "root-commit-in-worktree-1"
echo test > root-commit-in-worktree-2
git add root-commit-in-worktree-2
git commit -m "root-commit-in-worktree-2"
git remote add origin file://{self.remote_1_dir.name}
git remote add otherremote file://{self.remote_2_dir.name}
git push origin HEAD:master
git ls-files | xargs rm -rf
mv .git .git-main-working-tree
git --git-dir .git-main-working-tree config core.bare true
"""
)
commit = git.Repo(
f"{self.tmpdir.name}/.git-main-working-tree"
).head.commit.hexsha
return (self.tmpdir.name, commit)
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
pass del self.tmpdir
del self.remote_1_dir
del self.remote_2_dir
class RepoTree: class RepoTree:
@@ -334,7 +191,7 @@ class RepoTree:
pass pass
def __enter__(self): def __enter__(self):
self.root = get_temporary_directory() self.root = tempfile.TemporaryDirectory()
self.config = tempfile.NamedTemporaryFile() self.config = tempfile.NamedTemporaryFile()
with open(self.config.name, "w") as f: with open(self.config.name, "w") as f:
f.write( f.write(
@@ -365,7 +222,7 @@ class EmptyDir:
pass pass
def __enter__(self): def __enter__(self):
self.tmpdir = get_temporary_directory() self.tmpdir = tempfile.TemporaryDirectory()
return self.tmpdir.name return self.tmpdir.name
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):
@@ -377,7 +234,7 @@ class NonGitDir:
pass pass
def __enter__(self): def __enter__(self):
self.tmpdir = get_temporary_directory() self.tmpdir = tempfile.TemporaryDirectory()
shell( shell(
f""" f"""
cd {self.tmpdir.name} cd {self.tmpdir.name}
@@ -397,11 +254,11 @@ class TempGitFileRemote:
pass pass
def __enter__(self): def __enter__(self):
self.tmpdir = get_temporary_directory() self.tmpdir = tempfile.TemporaryDirectory()
shell( shell(
f""" f"""
cd {self.tmpdir.name} cd {self.tmpdir.name}
git -c init.defaultBranch=master init git init
echo test > root-commit-in-remote-1 echo test > root-commit-in-remote-1
git add root-commit-in-remote-1 git add root-commit-in-remote-1
git commit -m "root-commit-in-remote-1" git commit -m "root-commit-in-remote-1"

View File

@@ -1,13 +1,13 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from helpers import grm from helpers import *
def test_invalid_command(): def test_invalid_command():
cmd = grm(["whatever"], is_invalid=True) cmd = grm(["whatever"], is_invalid=True)
assert "usage" in cmd.stderr.lower() assert "USAGE" in cmd.stderr
def test_help(): def test_help():
cmd = grm(["--help"]) cmd = grm(["--help"])
assert "usage" in cmd.stdout.lower() assert "USAGE" in cmd.stdout

View File

@@ -1,13 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os
import re
import tempfile import tempfile
import pytest
import toml import toml
import pytest
import yaml import yaml
from helpers import NonExistentPath, TempGitRepository, grm, shell
from helpers import *
def test_repos_find_nonexistent(): def test_repos_find_nonexistent():
@@ -41,7 +40,7 @@ def test_repos_find_invalid_format():
) )
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert "invalid value 'invalidformat'" in cmd.stderr assert "isn't a valid value" in cmd.stderr
def test_repos_find_non_git_repos(): def test_repos_find_non_git_repos():
@@ -64,10 +63,9 @@ def test_repos_find_non_git_repos():
assert len(cmd.stderr) != 0 assert len(cmd.stderr) != 0
@pytest.mark.parametrize("default_format", [True, False]) @pytest.mark.parametrize("default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"]) @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("exclude", [None, "^.*/repo2$", "^not_matching$"]) def test_repos_find(configtype, default):
def test_repos_find(configtype, exclude, default_format):
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
shell( shell(
f""" f"""
@@ -75,7 +73,7 @@ def test_repos_find(configtype, exclude, default_format):
mkdir repo1 mkdir repo1
( (
cd ./repo1 cd ./repo1
git -c init.defaultBranch=master init git init
echo test > test echo test > test
git add test git add test
git commit -m "commit1" git commit -m "commit1"
@@ -85,7 +83,7 @@ def test_repos_find(configtype, exclude, default_format):
mkdir repo2 mkdir repo2
( (
cd ./repo2 cd ./repo2
git -c init.defaultBranch=master init git init
git checkout -b main git checkout -b main
echo test > test echo test > test
git add test git add test
@@ -101,151 +99,143 @@ def test_repos_find(configtype, exclude, default_format):
) )
args = ["repos", "find", "local", tmpdir] args = ["repos", "find", "local", tmpdir]
if not default_format: if not default:
args += ["--format", configtype]
if exclude:
args += ["--exclude", exclude]
cmd = grm(args)
assert cmd.returncode == 0
if exclude == "^.*/repo2$":
assert re.match(r"^.*\[skipped\] .*\/repo2$", cmd.stderr.lower())
assert "repo2" in cmd.stderr.lower()
else:
assert len(cmd.stderr) == 0
if default_format or configtype == "toml":
output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict)
assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list)
assert len(output["trees"]) == 1
for tree in output["trees"]:
assert set(tree.keys()) == {"root", "repos"}
assert tree["root"] == tmpdir
assert isinstance(tree["repos"], list)
assert len(tree["repos"]) == (1 if exclude == "^.*/repo2$" else 2)
repo1 = [r for r in tree["repos"] if r["name"] == "repo1"][0]
assert repo1["worktree_setup"] is False
assert isinstance(repo1["remotes"], list)
assert len(repo1["remotes"]) == 2
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
assert set(origin.keys()) == {"name", "type", "url"}
assert origin["type"] == "https"
assert origin["url"] == "https://example.com/repo2.git"
someremote = [r for r in repo1["remotes"] if r["name"] == "someremote"][0]
assert set(origin.keys()) == {"name", "type", "url"}
assert someremote["type"] == "ssh"
assert someremote["url"] == "ssh://example.com/repo2.git"
if exclude == "^.*/repo2$":
assert [r for r in tree["repos"] if r["name"] == "repo2"] == []
else:
repo2 = [r for r in tree["repos"] if r["name"] == "repo2"][0]
assert repo2["worktree_setup"] is False
assert isinstance(repo1["remotes"], list)
assert len(repo2["remotes"]) == 1
origin = [r for r in repo2["remotes"] if r["name"] == "origin"][0]
assert set(origin.keys()) == {"name", "type", "url"}
assert origin["type"] == "https"
assert origin["url"] == "https://example.com/repo2.git"
@pytest.mark.parametrize("default_format", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_find_in_root(configtype, default_format):
with TempGitRepository() as repo_dir:
args = ["repos", "find", "local", repo_dir]
if not default_format:
args += ["--format", configtype] args += ["--format", configtype]
cmd = grm(args) cmd = grm(args)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert len(cmd.stderr) == 0 assert len(cmd.stderr) == 0
if default_format or configtype == "toml": if default or configtype == "toml":
output = toml.loads(cmd.stdout) output = toml.loads(cmd.stdout)
elif configtype == "yaml": elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout) output = yaml.safe_load(cmd.stdout)
else: else:
raise NotImplementedError() raise NotImplementedError()
assert isinstance(output, dict) assert isinstance(output, dict)
assert set(output.keys()) == {"trees"} assert set(output.keys()) == {"trees"}
assert isinstance(output["trees"], list) assert isinstance(output["trees"], list)
assert len(output["trees"]) == 1 assert len(output["trees"]) == 1
for tree in output["trees"]: for tree in output["trees"]:
assert set(tree.keys()) == {"root", "repos"} assert set(tree.keys()) == {"root", "repos"}
assert tree["root"] == os.path.dirname(repo_dir) assert tree["root"] == tmpdir
assert isinstance(tree["repos"], list) assert isinstance(tree["repos"], list)
assert len(tree["repos"]) == 1 assert len(tree["repos"]) == 2
repo1 = [ repo1 = [r for r in tree["repos"] if r["name"] == "repo1"][0]
r for r in tree["repos"] if r["name"] == os.path.basename(repo_dir) assert repo1["worktree_setup"] is False
][0] assert isinstance(repo1["remotes"], list)
assert repo1["worktree_setup"] is False assert len(repo1["remotes"]) == 2
assert isinstance(repo1["remotes"], list)
assert len(repo1["remotes"]) == 2 origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
assert set(origin.keys()) == {"name", "type", "url"}
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0] assert origin["type"] == "https"
assert set(origin.keys()) == {"name", "type", "url"} assert origin["url"] == "https://example.com/repo2.git"
assert origin["type"] == "file"
someremote = [r for r in repo1["remotes"] if r["name"] == "someremote"][0]
someremote = [r for r in repo1["remotes"] if r["name"] == "otherremote"][0] assert set(origin.keys()) == {"name", "type", "url"}
assert set(origin.keys()) == {"name", "type", "url"} assert someremote["type"] == "ssh"
assert someremote["type"] == "file" assert someremote["url"] == "ssh://example.com/repo2.git"
repo2 = [r for r in tree["repos"] if r["name"] == "repo2"][0]
@pytest.mark.parametrize("configtype", ["toml", "yaml"]) assert repo2["worktree_setup"] is False
@pytest.mark.parametrize("default_format", [True, False]) assert isinstance(repo1["remotes"], list)
def test_repos_find_with_invalid_repo(configtype, default_format): assert len(repo2["remotes"]) == 1
with tempfile.TemporaryDirectory() as tmpdir:
shell( origin = [r for r in repo2["remotes"] if r["name"] == "origin"][0]
f""" assert set(origin.keys()) == {"name", "type", "url"}
cd {tmpdir} assert origin["type"] == "https"
mkdir repo1 assert origin["url"] == "https://example.com/repo2.git"
(
cd ./repo1
git -c init.defaultBranch=master init @pytest.mark.parametrize("default", [True, False])
echo test > test @pytest.mark.parametrize("configtype", ["toml", "yaml"])
git add test def test_repos_find_in_root(configtype, default):
git commit -m "commit1" with TempGitRepository() as repo_dir:
git remote add origin https://example.com/repo2.git
git remote add someremote ssh://example.com/repo2.git args = ["repos", "find", "local", repo_dir]
) if not default:
mkdir repo2 args += ["--format", configtype]
( cmd = grm(args)
cd ./repo2 assert cmd.returncode == 0
git -c init.defaultBranch=master init assert len(cmd.stderr) == 0
git checkout -b main
echo test > test if default or configtype == "toml":
git add test output = toml.loads(cmd.stdout)
git commit -m "commit1" elif configtype == "yaml":
git remote add origin https://example.com/repo2.git output = yaml.safe_load(cmd.stdout)
) else:
mkdir broken_repo raise NotImplementedError()
(
cd broken_repo assert isinstance(output, dict)
echo "broken" > .git assert set(output.keys()) == {"trees"}
) assert isinstance(output["trees"], list)
""" assert len(output["trees"]) == 1
) for tree in output["trees"]:
assert set(tree.keys()) == {"root", "repos"}
args = ["repos", "find", "local", tmpdir] assert tree["root"] == os.path.dirname(repo_dir)
if not default_format: assert isinstance(tree["repos"], list)
args += ["--format", configtype] assert len(tree["repos"]) == 1
cmd = grm(args)
assert cmd.returncode == 0 repo1 = [
assert "broken" in cmd.stderr r for r in tree["repos"] if r["name"] == os.path.basename(repo_dir)
][0]
if default_format or configtype == "toml": assert repo1["worktree_setup"] is False
assert isinstance(repo1["remotes"], list)
assert len(repo1["remotes"]) == 2
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
assert set(origin.keys()) == {"name", "type", "url"}
assert origin["type"] == "file"
someremote = [r for r in repo1["remotes"] if r["name"] == "otherremote"][0]
assert set(origin.keys()) == {"name", "type", "url"}
assert someremote["type"] == "file"
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("default", [True, False])
def test_repos_find_with_invalid_repo(configtype, default):
with tempfile.TemporaryDirectory() as tmpdir:
shell(
f"""
cd {tmpdir}
mkdir repo1
(
cd ./repo1
git init
echo test > test
git add test
git commit -m "commit1"
git remote add origin https://example.com/repo2.git
git remote add someremote ssh://example.com/repo2.git
)
mkdir repo2
(
cd ./repo2
git init
git checkout -b main
echo test > test
git add test
git commit -m "commit1"
git remote add origin https://example.com/repo2.git
)
mkdir broken_repo
(
cd broken_repo
echo "broken" > .git
)
"""
)
args = ["repos", "find", "local", tmpdir]
if not default:
args += ["--format", configtype]
cmd = grm(args)
assert cmd.returncode == 0
assert "broken" in cmd.stderr
if default or configtype == "toml":
output = toml.loads(cmd.stdout) output = toml.loads(cmd.stdout)
elif configtype == "yaml": elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout) output = yaml.safe_load(cmd.stdout)

View File

@@ -1,13 +1,14 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os
import re import re
import tempfile import os
import pytest
import toml import toml
import pytest
import yaml import yaml
from helpers import grm
from helpers import *
ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"] ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"]
PROVIDERS = ["github", "gitlab"] PROVIDERS = ["github", "gitlab"]
@@ -43,9 +44,7 @@ def test_repos_find_remote_invalid_provider(use_config):
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
if not use_config: if not use_config:
assert re.match( assert re.match(".*isn't a valid value for.*provider", cmd.stderr)
".*invalid value 'thisproviderdoesnotexist' for.*provider", cmd.stderr
)
@pytest.mark.parametrize("provider", PROVIDERS) @pytest.mark.parametrize("provider", PROVIDERS)
@@ -68,7 +67,7 @@ def test_repos_find_remote_invalid_format(provider):
) )
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert "invalid value 'invalidformat'" in cmd.stderr assert "isn't a valid value" in cmd.stderr
@pytest.mark.parametrize("provider", PROVIDERS) @pytest.mark.parametrize("provider", PROVIDERS)
@@ -167,7 +166,7 @@ def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
cmd = grm(args) cmd = grm(args)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert "did not specify any filters" in cmd.stderr.lower() assert len(cmd.stderr) == 0
if default or configtype == "toml": if default or configtype == "toml":
output = toml.loads(cmd.stdout) output = toml.loads(cmd.stdout)
@@ -249,7 +248,6 @@ def test_repos_find_remote_user_empty(
@pytest.mark.parametrize("force_ssh", [True, False]) @pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False]) @pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False]) @pytest.mark.parametrize("use_config", [True, False])
@pytest.mark.parametrize("override_remote_name", [True, False])
def test_repos_find_remote_user( def test_repos_find_remote_user(
provider, provider,
configtype, configtype,
@@ -260,7 +258,6 @@ def test_repos_find_remote_user(
force_ssh, force_ssh,
use_alternate_endpoint, use_alternate_endpoint,
use_config, use_config,
override_remote_name,
): ):
if use_config: if use_config:
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
@@ -276,9 +273,7 @@ def test_repos_find_remote_user(
if not worktree_default: if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n" cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh: if force_ssh:
cfg += "force_ssh = true\n" cfg += f"force_ssh = true\n"
if override_remote_name:
cfg += 'remote_name = "otherremote"\n'
if use_owner: if use_owner:
cfg += """ cfg += """
[filters] [filters]
@@ -315,8 +310,6 @@ def test_repos_find_remote_user(
args += ["--user", "myuser1"] args += ["--user", "myuser1"]
if force_ssh: if force_ssh:
args += ["--force-ssh"] args += ["--force-ssh"]
if override_remote_name:
args += ["--remote-name", "otherremote"]
if not worktree_default: if not worktree_default:
args += ["--worktree", str(worktree).lower()] args += ["--worktree", str(worktree).lower()]
if use_alternate_endpoint: if use_alternate_endpoint:
@@ -357,10 +350,7 @@ def test_repos_find_remote_user(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
if override_remote_name: assert repo["remotes"][0]["name"] == provider
assert repo["remotes"][0]["name"] == "otherremote"
else:
assert repo["remotes"][0]["name"] == "origin"
if force_ssh or i == 1: if force_ssh or i == 1:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
@@ -476,7 +466,7 @@ def test_repos_find_remote_group(
if not worktree_default: if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n" cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh: if force_ssh:
cfg += "force_ssh = true\n" cfg += f"force_ssh = true\n"
if use_alternate_endpoint: if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
cfg += """ cfg += """
@@ -545,14 +535,14 @@ def test_repos_find_remote_group(
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
if force_ssh or i == 1: if force_ssh or i == 1:
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
== f"ssh://git@example.com/mygroup1/myproject{i}.git" == f"ssh://git@example.com/mygroup1/myproject{i}.git"
) )
assert repo["remotes"][0]["type"] == "ssh" assert repo["remotes"][0]["type"] == "ssh"
else: else:
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
== f"https://example.com/mygroup1/myproject{i}.git" == f"https://example.com/mygroup1/myproject{i}.git"
@@ -592,7 +582,7 @@ def test_repos_find_remote_user_and_group(
if not worktree_default: if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n" cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh: if force_ssh:
cfg += "force_ssh = true\n" cfg += f"force_ssh = true\n"
if use_alternate_endpoint: if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
cfg += """ cfg += """
@@ -669,7 +659,7 @@ def test_repos_find_remote_user_and_group(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh or i == 1: if force_ssh or i == 1:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
@@ -694,7 +684,7 @@ def test_repos_find_remote_user_and_group(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh or i == 1: if force_ssh or i == 1:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
@@ -743,7 +733,7 @@ def test_repos_find_remote_owner(
if not worktree_default: if not worktree_default:
cfg += f"worktree = {str(worktree).lower()}\n" cfg += f"worktree = {str(worktree).lower()}\n"
if force_ssh: if force_ssh:
cfg += "force_ssh = true\n" cfg += f"force_ssh = true\n"
if use_alternate_endpoint: if use_alternate_endpoint:
cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
cfg += """ cfg += """
@@ -824,7 +814,7 @@ def test_repos_find_remote_owner(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh or i == 1: if force_ssh or i == 1:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
@@ -847,7 +837,7 @@ def test_repos_find_remote_owner(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh or i == 1: if force_ssh or i == 1:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
@@ -871,14 +861,16 @@ def test_repos_find_remote_owner(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh: if force_ssh:
assert ( assert (
repo["remotes"][0]["url"] == "ssh://git@example.com/myuser2/myproject3.git" repo["remotes"][0]["url"] == f"ssh://git@example.com/myuser2/myproject3.git"
) )
assert repo["remotes"][0]["type"] == "ssh" assert repo["remotes"][0]["type"] == "ssh"
else: else:
assert repo["remotes"][0]["url"] == "https://example.com/myuser2/myproject3.git" assert (
repo["remotes"][0]["url"] == f"https://example.com/myuser2/myproject3.git"
)
assert repo["remotes"][0]["type"] == "https" assert repo["remotes"][0]["type"] == "https"
group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][ group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][
@@ -898,7 +890,7 @@ def test_repos_find_remote_owner(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh or i == 1: if force_ssh or i == 1:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
@@ -918,17 +910,17 @@ def test_repos_find_remote_owner(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh: if force_ssh:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
== "ssh://git@example.com/mygroup1/myproject4.git" == f"ssh://git@example.com/mygroup1/myproject4.git"
) )
assert repo["remotes"][0]["type"] == "ssh" assert repo["remotes"][0]["type"] == "ssh"
else: else:
assert ( assert (
repo["remotes"][0]["url"] repo["remotes"][0]["url"]
== "https://example.com/mygroup1/myproject4.git" == f"https://example.com/mygroup1/myproject4.git"
) )
assert repo["remotes"][0]["type"] == "https" assert repo["remotes"][0]["type"] == "https"
@@ -944,14 +936,15 @@ def test_repos_find_remote_owner(
assert repo["worktree_setup"] is (not worktree_default and worktree) assert repo["worktree_setup"] is (not worktree_default and worktree)
assert isinstance(repo["remotes"], list) assert isinstance(repo["remotes"], list)
assert len(repo["remotes"]) == 1 assert len(repo["remotes"]) == 1
assert repo["remotes"][0]["name"] == "origin" assert repo["remotes"][0]["name"] == provider
if force_ssh: if force_ssh:
assert ( assert (
repo["remotes"][0]["url"] == "ssh://git@example.com/mygroup2/myproject5.git" repo["remotes"][0]["url"]
== f"ssh://git@example.com/mygroup2/myproject5.git"
) )
assert repo["remotes"][0]["type"] == "ssh" assert repo["remotes"][0]["type"] == "ssh"
else: else:
assert ( assert (
repo["remotes"][0]["url"] == "https://example.com/mygroup2/myproject5.git" repo["remotes"][0]["url"] == f"https://example.com/mygroup2/myproject5.git"
) )
assert repo["remotes"][0]["type"] == "https" assert repo["remotes"][0]["type"] == "https"

View File

@@ -1,6 +1,8 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from helpers import RepoTree, grm import tempfile
from helpers import *
def test_repos_sync_worktree_clone(): def test_repos_sync_worktree_clone():

View File

@@ -1,21 +1,14 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os
import re
import subprocess
import tempfile import tempfile
import re
import textwrap import textwrap
import git
import pytest import pytest
from helpers import ( import toml
NonExistentPath, import git
TempGitFileRemote,
TempGitRepository, from helpers import *
checksum_directory,
grm,
shell,
)
templates = { templates = {
"repo_simple": { "repo_simple": {
@@ -298,9 +291,7 @@ def test_repos_sync_unmanaged_repos(configtype):
# this removes the prefix (root) from the path (unmanaged_repo) # this removes the prefix (root) from the path (unmanaged_repo)
unmanaged_repo_name = os.path.relpath(unmanaged_repo, root) unmanaged_repo_name = os.path.relpath(unmanaged_repo, root)
regex = f".*unmanaged.*{unmanaged_repo_name}" regex = f".*unmanaged.*{unmanaged_repo_name}"
assert any( assert any([re.match(regex, l) for l in cmd.stderr.lower().split("\n")])
[re.match(regex, line) for line in cmd.stderr.lower().split("\n")]
)
@pytest.mark.parametrize("configtype", ["toml", "yaml"]) @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@@ -312,7 +303,7 @@ def test_repos_sync_root_is_file(configtype):
cmd = grm(["repos", "sync", "config", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0 assert cmd.returncode != 0
assert "notadirectory" in cmd.stderr.lower() assert "not a directory" in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"]) @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@@ -383,7 +374,7 @@ def test_repos_sync_repo_in_subdirectory(configtype):
assert urls[0] == f"file://{remote}" assert urls[0] == f"file://{remote}"
cmd = grm(["repos", "sync", "config", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert "found unmanaged repository" not in cmd.stderr.lower() assert not "found unmanaged repository" in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"]) @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@@ -428,7 +419,7 @@ def test_repos_sync_nested_clone(configtype):
cmd = grm(["repos", "sync", "config", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
print(cmd.stdout) print(cmd.stdout)
print(cmd.stderr) print(cmd.stderr)
assert "found unmanaged repository" not in cmd.stderr.lower() assert not "found unmanaged repository" in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"]) @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@@ -729,14 +720,14 @@ def test_repos_sync_invalid_syntax(configtype):
with open(config.name, "w") as f: with open(config.name, "w") as f:
if configtype == "toml": if configtype == "toml":
f.write( f.write(
""" f"""
[[trees]] [[trees]]
root = invalid as there are no quotes ;) root = invalid as there are no quotes ;)
""" """
) )
elif configtype == "yaml": elif configtype == "yaml":
f.write( f.write(
""" f"""
trees: trees:
wrong: wrong:
indentation: indentation:
@@ -788,6 +779,8 @@ def test_repos_sync_normal_change_to_worktree(configtype):
cmd = grm(["repos", "sync", "config", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
templates["worktree_repo_with_remote"][configtype].format( templates["worktree_repo_with_remote"][configtype].format(
@@ -817,6 +810,8 @@ def test_repos_sync_worktree_change_to_normal(configtype):
cmd = grm(["repos", "sync", "config", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test")
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
templates["repo_with_remote"][configtype].format( templates["repo_with_remote"][configtype].format(

View File

@@ -1,21 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os
import pytest import pytest
from helpers import (
NonGitDir, from helpers import *
TempGitRepository,
TempGitRepositoryWorktree,
checksum_directory,
funcname,
grm,
shell,
)
def test_worktree_clean(): def test_worktree_clean():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert "test" in os.listdir(base_dir) assert "test" in os.listdir(base_dir)
@@ -26,7 +17,7 @@ def test_worktree_clean():
def test_worktree_clean_refusal_no_tracking_branch(): def test_worktree_clean_refusal_no_tracking_branch():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir) cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -40,7 +31,7 @@ def test_worktree_clean_refusal_no_tracking_branch():
def test_worktree_clean_refusal_uncommited_changes_new_file(): def test_worktree_clean_refusal_uncommited_changes_new_file():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -56,7 +47,7 @@ def test_worktree_clean_refusal_uncommited_changes_new_file():
def test_worktree_clean_refusal_uncommited_changes_changed_file(): def test_worktree_clean_refusal_uncommited_changes_changed_file():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -72,7 +63,7 @@ def test_worktree_clean_refusal_uncommited_changes_changed_file():
def test_worktree_clean_refusal_uncommited_changes_cleand_file(): def test_worktree_clean_refusal_uncommited_changes_cleand_file():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -90,7 +81,7 @@ def test_worktree_clean_refusal_uncommited_changes_cleand_file():
def test_worktree_clean_refusal_commited_changes(): def test_worktree_clean_refusal_commited_changes():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -108,7 +99,7 @@ def test_worktree_clean_refusal_commited_changes():
def test_worktree_clean_refusal_tracking_branch_mismatch(): def test_worktree_clean_refusal_tracking_branch_mismatch():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir) cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -126,7 +117,7 @@ def test_worktree_clean_refusal_tracking_branch_mismatch():
def test_worktree_clean_fail_from_subdir(): def test_worktree_clean_fail_from_subdir():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir) cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -157,18 +148,18 @@ def test_worktree_clean_non_git():
def test_worktree_clean_configured_default_branch( def test_worktree_clean_configured_default_branch(
configure_default_branch, branch_list_empty configure_default_branch, branch_list_empty
): ):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
if configure_default_branch: if configure_default_branch:
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
if branch_list_empty: if branch_list_empty:
f.write( f.write(
""" f"""
persistent_branches = [] persistent_branches = []
""" """
) )
else: else:
f.write( f.write(
""" f"""
persistent_branches = [ persistent_branches = [
"mybranch" "mybranch"
] ]

View File

@@ -2,12 +2,11 @@
import os.path import os.path
import git from helpers import *
from helpers import TempGitRepositoryWorktree, checksum_directory, funcname, grm, shell
def test_worktree_never_clean_persistent_branches(): def test_worktree_never_clean_persistent_branches():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write( f.write(
""" """
@@ -34,7 +33,7 @@ def test_worktree_never_clean_persistent_branches():
def test_worktree_clean_branch_merged_into_persistent(): def test_worktree_clean_branch_merged_into_persistent():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write( f.write(
""" """
@@ -73,7 +72,7 @@ def test_worktree_clean_branch_merged_into_persistent():
def test_worktree_no_clean_unmerged_branch(): def test_worktree_no_clean_unmerged_branch():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write( f.write(
""" """
@@ -106,7 +105,7 @@ def test_worktree_no_clean_unmerged_branch():
def test_worktree_delete_branch_merged_into_persistent(): def test_worktree_delete_branch_merged_into_persistent():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write( f.write(
""" """

View File

@@ -1,16 +1,8 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os import tempfile
from helpers import ( from helpers import *
EmptyDir,
NonGitDir,
TempGitRepository,
TempGitRepositoryWorktree,
checksum_directory,
funcname,
grm,
)
def test_convert(): def test_convert():
@@ -31,7 +23,7 @@ def test_convert():
def test_convert_already_worktree(): def test_convert_already_worktree():
with TempGitRepositoryWorktree.get(funcname()) as (git_dir, _commit): with TempGitRepositoryWorktree() as (git_dir, _commit):
before = checksum_directory(git_dir) before = checksum_directory(git_dir)
cmd = grm(["wt", "convert"], cwd=git_dir) cmd = grm(["wt", "convert"], cwd=git_dir)

View File

@@ -1,21 +1,15 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from helpers import *
import re import re
import git
import pytest import pytest
from helpers import ( import git
EmptyDir,
TempGitFileRemote,
TempGitRepositoryWorktree,
funcname,
grm,
shell,
)
def test_worktree_fetch(): def test_worktree_fetch():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit): with TempGitRepositoryWorktree() as (base_dir, root_commit):
with TempGitFileRemote() as (remote_path, _remote_sha): with TempGitFileRemote() as (remote_path, _remote_sha):
shell( shell(
f""" f"""
@@ -62,7 +56,7 @@ def test_worktree_fetch():
@pytest.mark.parametrize("has_changes", [True, False]) @pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False]) @pytest.mark.parametrize("stash", [True, False])
def test_worktree_pull(rebase, ffable, has_changes, stash): def test_worktree_pull(rebase, ffable, has_changes, stash):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, root_commit): with TempGitRepositoryWorktree() as (base_dir, root_commit):
with TempGitFileRemote() as (remote_path, _remote_sha): with TempGitFileRemote() as (remote_path, _remote_sha):
shell( shell(
f""" f"""

View File

@@ -1,11 +1,11 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os from helpers import *
import re import re
import git
import pytest import pytest
from helpers import TempGitRepositoryWorktree, funcname, grm, shell import git
@pytest.mark.parametrize("pull", [True, False]) @pytest.mark.parametrize("pull", [True, False])
@@ -14,7 +14,7 @@ from helpers import TempGitRepositoryWorktree, funcname, grm, shell
@pytest.mark.parametrize("has_changes", [True, False]) @pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False]) @pytest.mark.parametrize("stash", [True, False])
def test_worktree_rebase(pull, rebase, ffable, has_changes, stash): def test_worktree_rebase(pull, rebase, ffable, has_changes, stash):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _root_commit): with TempGitRepositoryWorktree() as (base_dir, _root_commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write('persistent_branches = ["mybasebranch"]') f.write('persistent_branches = ["mybasebranch"]')

View File

@@ -1,22 +1,15 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os
import re import re
from helpers import *
import pytest import pytest
from helpers import (
NonGitDir,
TempGitRepository,
TempGitRepositoryWorktree,
funcname,
grm,
shell,
)
@pytest.mark.parametrize("has_config", [True, False]) @pytest.mark.parametrize("has_config", [True, False])
def test_worktree_status(has_config): def test_worktree_status(has_config):
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
if has_config: if has_config:
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write("") f.write("")
@@ -31,7 +24,7 @@ def test_worktree_status(has_config):
def test_worktree_status_fail_from_subdir(): def test_worktree_status_fail_from_subdir():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir) cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -58,7 +51,7 @@ def test_worktree_status_non_git():
def test_worktree_status_warn_with_non_worktree_dir(): def test_worktree_status_warn_with_non_worktree_dir():
with TempGitRepositoryWorktree.get(funcname()) as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
cmd = grm(["wt", "add", "test"], cwd=base_dir) cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0

File diff suppressed because it is too large Load Diff

View File

@@ -1,163 +0,0 @@
#!/usr/bin/env bash
set -o nounset
set -o errexit
set -o pipefail
usage() {
printf '%s\n' "usage: $0 (major|minor|patch)" >&2
}
if (($# != 1)); then
usage
exit 1
fi
current_version="$(grep '^version \?=' Cargo.toml | head -1 | cut -d '=' -f 2 | tr -d " '"'"')"
major="$(printf '%s' "${current_version}" | grep -oP '^\d+')"
minor="$(printf '%s' "${current_version}" | grep -oP '\.\d+\.' | tr -d '.')"
patch="$(printf '%s' "${current_version}" | grep -oP '\d+$' | tr -d '.')"
case "$1" in
major)
((major++)) || true
minor=0
patch=0
printf '%s\n' "Are you sure you want to release 1.x?" >&2
exit 1
;;
minor)
((minor++)) || true
patch=0
;;
patch)
((patch++)) || true
;;
*)
usage
exit 1
;;
esac
new_version="${major}.${minor}.${patch}"
if ! [[ "${new_version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
printf '%s\n' 'Version has to a complete semver' >&2
exit 1
fi
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "${current_branch}" != "develop" ]]; then
printf '%s\n' 'You need to be on develop' >&2
exit 1
fi
gitstatus="$(git status --porcelain)"
if [[ -n "${gitstatus}" ]]; then
printf '%s\n' 'There are uncommitted changes' >&2
exit 1
fi
if git tag --list "v${new_version}" | grep -q .; then
printf 'Tag %s already exists\n' "v${new_version}" >&2
exit 1
fi
for remote in $(git remote); do
if git ls-remote --tags "${remote}" | grep -q "refs/tags/v${new_version}$"; then
printf 'Tag %s already exists on %s\n' "v${new_version}" "${remote}" >&2
exit 1
fi
done
git fetch --all
for remote in $(git remote); do
for branch in master develop; do
if ! git diff --quiet "${remote}/${branch}..${branch}"; then
printf 'Remote branch %s/%s not up to date, synchronize first!\n' "${remote}" "${branch}" >&2
exit 1
fi
done
done
if ! git merge-base --is-ancestor master develop; then
printf '%s\n' 'Develop is not a straight descendant of master, rebase!' >&2
exit 1
fi
changes="$(git log --oneline master..develop | wc -l)"
if ((changes == 0)); then
printf '%s\n' 'No changes between master and develop?' >&2
exit 1
fi
sed -i "0,/^version/{s/^version.*$/version = \"${new_version}\"/}" Cargo.toml
cargo update --package git-repo-manager --precise "${new_version}"
diff="$(git diff --numstat)"
if (($(printf '%s\n' "${diff}" | wc -l || true) != 2)); then
printf '%s\n' 'Weird changes detected, bailing' >&2
exit 1
fi
if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.lock$'; then
printf '%s\n' 'Weird changes detected, bailing' >&2
exit 1
fi
if ! printf '%s\n' "${diff}" | grep -Pq '^1\s+1\s+Cargo.toml$'; then
printf '%s\n' 'Weird changes detected, bailing' >&2
exit 1
fi
git add Cargo.lock Cargo.toml
git commit -m "Release v${new_version}"
git switch master 2>/dev/null || { [[ -d "../master" ]] && cd "../master"; } || {
printf '%s\n' 'Could not change to master' >&2
exit 1
}
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "${current_branch}" != "master" ]]; then
printf '%s\n' 'Looks like branch switching to master did not work' >&2
exit 1
fi
git merge --no-ff --no-edit develop
git tag "v${new_version}"
for remote in $(git remote); do
while ! git push "${remote}" "v${new_version}" master; do
:
done
done
git switch develop 2>/dev/null || { [[ -d "../develop" ]] && cd "../develop"; } || {
printf '%s\n' 'Could not change to develop' >&2
exit 1
}
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "${current_branch}" != "develop" ]]; then
printf '%s\n' 'Looks like branch switching to develop did not work' >&2
exit 1
fi
git merge --ff-only master
for remote in $(git remote); do
while ! git push "${remote}" develop; do
:
done
done
cargo publish
printf 'Published %s successfully\n' "${new_version}"
exit 0

View File

@@ -1,3 +0,0 @@
[toolchain]
channel = "stable"
targets = ["x86_64-unknown-linux-musl"]

View File

@@ -33,7 +33,6 @@ pub struct ConfigTrees {
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ConfigProviderFilter { pub struct ConfigProviderFilter {
pub access: Option<bool>, pub access: Option<bool>,
pub owner: Option<bool>, pub owner: Option<bool>,
@@ -42,7 +41,6 @@ pub struct ConfigProviderFilter {
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ConfigProvider { pub struct ConfigProvider {
pub provider: RemoteProvider, pub provider: RemoteProvider,
pub token_command: String, pub token_command: String,
@@ -54,8 +52,7 @@ pub struct ConfigProvider {
pub api_url: Option<String>, pub api_url: Option<String>,
pub worktree: Option<bool>, pub worktree: Option<bool>,
pub init_worktree: Option<bool>,
pub remote_name: Option<String>,
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@@ -183,12 +180,6 @@ impl Config {
filters.access.unwrap_or(false), filters.access.unwrap_or(false),
); );
if filter.empty() {
print_warning(
"The configuration does not contain any filters, so no repos will match",
);
}
let repos = match config.provider { let repos = match config.provider {
RemoteProvider::Github => { RemoteProvider::Github => {
match provider::Github::new(filter, token, config.api_url) { match provider::Github::new(filter, token, config.api_url) {
@@ -201,7 +192,6 @@ impl Config {
.get_repos( .get_repos(
config.worktree.unwrap_or(false), config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false), config.force_ssh.unwrap_or(false),
config.remote_name,
)? )?
} }
RemoteProvider::Gitlab => { RemoteProvider::Gitlab => {
@@ -215,7 +205,6 @@ impl Config {
.get_repos( .get_repos(
config.worktree.unwrap_or(false), config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false), config.force_ssh.unwrap_or(false),
config.remote_name,
)? )?
} }
}; };
@@ -248,7 +237,7 @@ impl Config {
pub fn normalize(&mut self) { pub fn normalize(&mut self) {
if let Config::ConfigTrees(config) = self { if let Config::ConfigTrees(config) = self {
let home = path::env_home(); let home = path::env_home().display().to_string();
for tree in &mut config.trees_mut().iter_mut() { for tree in &mut config.trees_mut().iter_mut() {
if tree.root.starts_with(&home) { if tree.root.starts_with(&home) {
// The tilde is not handled differently, it's just a normal path component for `Path`. // The tilde is not handled differently, it's just a normal path component for `Path`.
@@ -306,7 +295,7 @@ pub fn read_config<'a, T>(path: &str) -> Result<T, String>
where where
T: for<'de> serde::Deserialize<'de>, T: for<'de> serde::Deserialize<'de>,
{ {
let content = match std::fs::read_to_string(path) { let content = match std::fs::read_to_string(&path) {
Ok(s) => s, Ok(s) => s,
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(

View File

@@ -1,4 +1,4 @@
use clap::Parser; use clap::{AppSettings, Parser};
#[derive(Parser)] #[derive(Parser)]
#[clap( #[clap(
@@ -7,6 +7,7 @@ use clap::Parser;
author = clap::crate_authors!("\n"), author = clap::crate_authors!("\n"),
about = clap::crate_description!(), about = clap::crate_description!(),
long_version = clap::crate_version!(), long_version = clap::crate_version!(),
global_setting(AppSettings::DeriveDisplayOrder),
propagate_version = true, propagate_version = true,
)] )]
pub struct Opts { pub struct Opts {
@@ -64,15 +65,7 @@ pub struct FindLocalArgs {
pub path: String, pub path: String,
#[clap( #[clap(
short, arg_enum,
long,
help = "Exclude repositories that match the given regex",
name = "REGEX"
)]
pub exclude: Option<String>,
#[clap(
value_enum,
short, short,
long, long,
help = "Format to produce", help = "Format to produce",
@@ -92,7 +85,7 @@ pub struct FindConfigArgs {
pub config: String, pub config: String,
#[clap( #[clap(
value_enum, arg_enum,
short, short,
long, long,
help = "Format to produce", help = "Format to produce",
@@ -107,14 +100,11 @@ pub struct FindRemoteArgs {
#[clap(short, long, help = "Path to the configuration file")] #[clap(short, long, help = "Path to the configuration file")]
pub config: Option<String>, pub config: Option<String>,
#[clap(value_enum, short, long, help = "Remote provider to use")] #[clap(arg_enum, short, long, help = "Remote provider to use")]
pub provider: RemoteProvider, pub provider: RemoteProvider,
#[clap(short, long, help = "Name of the remote to use")]
pub remote_name: Option<String>,
#[clap( #[clap(
action = clap::ArgAction::Append, multiple_occurrences = true,
name = "user", name = "user",
long, long,
help = "Users to get repositories from" help = "Users to get repositories from"
@@ -122,7 +112,7 @@ pub struct FindRemoteArgs {
pub users: Vec<String>, pub users: Vec<String>,
#[clap( #[clap(
action = clap::ArgAction::Append, multiple_occurrences = true,
name = "group", name = "group",
long, long,
help = "Groups to get repositories from" help = "Groups to get repositories from"
@@ -145,7 +135,7 @@ pub struct FindRemoteArgs {
pub root: String, pub root: String,
#[clap( #[clap(
value_enum, arg_enum,
short, short,
long, long,
help = "Format to produce", help = "Format to produce",
@@ -156,10 +146,11 @@ pub struct FindRemoteArgs {
#[clap( #[clap(
long, long,
help = "Use worktree setup for repositories", help = "Use worktree setup for repositories",
value_parser = ["true", "false"], possible_values = &["true", "false"],
default_value = "false", default_value = "false",
default_missing_value = "true", default_missing_value = "true",
num_args = 0..=1, min_values = 0,
max_values = 1,
)] )]
pub worktree: String, pub worktree: String,
@@ -180,11 +171,12 @@ pub struct Config {
#[clap( #[clap(
long, long,
value_parser = ["true", "false"],
help = "Check out the default worktree after clone", help = "Check out the default worktree after clone",
possible_values = &["true", "false"],
default_value = "true", default_value = "true",
default_missing_value = "true", default_missing_value = "true",
num_args = 0..=1, min_values = 0,
max_values = 1,
)] )]
pub init_worktree: String, pub init_worktree: String,
} }
@@ -194,14 +186,11 @@ pub type RemoteProvider = super::provider::RemoteProvider;
#[derive(Parser)] #[derive(Parser)]
#[clap()] #[clap()]
pub struct SyncRemoteArgs { pub struct SyncRemoteArgs {
#[clap(value_enum, short, long, help = "Remote provider to use")] #[clap(arg_enum, short, long, help = "Remote provider to use")]
pub provider: RemoteProvider, pub provider: RemoteProvider,
#[clap(short, long, help = "Name of the remote to use")]
pub remote_name: Option<String>,
#[clap( #[clap(
action = clap::ArgAction::Append, multiple_occurrences = true,
name = "user", name = "user",
long, long,
help = "Users to get repositories from" help = "Users to get repositories from"
@@ -209,7 +198,7 @@ pub struct SyncRemoteArgs {
pub users: Vec<String>, pub users: Vec<String>,
#[clap( #[clap(
action = clap::ArgAction::Append, multiple_occurrences = true,
name = "group", name = "group",
long, long,
help = "Groups to get repositories from" help = "Groups to get repositories from"
@@ -234,10 +223,11 @@ pub struct SyncRemoteArgs {
#[clap( #[clap(
long, long,
help = "Use worktree setup for repositories", help = "Use worktree setup for repositories",
value_parser = ["true", "false"], possible_values = &["true", "false"],
default_value = "false", default_value = "false",
default_missing_value = "true", default_missing_value = "true",
num_args = 0..=1, min_values = 0,
max_values = 1,
)] )]
pub worktree: String, pub worktree: String,
@@ -247,10 +237,11 @@ pub struct SyncRemoteArgs {
#[clap( #[clap(
long, long,
help = "Check out the default worktree after clone", help = "Check out the default worktree after clone",
value_parser = ["true", "false"], possible_values = &["true", "false"],
default_value = "true", default_value = "true",
default_missing_value = "true", default_missing_value = "true",
num_args = 0..=1, min_values = 0,
max_values = 1,
)] )]
pub init_worktree: String, pub init_worktree: String,
} }
@@ -262,7 +253,7 @@ pub struct OptionalConfig {
pub config: Option<String>, pub config: Option<String>,
} }
#[derive(clap::ValueEnum, Clone)] #[derive(clap::ArgEnum, Clone)]
pub enum ConfigFormat { pub enum ConfigFormat {
Yaml, Yaml,
Toml, Toml,
@@ -302,7 +293,7 @@ pub struct WorktreeAddArgs {
#[clap(short = 't', long = "track", help = "Remote branch to track")] #[clap(short = 't', long = "track", help = "Remote branch to track")]
pub track: Option<String>, pub track: Option<String>,
#[clap(long = "no-track", help = "Disable tracking")] #[clap(long = "--no-track", help = "Disable tracking")]
pub no_track: bool, pub no_track: bool,
} }
#[derive(Parser)] #[derive(Parser)]
@@ -331,19 +322,22 @@ pub struct WorktreeFetchArgs {}
#[derive(Parser)] #[derive(Parser)]
pub struct WorktreePullArgs { pub struct WorktreePullArgs {
#[clap(long = "rebase", help = "Perform a rebase instead of a fast-forward")] #[clap(long = "--rebase", help = "Perform a rebase instead of a fast-forward")]
pub rebase: bool, pub rebase: bool,
#[clap(long = "stash", help = "Stash & unstash changes before & after pull")] #[clap(long = "--stash", help = "Stash & unstash changes before & after pull")]
pub stash: bool, pub stash: bool,
} }
#[derive(Parser)] #[derive(Parser)]
pub struct WorktreeRebaseArgs { pub struct WorktreeRebaseArgs {
#[clap(long = "pull", help = "Perform a pull before rebasing")] #[clap(long = "--pull", help = "Perform a pull before rebasing")]
pub pull: bool, pub pull: bool,
#[clap(long = "rebase", help = "Perform a rebase when doing a pull")] #[clap(long = "--rebase", help = "Perform a rebase when doing a pull")]
pub rebase: bool, pub rebase: bool,
#[clap(long = "stash", help = "Stash & unstash changes before & after rebase")] #[clap(
long = "--stash",
help = "Stash & unstash changes before & after rebase"
)]
pub stash: bool, pub stash: bool,
} }

View File

@@ -1,5 +1,3 @@
#![forbid(unsafe_code)]
use std::path::Path; use std::path::Path;
use std::process; use std::process;
@@ -38,7 +36,7 @@ fn main() {
} }
} }
Err(error) => { Err(error) => {
print_error(&format!("Sync error: {}", error)); print_error(&format!("Error syncing trees: {}", error));
process::exit(1); process::exit(1);
} }
} }
@@ -55,10 +53,6 @@ fn main() {
let filter = let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access); provider::Filter::new(args.users, args.groups, args.owner, args.access);
if filter.empty() {
print_warning("You did not specify any filters, so no repos will match");
}
let worktree = args.worktree == "true"; let worktree = args.worktree == "true";
let repos = match args.provider { let repos = match args.provider {
@@ -66,29 +60,21 @@ fn main() {
match provider::Github::new(filter, token, args.api_url) { match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider, Ok(provider) => provider,
Err(error) => { Err(error) => {
print_error(&format!("Sync error: {}", error)); print_error(&format!("Error: {}", error));
process::exit(1); process::exit(1);
} }
} }
.get_repos( .get_repos(worktree, args.force_ssh)
worktree,
args.force_ssh,
args.remote_name,
)
} }
cmd::RemoteProvider::Gitlab => { cmd::RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, args.api_url) { match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider, Ok(provider) => provider,
Err(error) => { Err(error) => {
print_error(&format!("Sync error: {}", error)); print_error(&format!("Error: {}", error));
process::exit(1); process::exit(1);
} }
} }
.get_repos( .get_repos(worktree, args.force_ssh)
worktree,
args.force_ssh,
args.remote_name,
)
} }
}; };
@@ -116,13 +102,13 @@ fn main() {
} }
} }
Err(error) => { Err(error) => {
print_error(&format!("Sync error: {}", error)); print_error(&format!("Error syncing trees: {}", error));
process::exit(1); process::exit(1);
} }
} }
} }
Err(error) => { Err(error) => {
print_error(&format!("Sync error: {}", error)); print_error(&format!("Error: {}", error));
process::exit(1); process::exit(1);
} }
} }
@@ -199,8 +185,7 @@ fn main() {
} }
}; };
let (found_repos, warnings) = match find_in_tree(&path, args.exclude.as_deref()) let (found_repos, warnings) = match find_in_tree(&path) {
{
Ok((repos, warnings)) => (repos, warnings), Ok((repos, warnings)) => (repos, warnings),
Err(error) => { Err(error) => {
print_error(&error); print_error(&error);
@@ -283,10 +268,6 @@ fn main() {
filters.access.unwrap_or(false), filters.access.unwrap_or(false),
); );
if filter.empty() {
print_warning("You did not specify any filters, so no repos will match");
}
let repos = match config.provider { let repos = match config.provider {
provider::RemoteProvider::Github => { provider::RemoteProvider::Github => {
match match provider::Github::new(filter, token, config.api_url) { match match provider::Github::new(filter, token, config.api_url) {
@@ -299,7 +280,6 @@ fn main() {
.get_repos( .get_repos(
config.worktree.unwrap_or(false), config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false), config.force_ssh.unwrap_or(false),
config.remote_name,
) { ) {
Ok(provider) => provider, Ok(provider) => provider,
Err(error) => { Err(error) => {
@@ -319,7 +299,6 @@ fn main() {
.get_repos( .get_repos(
config.worktree.unwrap_or(false), config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false), config.force_ssh.unwrap_or(false),
config.remote_name,
) { ) {
Ok(provider) => provider, Ok(provider) => provider,
Err(error) => { Err(error) => {
@@ -392,10 +371,6 @@ fn main() {
let filter = let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access); provider::Filter::new(args.users, args.groups, args.owner, args.access);
if filter.empty() {
print_warning("You did not specify any filters, so no repos will match");
}
let worktree = args.worktree == "true"; let worktree = args.worktree == "true";
let repos = match args.provider { let repos = match args.provider {
@@ -407,11 +382,7 @@ fn main() {
process::exit(1); process::exit(1);
} }
} }
.get_repos( .get_repos(worktree, args.force_ssh)
worktree,
args.force_ssh,
args.remote_name,
)
} }
cmd::RemoteProvider::Gitlab => { cmd::RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, args.api_url) { match provider::Gitlab::new(filter, token, args.api_url) {
@@ -421,11 +392,7 @@ fn main() {
process::exit(1); process::exit(1);
} }
} }
.get_repos( .get_repos(worktree, args.force_ssh)
worktree,
args.force_ssh,
args.remote_name,
)
} }
}; };
@@ -496,9 +463,6 @@ fn main() {
match args.action { match args.action {
cmd::WorktreeAction::Add(action_args) => { cmd::WorktreeAction::Add(action_args) => {
if action_args.track.is_some() && action_args.no_track {
print_warning("You are using --track and --no-track at the same time. --track will be ignored");
}
let track = match &action_args.track { let track = match &action_args.track {
Some(branch) => { Some(branch) => {
let split = branch.split_once('/'); let split = branch.split_once('/');
@@ -520,20 +484,29 @@ fn main() {
None => None, None => None,
}; };
let mut name: &str = &action_args.name;
let subdirectory;
let split = name.split_once('/');
match split {
None => subdirectory = None,
Some(split) => {
if split.0.is_empty() || split.1.is_empty() {
print_error("Worktree name cannot start or end with a slash");
process::exit(1);
} else {
(subdirectory, name) = (Some(Path::new(split.0)), split.1);
}
}
}
match worktree::add_worktree( match worktree::add_worktree(
&cwd, &cwd,
&action_args.name, name,
subdirectory,
track, track,
action_args.no_track, action_args.no_track,
) { ) {
Ok(warnings) => { Ok(_) => print_success(&format!("Worktree {} created", &action_args.name)),
if let Some(warnings) = warnings {
for warning in warnings {
print_warning(&warning);
}
}
print_success(&format!("Worktree {} created", &action_args.name));
}
Err(error) => { Err(error) => {
print_error(&format!("Error creating worktree: {}", error)); print_error(&format!("Error creating worktree: {}", error));
process::exit(1); process::exit(1);
@@ -541,6 +514,8 @@ fn main() {
} }
} }
cmd::WorktreeAction::Delete(action_args) => { cmd::WorktreeAction::Delete(action_args) => {
let worktree_dir = cwd.join(&action_args.name);
let worktree_config = match repo::read_worktree_root_config(&cwd) { let worktree_config = match repo::read_worktree_root_config(&cwd) {
Ok(config) => config, Ok(config) => config,
Err(error) => { Err(error) => {
@@ -558,9 +533,8 @@ fn main() {
}); });
match repo.remove_worktree( match repo.remove_worktree(
&cwd,
&action_args.name, &action_args.name,
Path::new(&action_args.name), &worktree_dir,
action_args.force, action_args.force,
&worktree_config, &worktree_config,
) { ) {

View File

@@ -1,4 +1,5 @@
#![forbid(unsafe_code)] #![feature(io_error_more)]
#![feature(const_option_ext)]
use std::path::Path; use std::path::Path;
@@ -12,27 +13,19 @@ pub mod table;
pub mod tree; pub mod tree;
pub mod worktree; pub mod worktree;
const BRANCH_NAMESPACE_SEPARATOR: &str = "/";
/// Find all git repositories under root, recursively /// Find all git repositories under root, recursively
/// ///
/// The bool in the return value specifies whether there is a repository /// The bool in the return value specifies whether there is a repository
/// in root itself. /// in root itself.
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
fn find_repos( fn find_repos(root: &Path) -> Result<Option<(Vec<repo::Repo>, Vec<String>, bool)>, String> {
root: &Path,
exclusion_pattern: Option<&str>,
) -> Result<Option<(Vec<repo::Repo>, Vec<String>, bool)>, String> {
let mut repos: Vec<repo::Repo> = Vec::new(); let mut repos: Vec<repo::Repo> = Vec::new();
let mut repo_in_root = false; let mut repo_in_root = false;
let mut warnings = Vec::new(); let mut warnings = Vec::new();
let exlusion_regex: regex::Regex = regex::Regex::new(exclusion_pattern.unwrap_or(r"^$"))
.map_err(|e| format!("invalid regex: {e}"))?;
for path in tree::find_repo_paths(root)? { for path in tree::find_repo_paths(root)? {
if exclusion_pattern.is_some() && exlusion_regex.is_match(&path::path_as_string(&path)) {
warnings.push(format!("[skipped] {}", &path::path_as_string(&path)));
continue;
}
let is_worktree = repo::RepoHandle::detect_worktree(&path); let is_worktree = repo::RepoHandle::detect_worktree(&path);
if path == root { if path == root {
repo_in_root = true; repo_in_root = true;
@@ -71,13 +64,12 @@ fn find_repos(
let name = remote.name(); let name = remote.name();
let url = remote.url(); let url = remote.url();
let remote_type = match repo::detect_remote_type(&url) { let remote_type = match repo::detect_remote_type(&url) {
Ok(t) => t, Some(t) => t,
Err(e) => { None => {
warnings.push(format!( warnings.push(format!(
"{}: Could not handle URL {}. Reason: {}", "{}: Could not detect remote type of \"{}\"",
&path::path_as_string(&path), &path::path_as_string(&path),
&url, &url
e
)); ));
continue; continue;
} }
@@ -115,7 +107,7 @@ fn find_repos(
}, },
) )
} else { } else {
let name = path.strip_prefix(root).unwrap(); let name = path.strip_prefix(&root).unwrap();
let namespace = name.parent().unwrap(); let namespace = name.parent().unwrap();
( (
if namespace != Path::new("") { if namespace != Path::new("") {
@@ -139,14 +131,10 @@ fn find_repos(
Ok(Some((repos, warnings, repo_in_root))) Ok(Some((repos, warnings, repo_in_root)))
} }
pub fn find_in_tree( pub fn find_in_tree(path: &Path) -> Result<(tree::Tree, Vec<String>), String> {
path: &Path,
exclusion_pattern: Option<&str>,
) -> Result<(tree::Tree, Vec<String>), String> {
let mut warnings = Vec::new(); let mut warnings = Vec::new();
let (repos, repo_in_root): (Vec<repo::Repo>, bool) = match find_repos(path, exclusion_pattern)? let (repos, repo_in_root): (Vec<repo::Repo>, bool) = match find_repos(path)? {
{
Some((vec, mut repo_warnings, repo_in_root)) => { Some((vec, mut repo_warnings, repo_in_root)) => {
warnings.append(&mut repo_warnings); warnings.append(&mut repo_warnings);
(vec, repo_in_root) (vec, repo_in_root)

View File

@@ -47,9 +47,9 @@ pub fn path_as_string(path: &Path) -> String {
path.to_path_buf().into_os_string().into_string().unwrap() path.to_path_buf().into_os_string().into_string().unwrap()
} }
pub fn env_home() -> String { pub fn env_home() -> PathBuf {
match std::env::var("HOME") { match std::env::var("HOME") {
Ok(path) => path, Ok(path) => Path::new(&path).to_path_buf(),
Err(e) => { Err(e) => {
print_error(&format!("Unable to read HOME: {}", e)); print_error(&format!("Unable to read HOME: {}", e));
process::exit(1); process::exit(1);
@@ -58,12 +58,16 @@ pub fn env_home() -> String {
} }
pub fn expand_path(path: &Path) -> PathBuf { pub fn expand_path(path: &Path) -> PathBuf {
fn home_dir() -> Option<PathBuf> {
Some(env_home())
}
let expanded_path = match shellexpand::full_with_context( let expanded_path = match shellexpand::full_with_context(
&path_as_string(path), &path_as_string(path),
|| Some(env_home()), home_dir,
|name| -> Result<Option<String>, &'static str> { |name| -> Result<Option<String>, &'static str> {
match name { match name {
"HOME" => Ok(Some(env_home())), "HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
_ => Ok(None), _ => Ok(None),
} }
}, },

View File

@@ -8,11 +8,10 @@ use super::JsonError;
use super::Project; use super::Project;
use super::Provider; use super::Provider;
const PROVIDER_NAME: &str = "github";
const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json"; const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json";
const GITHUB_API_BASEURL: &str = match option_env!("GITHUB_API_BASEURL") { const GITHUB_API_BASEURL: &str =
Some(url) => url, option_env!("GITHUB_API_BASEURL").unwrap_or("https://api.github.com");
None => "https://api.github.com",
};
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct GithubProject { pub struct GithubProject {
@@ -89,6 +88,10 @@ impl Provider for Github {
}) })
} }
fn name(&self) -> &str {
PROVIDER_NAME
}
fn filter(&self) -> &Filter { fn filter(&self) -> &Filter {
&self.filter &self.filter
} }
@@ -133,8 +136,8 @@ impl Provider for Github {
fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> { fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> {
Ok(super::call::<GithubUser, GithubApiErrorResponse>( Ok(super::call::<GithubUser, GithubApiErrorResponse>(
&format!("{GITHUB_API_BASEURL}/user"), &format!("{GITHUB_API_BASEURL}/user"),
Self::auth_header_key(), &Self::auth_header_key(),
self.secret_token(), &self.secret_token(),
Some(ACCEPT_HEADER_JSON), Some(ACCEPT_HEADER_JSON),
)? )?
.username) .username)

View File

@@ -8,11 +8,9 @@ use super::JsonError;
use super::Project; use super::Project;
use super::Provider; use super::Provider;
const PROVIDER_NAME: &str = "gitlab";
const ACCEPT_HEADER_JSON: &str = "application/json"; const ACCEPT_HEADER_JSON: &str = "application/json";
const GITLAB_API_BASEURL: &str = match option_env!("GITLAB_API_BASEURL") { const GITLAB_API_BASEURL: &str = option_env!("GITLAB_API_BASEURL").unwrap_or("https://gitlab.com");
Some(url) => url,
None => "https://gitlab.com",
};
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
@@ -107,6 +105,10 @@ impl Provider for Gitlab {
}) })
} }
fn name(&self) -> &str {
PROVIDER_NAME
}
fn filter(&self) -> &Filter { fn filter(&self) -> &Filter {
&self.filter &self.filter
} }
@@ -155,8 +157,8 @@ impl Provider for Gitlab {
fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> { fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> {
Ok(super::call::<GitlabUser, GitlabApiErrorResponse>( Ok(super::call::<GitlabUser, GitlabApiErrorResponse>(
&format!("{}/api/v4/user", self.api_url()), &format!("{}/api/v4/user", self.api_url()),
Self::auth_header_key(), &Self::auth_header_key(),
self.secret_token(), &self.secret_token(),
Some(ACCEPT_HEADER_JSON), Some(ACCEPT_HEADER_JSON),
)? )?
.username) .username)

View File

@@ -14,9 +14,7 @@ use super::repo;
use std::collections::HashMap; use std::collections::HashMap;
const DEFAULT_REMOTE_NAME: &str = "origin"; #[derive(Debug, Deserialize, Serialize, clap::ArgEnum, Clone)]
#[derive(Debug, Deserialize, Serialize, clap::ValueEnum, Clone)]
pub enum RemoteProvider { pub enum RemoteProvider {
#[serde(alias = "github", alias = "GitHub")] #[serde(alias = "github", alias = "GitHub")]
Github, Github,
@@ -89,10 +87,6 @@ impl Filter {
access, access,
} }
} }
pub fn empty(&self) -> bool {
self.users.is_empty() && self.groups.is_empty() && !self.owner && !self.access
}
} }
pub enum ApiErrorResponse<T> pub enum ApiErrorResponse<T>
@@ -128,6 +122,7 @@ pub trait Provider {
where where
Self: Sized; Self: Sized;
fn name(&self) -> &str;
fn filter(&self) -> &Filter; fn filter(&self) -> &Filter;
fn secret_token(&self) -> &auth::AuthToken; fn secret_token(&self) -> &auth::AuthToken;
fn auth_header_key() -> &'static str; fn auth_header_key() -> &'static str;
@@ -218,7 +213,6 @@ pub trait Provider {
&self, &self,
worktree_setup: bool, worktree_setup: bool,
force_ssh: bool, force_ssh: bool,
remote_name: Option<String>,
) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> { ) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> {
let mut repos = vec![]; let mut repos = vec![];
@@ -274,16 +268,12 @@ pub trait Provider {
} }
for group in &self.filter().groups { for group in &self.filter().groups {
let group_projects = self.get_group_projects(group).map_err(|error| { let group_projects = self
format!( .get_group_projects(group)
"group \"{}\": {}", .map_err(|error| match error {
group, ApiErrorResponse::Json(x) => x.to_string(),
match error { ApiErrorResponse::String(s) => s,
ApiErrorResponse::Json(x) => x.to_string(), })?;
ApiErrorResponse::String(s) => s,
}
)
})?;
for group_project in group_projects { for group_project in group_projects {
let mut already_present = false; let mut already_present = false;
for repo in &repos { for repo in &repos {
@@ -302,18 +292,16 @@ pub trait Provider {
let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new(); let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new();
let remote_name = remote_name.unwrap_or_else(|| DEFAULT_REMOTE_NAME.to_string());
for repo in repos { for repo in repos {
let namespace = repo.namespace(); let namespace = repo.namespace();
let mut repo = repo.into_repo_config(&remote_name, worktree_setup, force_ssh); let mut repo = repo.into_repo_config(&self.name(), worktree_setup, force_ssh);
// Namespace is already part of the hashmap key. I'm not too happy // Namespace is already part of the hashmap key. I'm not too happy
// about the data exchange format here. // about the data exchange format here.
repo.remove_namespace(); repo.remove_namespace();
ret.entry(namespace).or_default().push(repo); ret.entry(namespace).or_insert(vec![]).push(repo);
} }
Ok(ret) Ok(ret)

View File

@@ -233,7 +233,7 @@ impl Worktree {
let operation = operation.map_err(convert_libgit2_error)?; let operation = operation.map_err(convert_libgit2_error)?;
// This is required to preserve the commiter of the rebased // This is required to preserve the commiter of the rebased
// commits, which is the expected behavior. // commits, which is the expected behaviour.
let rebased_commit = repo let rebased_commit = repo
.0 .0
.find_commit(operation.id()) .find_commit(operation.id())
@@ -357,7 +357,7 @@ impl Worktree {
let operation = operation.map_err(convert_libgit2_error)?; let operation = operation.map_err(convert_libgit2_error)?;
// This is required to preserve the commiter of the rebased // This is required to preserve the commiter of the rebased
// commits, which is the expected behavior. // commits, which is the expected behaviour.
let rebased_commit = repo let rebased_commit = repo
.0 .0
.find_commit(operation.id()) .find_commit(operation.id())
@@ -406,78 +406,50 @@ mod tests {
fn check_ssh_remote() { fn check_ssh_remote() {
assert_eq!( assert_eq!(
detect_remote_type("ssh://git@example.com"), detect_remote_type("ssh://git@example.com"),
Ok(RemoteType::Ssh) Some(RemoteType::Ssh)
); );
assert_eq!(detect_remote_type("git@example.git"), Ok(RemoteType::Ssh)); assert_eq!(detect_remote_type("git@example.git"), Some(RemoteType::Ssh));
} }
#[test] #[test]
fn check_https_remote() { fn check_https_remote() {
assert_eq!( assert_eq!(
detect_remote_type("https://example.com"), detect_remote_type("https://example.com"),
Ok(RemoteType::Https) Some(RemoteType::Https)
); );
assert_eq!( assert_eq!(
detect_remote_type("https://example.com/test.git"), detect_remote_type("https://example.com/test.git"),
Ok(RemoteType::Https) Some(RemoteType::Https)
); );
} }
#[test] #[test]
fn check_file_remote() { fn check_file_remote() {
assert_eq!(detect_remote_type("file:///somedir"), Ok(RemoteType::File)); assert_eq!(
detect_remote_type("file:///somedir"),
Some(RemoteType::File)
);
} }
#[test] #[test]
fn check_invalid_remotes() { fn check_invalid_remotes() {
assert_eq!( assert_eq!(detect_remote_type("https//example.com"), None);
detect_remote_type("https//example.com"), assert_eq!(detect_remote_type("https:example.com"), None);
Err(String::from( assert_eq!(detect_remote_type("ssh//example.com"), None);
"The remote URL starts with an unimplemented protocol" assert_eq!(detect_remote_type("ssh:example.com"), None);
)) assert_eq!(detect_remote_type("git@example.com"), None);
);
assert_eq!(
detect_remote_type("https:example.com"),
Err(String::from(
"The remote URL starts with an unimplemented protocol",
))
);
assert_eq!(
detect_remote_type("ssh//example.com"),
Err(String::from(
"The remote URL starts with an unimplemented protocol",
))
);
assert_eq!(
detect_remote_type("ssh:example.com"),
Err(String::from(
"The remote URL starts with an unimplemented protocol",
))
);
assert_eq!(
detect_remote_type("git@example.com"),
Err(String::from(
"The remote URL starts with an unimplemented protocol",
))
);
} }
#[test] #[test]
#[should_panic]
fn check_unsupported_protocol_http() { fn check_unsupported_protocol_http() {
assert_eq!( detect_remote_type("http://example.com");
detect_remote_type("http://example.com"),
Err(String::from(
"Remotes using HTTP protocol are not supported",
))
);
} }
#[test] #[test]
#[should_panic]
fn check_unsupported_protocol_git() { fn check_unsupported_protocol_git() {
assert_eq!( detect_remote_type("git://example.com");
detect_remote_type("git://example.com"),
Err(String::from("Remotes using git protocol are not supported"))
);
} }
#[test] #[test]
@@ -501,31 +473,27 @@ mod tests {
} }
} }
pub fn detect_remote_type(remote_url: &str) -> Result<RemoteType, String> { pub fn detect_remote_type(remote_url: &str) -> Option<RemoteType> {
let git_regex = regex::Regex::new(r"^[a-zA-Z]+@.*$").unwrap(); let git_regex = regex::Regex::new(r"^[a-zA-Z]+@.*$").unwrap();
if remote_url.starts_with("ssh://") { if remote_url.starts_with("ssh://") {
return Ok(RemoteType::Ssh); return Some(RemoteType::Ssh);
} }
if git_regex.is_match(remote_url) && remote_url.ends_with(".git") { if git_regex.is_match(remote_url) && remote_url.ends_with(".git") {
return Ok(RemoteType::Ssh); return Some(RemoteType::Ssh);
} }
if remote_url.starts_with("https://") { if remote_url.starts_with("https://") {
return Ok(RemoteType::Https); return Some(RemoteType::Https);
} }
if remote_url.starts_with("file://") { if remote_url.starts_with("file://") {
return Ok(RemoteType::File); return Some(RemoteType::File);
} }
if remote_url.starts_with("http://") { if remote_url.starts_with("http://") {
return Err(String::from( unimplemented!("Remotes using HTTP protocol are not supported");
"Remotes using HTTP protocol are not supported",
));
} }
if remote_url.starts_with("git://") { if remote_url.starts_with("git://") {
return Err(String::from("Remotes using git protocol are not supported")); unimplemented!("Remotes using git protocol are not supported");
} }
Err(String::from( None
"The remote URL starts with an unimplemented protocol",
))
} }
pub struct RepoHandle(git2::Repository); pub struct RepoHandle(git2::Repository);
@@ -817,7 +785,7 @@ impl RepoHandle {
)) ))
})?; })?;
for entry in match std::fs::read_dir(root_dir) { for entry in match std::fs::read_dir(&root_dir) {
Ok(iterator) => iterator, Ok(iterator) => iterator,
Err(error) => { Err(error) => {
return Err(WorktreeConversionFailureReason::Error(format!( return Err(WorktreeConversionFailureReason::Error(format!(
@@ -1088,12 +1056,12 @@ impl RepoHandle {
// Note that <remote>/HEAD only exists after a normal clone, there is no way to get the // Note that <remote>/HEAD only exists after a normal clone, there is no way to get the
// remote HEAD afterwards. So this is a "best effort" approach. // remote HEAD afterwards. So this is a "best effort" approach.
if let Ok(remote_head) = self.find_remote_branch(remote_name, "HEAD") { if let Ok(remote_head) = self.find_remote_branch(&remote_name, "HEAD") {
if let Some(pointer_name) = remote_head.as_reference().symbolic_target() { if let Some(pointer_name) = remote_head.as_reference().symbolic_target() {
if let Some(local_branch_name) = if let Some(local_branch_name) =
pointer_name.strip_prefix(&format!("refs/remotes/{}/", remote_name)) pointer_name.strip_prefix(&format!("refs/remotes/{}/", remote_name))
{ {
return Ok(Some(self.find_local_branch(local_branch_name)?)); return Ok(Some(self.find_local_branch(&local_branch_name)?));
} else { } else {
eprintln!("Remote HEAD ({}) pointer is invalid", pointer_name); eprintln!("Remote HEAD ({}) pointer is invalid", pointer_name);
} }
@@ -1120,7 +1088,7 @@ impl RepoHandle {
if remotes.len() == 1 { if remotes.len() == 1 {
let remote_name = &remotes[0]; let remote_name = &remotes[0];
if let Some(default_branch) = self.get_remote_default_branch(remote_name)? { if let Some(default_branch) = self.get_remote_default_branch(&remote_name)? {
return Ok(default_branch); return Ok(default_branch);
} }
} else { } else {
@@ -1131,13 +1099,17 @@ impl RepoHandle {
} }
} }
if !default_branches.is_empty() if !default_branches.is_empty() {
&& (default_branches.len() == 1 if default_branches.len() == 1 {
|| default_branches return Ok(default_branches.remove(0));
} else {
if default_branches
.windows(2) .windows(2)
.all(|w| w[0].name() == w[1].name())) .all(|w| w[0].name() == w[1].name())
{ {
return Ok(default_branches.remove(0)); return Ok(default_branches.remove(0));
}
}
} }
} }
@@ -1185,21 +1157,18 @@ impl RepoHandle {
pub fn remove_worktree( pub fn remove_worktree(
&self, &self,
base_dir: &Path,
name: &str, name: &str,
worktree_dir: &Path, worktree_dir: &Path,
force: bool, force: bool,
worktree_config: &Option<WorktreeRootConfig>, worktree_config: &Option<WorktreeRootConfig>,
) -> Result<(), WorktreeRemoveFailureReason> { ) -> Result<(), WorktreeRemoveFailureReason> {
let fullpath = base_dir.join(worktree_dir); if !worktree_dir.exists() {
if !fullpath.exists() {
return Err(WorktreeRemoveFailureReason::Error(format!( return Err(WorktreeRemoveFailureReason::Error(format!(
"{} does not exist", "{} does not exist",
name name
))); )));
} }
let worktree_repo = RepoHandle::open(&fullpath, false).map_err(|error| { let worktree_repo = RepoHandle::open(worktree_dir, false).map_err(|error| {
WorktreeRemoveFailureReason::Error(format!("Error opening repo: {}", error)) WorktreeRemoveFailureReason::Error(format!("Error opening repo: {}", error))
})?; })?;
@@ -1211,11 +1180,12 @@ impl RepoHandle {
WorktreeRemoveFailureReason::Error(format!("Failed getting name of branch: {}", error)) WorktreeRemoveFailureReason::Error(format!("Failed getting name of branch: {}", error))
})?; })?;
if branch_name != name { if branch_name != name
&& !branch_name.ends_with(&format!("{}{}", super::BRANCH_NAMESPACE_SEPARATOR, name))
{
return Err(WorktreeRemoveFailureReason::Error(format!( return Err(WorktreeRemoveFailureReason::Error(format!(
"Branch \"{}\" is checked out in worktree \"{}\", this does not look correct", "Branch \"{}\" is checked out in worktree, this does not look correct",
&branch_name, &branch_name
&worktree_dir.display(),
))); )));
} }
@@ -1285,46 +1255,13 @@ impl RepoHandle {
} }
} }
// worktree_dir is a relative path, starting from base_dir. We walk it if let Err(e) = std::fs::remove_dir_all(&worktree_dir) {
// upwards (from subdirectory to parent directories) and remove each
// component, in case it is empty. Only the leaf directory can be
// removed unconditionally (as it contains the worktree itself).
if let Err(e) = std::fs::remove_dir_all(&fullpath) {
return Err(WorktreeRemoveFailureReason::Error(format!( return Err(WorktreeRemoveFailureReason::Error(format!(
"Error deleting {}: {}", "Error deleting {}: {}",
&worktree_dir.display(), &worktree_dir.display(),
e e
))); )));
} }
if let Some(current_dir) = worktree_dir.parent() {
for current_dir in current_dir.ancestors() {
let current_dir = base_dir.join(current_dir);
if current_dir
.read_dir()
.map_err(|error| {
WorktreeRemoveFailureReason::Error(format!(
"Error reading {}: {}",
&current_dir.display(),
error
))
})?
.next()
.is_none()
{
if let Err(e) = std::fs::remove_dir(&current_dir) {
return Err(WorktreeRemoveFailureReason::Error(format!(
"Error deleting {}: {}",
&worktree_dir.display(),
e
)));
}
} else {
break;
}
}
}
self.prune_worktree(name) self.prune_worktree(name)
.map_err(WorktreeRemoveFailureReason::Error)?; .map_err(WorktreeRemoveFailureReason::Error)?;
branch branch
@@ -1375,15 +1312,9 @@ impl RepoHandle {
}, },
}) })
{ {
let repo_dir = &directory.join(worktree.name()); let repo_dir = &directory.join(&worktree.name());
if repo_dir.exists() { if repo_dir.exists() {
match self.remove_worktree( match self.remove_worktree(worktree.name(), repo_dir, false, &config) {
directory,
worktree.name(),
Path::new(worktree.name()),
false,
&config,
) {
Ok(_) => print_success(&format!("Worktree {} deleted", &worktree.name())), Ok(_) => print_success(&format!("Worktree {} deleted", &worktree.name())),
Err(error) => match error { Err(error) => match error {
WorktreeRemoveFailureReason::Changes(changes) => { WorktreeRemoveFailureReason::Changes(changes) => {
@@ -1419,12 +1350,12 @@ impl RepoHandle {
.map_err(|error| format!("Getting worktrees failed: {}", error))?; .map_err(|error| format!("Getting worktrees failed: {}", error))?;
let mut unmanaged_worktrees = Vec::new(); let mut unmanaged_worktrees = Vec::new();
for entry in std::fs::read_dir(directory).map_err(|error| error.to_string())? { for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? {
let dirname = path::path_as_string( let dirname = path::path_as_string(
entry entry
.map_err(|error| error.to_string())? .map_err(|error| error.to_string())?
.path() .path()
.strip_prefix(directory) .strip_prefix(&directory)
// that unwrap() is safe as each entry is // that unwrap() is safe as each entry is
// guaranteed to be a subentry of &directory // guaranteed to be a subentry of &directory
.unwrap(), .unwrap(),
@@ -1508,7 +1439,7 @@ impl<'a> Branch<'a> {
} }
} }
impl<'a> Branch<'a> { impl Branch<'_> {
pub fn commit(&self) -> Result<Commit, String> { pub fn commit(&self) -> Result<Commit, String> {
Ok(Commit( Ok(Commit(
self.0 self.0
@@ -1518,15 +1449,6 @@ impl<'a> Branch<'a> {
)) ))
} }
pub fn commit_owned(self) -> Result<Commit<'a>, String> {
Ok(Commit(
self.0
.into_reference()
.peel_to_commit()
.map_err(convert_libgit2_error)?,
))
}
pub fn set_upstream(&mut self, remote_name: &str, branch_name: &str) -> Result<(), String> { pub fn set_upstream(&mut self, remote_name: &str, branch_name: &str) -> Result<(), String> {
self.0 self.0
.set_upstream(Some(&format!("{}/{}", remote_name, branch_name))) .set_upstream(Some(&format!("{}/{}", remote_name, branch_name)))
@@ -1620,7 +1542,7 @@ impl RemoteHandle<'_> {
pub fn is_pushable(&self) -> Result<bool, String> { pub fn is_pushable(&self) -> Result<bool, String> {
let remote_type = detect_remote_type(self.0.url().expect("Remote name is not valid utf-8")) let remote_type = detect_remote_type(self.0.url().expect("Remote name is not valid utf-8"))
.expect("Could not detect remote type"); .ok_or_else(|| String::from("Could not detect remote type"))?;
Ok(matches!(remote_type, RemoteType::Ssh | RemoteType::File)) Ok(matches!(remote_type, RemoteType::Ssh | RemoteType::File))
} }

View File

@@ -4,7 +4,6 @@ use super::repo;
use comfy_table::{Cell, Table}; use comfy_table::{Cell, Table};
use std::fmt::Write;
use std::path::Path; use std::path::Path;
fn add_table_header(table: &mut Table) { fn add_table_header(table: &mut Table) {
@@ -57,10 +56,9 @@ fn add_repo_status(
repo_status repo_status
.branches .branches
.iter() .iter()
.fold(String::new(), |mut s, (branch_name, remote_branch)| { .map(|(branch_name, remote_branch)| {
writeln!( format!(
&mut s, "branch: {}{}\n",
"branch: {}{}",
&branch_name, &branch_name,
&match remote_branch { &match remote_branch {
None => String::from(" <!local>"), None => String::from(" <!local>"),
@@ -80,9 +78,8 @@ fn add_repo_status(
} }
} }
) )
.unwrap();
s
}) })
.collect::<String>()
.trim(), .trim(),
&match is_worktree { &match is_worktree {
true => String::from(""), true => String::from(""),
@@ -94,10 +91,8 @@ fn add_repo_status(
repo_status repo_status
.remotes .remotes
.iter() .iter()
.fold(String::new(), |mut s, r| { .map(|r| format!("{}\n", r))
writeln!(&mut s, "{r}").unwrap(); .collect::<String>()
s
})
.trim(), .trim(),
]); ]);
@@ -116,7 +111,7 @@ pub fn get_worktree_status_table(
add_worktree_table_header(&mut table); add_worktree_table_header(&mut table);
for worktree in &worktrees { for worktree in &worktrees {
let worktree_dir = &directory.join(worktree.name()); let worktree_dir = &directory.join(&worktree.name());
if worktree_dir.exists() { if worktree_dir.exists() {
let repo = match repo::RepoHandle::open(worktree_dir, false) { let repo = match repo::RepoHandle::open(worktree_dir, false) {
Ok(repo) => repo, Ok(repo) => repo,
@@ -190,11 +185,7 @@ pub fn get_status_table(config: config::Config) -> Result<(Vec<Table>, Vec<Strin
} }
}; };
if let Err(err) = add_repo_status(&mut table, &repo.name, &repo_handle, repo.worktree_setup)?;
add_repo_status(&mut table, &repo.name, &repo_handle, repo.worktree_setup)
{
errors.push(format!("{}: Couldn't add repo status: {}", &repo.name, err));
}
} }
tables.push(table); tables.push(table);

View File

@@ -60,11 +60,7 @@ pub fn sync_trees(config: config::Config, init_worktree: bool) -> Result<bool, S
match find_unmanaged_repos(&root_path, &repos) { match find_unmanaged_repos(&root_path, &repos) {
Ok(repos) => { Ok(repos) => {
for path in repos.into_iter() { unmanaged_repos_absolute_paths.extend(repos);
if !unmanaged_repos_absolute_paths.contains(&path) {
unmanaged_repos_absolute_paths.push(path);
}
}
} }
Err(error) => { Err(error) => {
print_error(&format!("Error getting unmanaged repos: {}", error)); print_error(&format!("Error getting unmanaged repos: {}", error));
@@ -128,6 +124,8 @@ pub fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
"Failed to open \"{}\": {}", "Failed to open \"{}\": {}",
&path.display(), &path.display(),
match e.kind() { match e.kind() {
std::io::ErrorKind::NotADirectory =>
String::from("directory expected, but path is not a directory"),
std::io::ErrorKind::NotFound => String::from("not found"), std::io::ErrorKind::NotFound => String::from("not found"),
_ => format!("{:?}", e.kind()), _ => format!("{:?}", e.kind()),
} }
@@ -179,7 +177,7 @@ fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result
"Repo already exists, but is not using a worktree setup", "Repo already exists, but is not using a worktree setup",
)); ));
}; };
} else if repo.remotes.is_none() || repo.remotes.as_ref().unwrap().is_empty() { } else if matches!(&repo.remotes, None) || repo.remotes.as_ref().unwrap().is_empty() {
print_repo_action( print_repo_action(
&repo.name, &repo.name,
"Repository does not have remotes configured, initializing new", "Repository does not have remotes configured, initializing new",
@@ -224,7 +222,7 @@ fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result
if newly_created && repo.worktree_setup && init_worktree { if newly_created && repo.worktree_setup && init_worktree {
match repo_handle.default_branch() { match repo_handle.default_branch() {
Ok(branch) => { Ok(branch) => {
worktree::add_worktree(&repo_path, &branch.name()?, None, false)?; worktree::add_worktree(&repo_path, &branch.name()?, None, None, false)?;
} }
Err(_error) => print_repo_error( Err(_error) => print_repo_error(
&repo.name, &repo.name,

File diff suppressed because it is too large Load Diff