94 Commits
v0.5 ... v0.7.1

Author SHA1 Message Date
e75aead3a8 Release v0.7.1 2022-05-27 23:37:54 +02:00
dca2b3c9b4 Justfile: Add build targets 2022-05-27 23:37:54 +02:00
a71711978e Make sure we do not expose secrets in output
This is using the RFC-8959 URI scheme to detect secrets. Thanks
hackernews for the idea ;)
2022-05-27 23:37:54 +02:00
90d188e01e Back to pure docker for testing 2022-05-27 23:37:54 +02:00
2e6166e807 Link binary statically with musl 2022-05-27 23:37:54 +02:00
8aaaa55d45 gitlab: Add alternate error field in JSON response 2022-05-27 23:37:54 +02:00
df39bb3076 gitlab: Fix detection of private repositories 2022-05-27 23:37:54 +02:00
bc3d4e1c49 Properly escape URL parameters 2022-05-27 23:37:54 +02:00
32eb4676ee Restructure into smaller modules 2022-05-27 23:37:54 +02:00
5d7480f7a4 Merge branch 'develop' 2022-05-26 19:13:02 +02:00
2d34ba1bd7 Fix forge documentation 2022-05-26 19:11:19 +02:00
5b78c3ba9e Release v0.7.0 2022-05-26 19:09:27 +02:00
95cffc5f0e dependencies: Update isahc to 1.7.2 2022-05-26 19:07:17 +02:00
4841920c64 dependencies: Update serde_json to 1.0.81 2022-05-26 19:07:16 +02:00
c439595d92 Justfile: Add target to lint 2022-05-26 18:57:31 +02:00
62c1e430b2 Derive Eq when deriving PartialEq
There is a clippy lint for this.
2022-05-26 18:57:31 +02:00
1212917fae Add unit tests for Repo::fullname() 2022-05-26 18:57:31 +02:00
f41b9b1684 Add pycache to gitignore 2022-05-26 18:57:31 +02:00
b17f4d68ef Fix handling of unmanaged repositories
Before, there were warnings in case of nested trees.
2022-05-26 18:57:31 +02:00
b8c552fb62 Give repos a namespace to allow subdirectories 2022-05-26 18:57:31 +02:00
f2d2482476 e2e: Add tests for subdirectory checkouts 2022-05-26 18:57:31 +02:00
6ef759a14e Separate config structs from internal structs 2022-05-26 18:57:31 +02:00
10e02c20a1 e2e: Add tests for nested repository checkouts 2022-05-26 18:57:31 +02:00
433dc090e0 Prefix shell commands with dollar sign 2022-05-26 18:57:31 +02:00
35e7c34d11 Do not panic when finding unmanaged worktrees fails 2022-05-26 18:57:31 +02:00
50a0f4d766 Fail properly when default branch cannot be detected 2022-05-26 18:57:31 +02:00
1db3eadd4c Fix formatting 2022-05-26 18:57:14 +02:00
af45b13612 Justfile: Add target for formatting 2022-05-26 18:53:12 +02:00
c994c90247 Justfile: Remove Cargo.lock check 2022-05-26 18:53:12 +02:00
3e8aad2221 Format cargo update script with black 2022-05-26 18:53:12 +02:00
127dd0535e Normalize paths when printing configuration 2022-05-26 18:53:12 +02:00
664d44eddc Only initialize worktrees for actually cloned repos 2022-05-26 18:53:12 +02:00
ad206297d8 e2e: Test sync twice to verify no changes 2022-05-26 18:53:12 +02:00
f2f1d5bcaf Fix worktree initialization 2022-05-26 18:53:12 +02:00
881a33dc96 e2e: Add tests for worktree initialization 2022-05-26 18:53:12 +02:00
38c66cad62 Add git forge integration 2022-05-26 17:55:07 +02:00
7ad51ccb47 Cargo.lock: Updating ryu v1.0.9 -> v1.0.10 2022-05-26 17:26:28 +02:00
dd65f2cd81 Cargo.lock: Updating once_cell v1.10.0 -> v1.12.0 2022-05-26 17:26:28 +02:00
f01568a695 Cargo.lock: Updating mio v0.8.2 -> v0.8.3 2022-05-26 17:26:28 +02:00
be085e9b0f dependencies: Update regex to 1.5.6 2022-05-26 17:26:28 +02:00
3557dd2686 dependencies: Update clap to 3.1.18 2022-05-26 17:26:28 +02:00
908094f48b dependencies: Update git2 to 0.14.4 2022-05-26 17:26:28 +02:00
c3c1c98913 Run cargo fmt 2022-05-10 18:26:06 +02:00
e940ab69fb Accept clippy suggestions 2022-05-10 18:25:45 +02:00
1cf4e85014 Fix non-worktree directory detection for status 2022-05-10 18:24:29 +02:00
14c95f2704 Fix worktree creation handling 2022-05-10 17:54:03 +02:00
5f878793fd Merge branch 'develop' 2022-05-07 22:07:37 +02:00
fd6400ed68 Release v0.6.2 2022-05-07 22:06:19 +02:00
faf68e2052 depcheck: Make skipped prereleases more obvious 2022-05-07 22:04:59 +02:00
7296795aec e2e_tests/pip: Update typing_extensions to 4.2.0 2022-05-07 22:04:59 +02:00
88252fffc8 e2e_tests/pip: Update pytest to 7.1.2 2022-05-07 22:04:59 +02:00
e67f5a7db4 e2e_tests/pip: Update pyparsing to 3.0.8 2022-05-07 22:04:59 +02:00
87e0247b48 Cargo.lock: Updating getrandom v0.2.4 -> v0.2.6 2022-05-07 21:26:39 +02:00
d490d3ab84 Cargo.lock: Updating once_cell v1.9.0 -> v1.10.0 2022-05-07 21:26:37 +02:00
f7870797ac Cargo.lock: Updating crossterm v0.23.0 -> v0.23.2 2022-05-07 21:26:36 +02:00
17ffc793e0 dependencies: Update serde_yaml to 0.8.24 2022-05-07 21:26:34 +02:00
d3738f0887 dependencies: Update regex to 1.5.5 2022-05-07 21:26:34 +02:00
7da879d483 dependencies: Update clap to 3.1.17 2022-05-07 21:26:34 +02:00
c0bb71f84f dependencies: Update git2 to 0.14.3 2022-05-07 21:26:34 +02:00
230f380a6a dependencies: Update serde to 1.0.137 2022-05-07 21:26:34 +02:00
852f445b1f dependencies: Update toml to 0.5.9 2022-05-07 21:26:33 +02:00
584f68ba42 clap: Remove deprecation warning 2022-02-21 20:28:30 +01:00
92092ed4af Merge branch 'develop' 2022-02-21 19:55:15 +01:00
fadf687a3e Release v0.6.1 2022-02-21 19:54:36 +01:00
3a18870537 e2e_tests/pip: Update typing_extensions to 4.1.1 2022-02-21 19:52:04 +01:00
cf80678ccc e2e_tests/pip: Update pytest to 7.0.1 2022-02-21 19:52:02 +01:00
08ce4b6add e2e_tests/pip: Update GitPython to 3.1.27 2022-02-21 19:52:00 +01:00
39075a6269 Cargo.lock: Updating cc v1.0.72 -> v1.0.73 2022-02-21 19:51:38 +01:00
906ead80a4 dependencies: Update comfy-table to 5.0.1 2022-02-21 19:51:38 +01:00
7038661296 dependencies: Update clap to 3.1.1 2022-02-21 19:51:38 +01:00
543bf94a51 dependencies: Update serde to 1.0.136 2022-02-21 19:51:37 +01:00
453f73c2a0 e2e: Fix ignoring pip and setuptools on autoupdate 2022-01-23 22:17:54 +01:00
7e673200c8 Fix error on empty cloned repository 2022-01-23 22:11:54 +01:00
44a716248e Release v0.6.0 2022-01-23 12:28:45 +01:00
d20006a325 Merge branch 'develop' 2022-01-23 12:28:15 +01:00
f8adec1413 e2e: Ignore pip and setuptools for autoupdate 2022-01-23 12:27:59 +01:00
868269359c dependencies: Update serde to 1.0.135 2022-01-23 12:25:20 +01:00
61d4a4a0d8 e2e: Add PyYAML for YAML parsing 2022-01-22 11:23:53 +01:00
4e4de95a07 depcheck: Fix command to update crates.io index 2022-01-22 11:23:53 +01:00
9b64de7991 Add YAML as a config format option
@mustafa89 ;)
2022-01-22 11:23:53 +01:00
e45de3b498 depcheck: Fix crates.io cache update 2022-01-22 11:23:53 +01:00
6e4c388195 Add --stash options to pull and rebase 2022-01-22 11:23:53 +01:00
6436a8194e Disable "raw" SSH key usage
There is no sane way to get that fallback working with libgit2. Plus,
it's not a good practice anyway to have a non-password protected SSH
key.
2022-01-22 11:23:53 +01:00
f10ae25b2a Justfile: Add target to clean up 2022-01-22 11:23:53 +01:00
fd6b3b7438 Release v0.5.1 2022-01-22 11:19:37 +01:00
d68ff012f2 e2e_tests/pip: Update pyparsing to 3.0.7 2022-01-22 10:58:59 +01:00
9aad65edac dependencies: Update clap to 3.0.10 2022-01-22 10:58:59 +01:00
c370ef5815 dependencies: Update serde to 1.0.134 2022-01-22 10:58:59 +01:00
8f5b743ea4 Cargo.lock: Updating openssl-probe v0.1.4 -> v0.1.5 2022-01-22 10:58:47 +01:00
c0e981dbd4 Cargo.lock: Updating getrandom v0.2.3 -> v0.2.4 2022-01-22 10:58:47 +01:00
4303621b30 Cargo.lock: Updating smallvec v1.7.0 -> v1.8.0 2022-01-22 10:58:47 +01:00
63e04a9dcf dependencies: Update clap to 3.0.7 2022-01-22 10:58:47 +01:00
08ee946f2e dependencies: Update clap to 3.0.6 2022-01-22 10:58:47 +01:00
81de5a2d70 e2e_tests/pip: Update GitPython to 3.1.26 2022-01-22 10:58:47 +01:00
54 changed files with 6534 additions and 1375 deletions

View File

744
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,8 @@
[package] [package]
name = "git-repo-manager" name = "git-repo-manager"
version = "0.5.0" version = "0.7.1"
edition = "2021" edition = "2021"
authors = [ authors = [
"Hannes Körber <hannes@hkoerber.de>", "Hannes Körber <hannes@hkoerber.de>",
] ]
@@ -26,6 +27,9 @@ rust-version = "1.57"
license = "GPL-3.0-only" license = "GPL-3.0-only"
[profile.e2e-tests]
inherits = "release"
[lib] [lib]
name = "grm" name = "grm"
path = "src/lib.rs" path = "src/lib.rs"
@@ -37,30 +41,55 @@ path = "src/grm/main.rs"
[dependencies] [dependencies]
[dependencies.toml] [dependencies.toml]
version = "=0.5.8" version = "=0.5.9"
[dependencies.serde] [dependencies.serde]
version = "=1.0.133" version = "=1.0.137"
features = ["derive"] features = ["derive"]
[dependencies.git2] [dependencies.git2]
version = "=0.13.25" version = "=0.14.4"
[dependencies.shellexpand] [dependencies.shellexpand]
version = "=2.1.0" version = "=2.1.0"
[dependencies.clap] [dependencies.clap]
version = "=3.0.5" version = "=3.1.18"
features = ["derive", "cargo"] features = ["derive", "cargo"]
[dependencies.console] [dependencies.console]
version = "=0.15.0" version = "=0.15.0"
[dependencies.regex] [dependencies.regex]
version = "=1.5.4" version = "=1.5.6"
[dependencies.comfy-table] [dependencies.comfy-table]
version = "=5.0.0" version = "=5.0.1"
[dependencies.serde_yaml]
version = "=0.8.24"
[dependencies.serde_json]
version = "=1.0.81"
[dependencies.isahc]
version = "=1.7.2"
default-features = false
features = ["json", "http2", "text-decoding"]
[dependencies.parse_link_header]
version = "=0.3.2"
[dependencies.url-escape]
version = "=0.1.1"
[dev-dependencies.tempdir] [dev-dependencies.tempdir]
version = "=0.3.7" version = "=0.3.7"
[features]
static-build = [
"git2/vendored-openssl",
"git2/vendored-libgit2",
"isahc/static-curl",
"isahc/static-ssl",
]

View File

@@ -1,20 +1,43 @@
check: check-cargo-lock check-pip-requirements test set positional-arguments
target := "x86_64-unknown-linux-musl"
check: test
cargo check cargo check
cargo fmt --check cargo fmt --check
cargo clippy --no-deps -- -Dwarnings cargo clippy --no-deps -- -Dwarnings
check-cargo-lock: fmt:
cargo update --locked cargo fmt
git ls-files | grep '\.py$' | xargs black
lint:
cargo clippy --no-deps
lint-fix: lint-fix:
cargo clippy --no-deps --fix cargo clippy --no-deps --fix
release: release:
cargo build --release cargo build --release --target {{target}}
test-binary:
env \
GITHUB_API_BASEURL=http://rest:5000/github \
GITLAB_API_BASEURL=http://rest:5000/gitlab \
cargo build --target {{target}} --profile e2e-tests --features=static-build
install: install:
cargo install --path . cargo install --path .
install-static:
cargo install --target {{target}} --features=static-build --path .
build:
cargo build
build-static:
cargo build --target {{target}} --features=static-build
test: test-unit test-integration test-e2e test: test-unit test-integration test-e2e
test-unit: test-unit:
@@ -23,19 +46,18 @@ test-unit:
test-integration: test-integration:
cargo test --test "*" cargo test --test "*"
e2e-venv: test-e2e +tests=".": test-binary
cd ./e2e_tests \ cd ./e2e_tests \
&& python3 -m venv venv \ && docker-compose rm --stop -f \
&& . ./venv/bin/activate \ && docker-compose build \
&& pip --disable-pip-version-check install -r ./requirements.txt >/dev/null && docker-compose run \
--rm \
-v $PWD/../target/{{target}}/e2e-tests/grm:/grm \
pytest \
"GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest -p no:cacheprovider --color=yes "$@"" \
&& docker-compose rm --stop -f
update-dependencies: update-cargo-dependencies
test-e2e +tests=".": e2e-venv release
cd ./e2e_tests \
&& . ./venv/bin/activate \
&& TMPDIR=/dev/shm python -m pytest --color=yes {{tests}}
update-dependencies: update-cargo-dependencies update-pip-requirements
update-cargo-dependencies: update-cargo-dependencies:
@cd ./depcheck \ @cd ./depcheck \
@@ -43,12 +65,3 @@ update-cargo-dependencies:
&& . ./venv/bin/activate \ && . ./venv/bin/activate \
&& pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \ && pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \
&& ./update-cargo-dependencies.py && ./update-cargo-dependencies.py
update-pip-requirements: e2e-venv
@cd ./e2e_tests \
&& ./update_requirementstxt.sh
check-pip-requirements: e2e-venv
@cd ./e2e_tests \
&& . ./venv/bin/activate \
&& pip list --outdated | grep -q '.' && exit 1 || exit 0

View File

@@ -14,7 +14,13 @@ AUTOUPDATE_DISABLED = []
if os.path.exists(INDEX_DIR): if os.path.exists(INDEX_DIR):
subprocess.run( subprocess.run(
["git", "pull", "--depth=1", "origin"], ["git", "fetch", "--depth=1", "origin"],
cwd=INDEX_DIR,
check=True,
capture_output=True,
)
subprocess.run(
["git", "reset", "--hard", "origin/master"],
cwd=INDEX_DIR, cwd=INDEX_DIR,
check=True, check=True,
capture_output=True, capture_output=True,
@@ -33,7 +39,7 @@ update_necessary = False
# This updates the crates.io index, see https://github.com/rust-lang/cargo/issues/3377 # This updates the crates.io index, see https://github.com/rust-lang/cargo/issues/3377
subprocess.run( subprocess.run(
["cargo", "search", "--limit", "0"], ["cargo", "update", "--dry-run"],
check=True, check=True,
capture_output=False, # to get some git output capture_output=False, # to get some git output
) )
@@ -55,10 +61,14 @@ for tier in ["dependencies", "dev-dependencies"]:
latest_version = None latest_version = None
for version_entry in open(info_file, "r").readlines(): for version_entry in open(info_file, "r").readlines():
version = semver.VersionInfo.parse(json.loads(version_entry)["vers"]) version = semver.VersionInfo.parse(json.loads(version_entry)["vers"])
if current_version.prerelease == "" and version.prerelease != "":
# skip prereleases, except when we are on a prerelease already
continue
if latest_version is None or version > latest_version: if latest_version is None or version > latest_version:
if (
current_version.prerelease is None
and version.prerelease is not None
):
# skip prereleases, except when we are on a prerelease already
print(f"{name}: Skipping prerelease version {version}")
continue
latest_version = version latest_version = version
if latest_version != current_version: if latest_version != current_version:
@@ -84,7 +94,15 @@ for tier in ["dependencies", "dev-dependencies"]:
try: try:
cmd = subprocess.run( cmd = subprocess.run(
["cargo", "update", "-Z", "no-index-update", "--aggressive", "--package", name], [
"cargo",
"update",
"-Z",
"no-index-update",
"--aggressive",
"--package",
name,
],
check=True, check=True,
capture_output=True, capture_output=True,
text=True, text=True,
@@ -96,9 +114,16 @@ for tier in ["dependencies", "dev-dependencies"]:
message = f"dependencies: Update {name} to {latest_version}" message = f"dependencies: Update {name} to {latest_version}"
subprocess.run( subprocess.run(
["git", "commit", "--message", message, "../Cargo.toml", "../Cargo.lock"], [
"git",
"commit",
"--message",
message,
"../Cargo.toml",
"../Cargo.lock",
],
check=True, check=True,
capture_output=True capture_output=True,
) )
@@ -107,11 +132,19 @@ for tier in ["dependencies", "dev-dependencies"]:
while True: while True:
with open("../Cargo.lock", "r") as f: with open("../Cargo.lock", "r") as f:
cargo_lock = tomlkit.parse(f.read()) cargo_lock = tomlkit.parse(f.read())
for package in cargo_lock['package']: for package in cargo_lock["package"]:
spec = f"{package['name']}:{package['version']}" spec = f"{package['name']}:{package['version']}"
try: try:
cmd = subprocess.run( cmd = subprocess.run(
["cargo", "update", "-Z", "no-index-update", "--aggressive", "--package", spec], [
"cargo",
"update",
"-Z",
"no-index-update",
"--aggressive",
"--package",
spec,
],
check=True, check=True,
capture_output=True, capture_output=True,
text=True, text=True,
@@ -127,7 +160,7 @@ while True:
cmd = subprocess.run( cmd = subprocess.run(
["git", "commit", "--message", message, "../Cargo.lock"], ["git", "commit", "--message", message, "../Cargo.lock"],
check=True, check=True,
capture_output=True capture_output=True,
) )
break break
else: else:

View File

@@ -1,8 +1,9 @@
# Summary # Summary
- [Overview](./overview.md) - [Overview](./overview.md)
- [Getting started](./getting_started.md) - [Installation](./installation.md)
- [Repository trees](./repos.md) - [Repository trees](./repos.md)
- [Git Worktrees](./worktrees.md) - [Git Worktrees](./worktrees.md)
- [Forge Integrations](./forge_integration.md)
- [FAQ](./faq.md) - [FAQ](./faq.md)
- [Contributing](./contributing.md) - [Contributing](./contributing.md)

View File

@@ -0,0 +1,205 @@
# Forge Integrations
In addition to managing repositories locally, `grm` also integrates with source
code hosting platforms. Right now, the following platforms are supported:
* [GitHub](https://github.com/)
* [GitLab](https://gitlab.com/)
Imagine you are just starting out with `grm` and want to clone all your repositories
from GitHub. This is as simple as:
```bash
$ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects
```
You will end up with your projects cloned into `~/projects/{your_github_username}/`
## Authentication
The only currently supported authentication option is using personal access
token.
### GitHub
See the GitHub documentation for personal access tokens:
[Link](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token).
The only required permission is the "repo" scope.
### GitLab
See the GitLab documentation for personal access tokens:
[Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html).
The required scopes are a bit weird. Actually, the following should suffice:
* `read_user` to get user information (required to get the current authenticated
user name for the `--owner` filter).
* A scope that allows reading private repositories. (`read_repository` is just
for *cloning* private repos). This unfortunately does not exist.
So currently, you'll need to select the `read_api` scope.
## Filters
By default, `grm` will sync **nothing**. This is quite boring, so you have to
tell the command what repositories to include. They are all inclusive (i.e. act
as a logical OR), so you can easily chain many filters to clone a bunch of
repositories. It's quite simple:
* `--user <USER>` syncs all repositories of that remote user
* `--group <GROUP>` syncs all repositories of that remote group/organization
* `--owner` syncs all repositories of the user that is used for authentication.
This is effectively a shortcut for `--user $YOUR_USER`
* `--access` syncs all repositories that the current user has access to
Easiest to see in an example:
```bash
$ grm repos sync remote --provider github --user torvalds --owner --group zalando [...]
```
This would sync all of Torvald's repositories, all of my own repositories and
all (public) repositories in the "zalando" group.
## Strategies
There are generally three ways how you can use `grm` with forges:
### Ad-hoc cloning
This is the easiest, there are no local files involved. You just run the
command, `grm` clones the repos, that's it. If you run the command again, `grm`
will figure out the differences between local and remote repositories and
resolve them locally.
### Create a file
This is effectively `grm repos find local`, but using the forge instead of the
local file system. You will end up with a normal repository file that you can
commit to git. To update the list of repositories, just run the command again
and commit the new file.
### Define options in a file
This is a hybrid approach: You define filtering options in a file that you can
commit to source control. Effectively, you are persisting the options you gave
to `grm` on the command line with the ad-hoc approach. Similarly, `grm` will
figure out differences between local and remote and resolve them.
A file would look like this:
```toml
provider = "github"
token_command = "cat ~/.github_token"
root = "~/projects"
[filters]
owner = true
groups = [
"zalando"
]
```
The options in the file map to the command line options of the `grm repos sync
remote` command.
You'd then run the `grm repos sync` command the same way as with a list of
repositories in a config:
```bash
$ grm repos sync --config example.config.toml
```
You can even use that file to generate a repository list that you can feed into
`grm repos sync`:
```bash
$ grm repos find config --config example.config.toml > repos.toml
$ grm repos sync config --config repos.toml
```
## Using with selfhosted GitLab
By default, `grm` uses the default GitLab API endpoint
([https://gitlab.com](https://gitlab.com)). You can override the
endpoint by specifying the `--api-url` parameter. Like this:
```bash
$ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...]
```
## The cloning protocol
By default, `grm` will use HTTPS for public repositories and SSH otherwise. This
can be overridden with the `--force-ssh` switch.
## About the token command
To ensure maximum flexibility, `grm` has a single way to get the token it uses
to authenticate: Specify a command that returns the token via stdout. This easily
integrates with password managers like [`pass`](https://www.passwordstore.org/).
Of course, you are also free to specify something like `echo mytoken` as the
command, as long as you are ok with the security implications (like having the
token in cleartext in your shell history). It may be better to have the token
in a file instead and read it: `cat ~/.gitlab_token`.
Generally, use whatever you want. The command just has to return successfully and
return the token as the first line of stdout.
## Examples
Maybe you just want to locally clone all repos from your github user?
```bash
$ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token"
```
This will clone all repositories into `~/github_projects/{your_github_username}`.
If instead you want to clone **all** repositories you have access to (e.g. via
organizations or other users' private repos you have access to), just change the
filter a little bit:
```bash
$ grm repos sync remote --provider github --access --root ~/github_projects --token-command "pass show github_grm_access_token"
```
## Limitations
### GitHub
Unfortunately, GitHub does not have a nice API endpoint to get **private**
repositories for a certain user ([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user) only returns public
repositories).
Therefore, using `--user {user}` will only show public repositories for GitHub.
Note that this does not apply to `--access`: If you have access to another user's
private repository, it will be listed.
## Adding integrations
Adding a new integration involves writing some Rust code. Most of the logic is
generic, so you will not have to reinvent the wheel. Generally, you will need to
gather the following information:
* A list of repositories for a single user
* A list of repositories for a group (or any similar concept if applicable)
* A list of repositories for the user that the API token belongs to
* The username of the currently authenticated user
Authentication currently only works via a bearer token passed via the
`Authorization` HTTP header.
Each repo has to have the following properties:
* A name (which also acts as the identifier for diff between local and remote
repositories)
* An SSH url to push to
* An HTTPS url to clone and fetch from
* A flag that marks the repository as private
If you plan to implement another forge, please first open an issue so we can
go through the required setup. I'm happy to help!

View File

@@ -1,22 +0,0 @@
# Quickstart
## Installation
Building GRM currently requires the nightly Rust toolchain. The easiest way
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
Make sure that the nightly toolchain is installed:
```
$ rustup toolchain install nightly
```
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch master
```
If you're brave, you can also run the development build:
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
```

56
docs/src/installation.md Normal file
View File

@@ -0,0 +1,56 @@
# Installation
## Installation
Building GRM currently requires the nightly Rust toolchain. The easiest way
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
Make sure that the nightly toolchain is installed:
```
$ rustup toolchain install nightly
```
Then, install the build dependencies:
| Distribution | Command |
| ------------- | ------------------------------------------------------------------------------ |
| Archlinux | `pacman -S --needed gcc openssl pkg-config` |
| Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` |
Then, it's a simple command to install the latest stable version:
```bash
$ cargo +nightly install git-repo-manager
```
If you're brave, you can also run the development build:
```bash
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
```
## Static build
Note that by default, you will get a dynamically linked executable.
Alternatively, you can also build a statically linked binary. For this, you
will need `musl` and a few other build dependencies installed:
| Distribution | Command |
| ------------- | --------------------------------------------------------------------------- |
| Archlinux | `pacman -S --needed gcc musl perl make` |
| Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` |
(`perl` and `make` are required for the OpenSSL build script)
Then, add the musl target via `rustup`:
```
$ rustup +nightly target add x86_64-unknown-linux-musl
```
Then, use a modified build command to get a statically linked binary:
```
$ cargo +nightly install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build
```

View File

@@ -5,7 +5,8 @@ Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a
tool that helps you manage git repositories. tool that helps you manage git repositories.
GRM helps you manage git repositories in a declarative way. Configure your GRM helps you manage git repositories in a declarative way. Configure your
repositories in a TOML file, GRM does the rest. Take a look at [the example repositories in a TOML or YAML file, GRM does the rest. Take a look at [the
example
configuration](https://github.com/hakoerber/git-repo-manager/blob/master/example.config.toml) configuration](https://github.com/hakoerber/git-repo-manager/blob/master/example.config.toml)
to get a feel for the way you configure your repositories. See the [repository to get a feel for the way you configure your repositories. See the [repository
tree chapter](./repos.md) for details. tree chapter](./repos.md) for details.

View File

@@ -17,7 +17,7 @@ Then, you're ready to run the first sync. This will clone all configured reposit
and set up the remotes. and set up the remotes.
```bash ```bash
$ grm repos sync --config example.config.toml $ grm repos sync config --config example.config.toml
[] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git" [] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
[] git-repo-manager: Repository successfully cloned [] git-repo-manager: Repository successfully cloned
[] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git" [] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
@@ -30,7 +30,7 @@ $ grm repos sync --config example.config.toml
If you run it again, it will report no changes: If you run it again, it will report no changes:
``` ```
$ grm repos sync --config example.config.toml $ grm repos sync config -c example.config.toml
[✔] git-repo-manager: OK [✔] git-repo-manager: OK
[✔] dotfiles: OK [✔] dotfiles: OK
``` ```
@@ -42,7 +42,7 @@ a configuration from scratch. Luckily, GRM has a way to generate a configuration
from an existing file tree: from an existing file tree:
```bash ```bash
$ grm repos find ~/your/project/root > config.toml $ grm repos find local ~/your/project/root > config.toml
``` ```
This will detect all repositories and remotes and write them to `config.toml`. This will detect all repositories and remotes and write them to `config.toml`.
@@ -74,3 +74,9 @@ $ grm repos status
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯ ╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
``` ```
## YAML
By default, the repo configuration uses TOML. If you prefer YAML, just give it
a YAML file instead (file ending does not matter, `grm` will figure out the format
itself). For generating a configuration, pass `--format yaml` to `grm repos find`
to generate YAML instead of TOML.

View File

@@ -269,7 +269,7 @@ wt convert`. This command has to be run in the root of the repository you want
to convert: to convert:
``` ```
grm wt convert $ grm wt convert
[✔] Conversion successful [✔] Conversion successful
``` ```
@@ -285,7 +285,7 @@ To fetch all remote references from all remotes in a worktree setup, you can
use the following command: use the following command:
``` ```
grm wt fetch $ grm wt fetch
[✔] Fetched from all remotes [✔] Fetched from all remotes
``` ```
@@ -295,7 +295,7 @@ Often, you may want to pull all remote changes into your worktrees. For this,
use the `git pull` equivalent: use the `git pull` equivalent:
``` ```
grm wt pull $ grm wt pull
[✔] master: Done [✔] master: Done
[✔] my-cool-branch: Done [✔] my-cool-branch: Done
``` ```
@@ -304,18 +304,22 @@ This will refuse when there are local changes, or if the branch cannot be fast
forwarded. If you want to rebase your local branches, use the `--rebase` switch: forwarded. If you want to rebase your local branches, use the `--rebase` switch:
``` ```
grm wt pull --rebase $ grm wt pull --rebase
[✔] master: Done [✔] master: Done
[✔] my-cool-branch: Done [✔] my-cool-branch: Done
``` ```
As noted, this will fail if there are any local changes in your worktree. If you
want to stash these changes automatically before the pull (and unstash them
afterwards), use the `--stash` option.
This will rebase your changes onto the upstream branch. This is mainly helpful This will rebase your changes onto the upstream branch. This is mainly helpful
for persistent branches that change on the remote side. for persistent branches that change on the remote side.
There is a similar rebase feature that rebases onto the **default** branch instead: There is a similar rebase feature that rebases onto the **default** branch instead:
``` ```
grm wt rebase $ grm wt rebase
[✔] master: Done [✔] master: Done
[✔] my-cool-branch: Done [✔] my-cool-branch: Done
``` ```
@@ -327,7 +331,7 @@ use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
on non-fast-forwards: on non-fast-forwards:
``` ```
grm wt rebase --pull --rebase $ grm wt rebase --pull --rebase
[✔] master: Done [✔] master: Done
[✔] my-cool-branch: Done [✔] my-cool-branch: Done
``` ```
@@ -346,6 +350,10 @@ run two commands.
I understand that the UX is not the most intuitive. If you can think of an I understand that the UX is not the most intuitive. If you can think of an
improvement, please let me know (e.g. via an GitHub issue)! improvement, please let me know (e.g. via an GitHub issue)!
As with `pull`, `rebase` will also refuse to run when there are changes in your
worktree. And you can also use the `--stash` option to stash/unstash changes
automatically.
### Manual access ### Manual access
GRM isn't doing any magic, it's just git under the hood. If you need to have access GRM isn't doing any magic, it's just git under the hood. If you need to have access

View File

@@ -1,2 +1 @@
/venv/
/__pycache__/ /__pycache__/

8
e2e_tests/conftest.py Normal file
View File

@@ -0,0 +1,8 @@
import os
def pytest_configure(config):
os.environ["GIT_AUTHOR_NAME"] = "Example user"
os.environ["GIT_AUTHOR_EMAIL"] = "user@example.com"
os.environ["GIT_COMMITTER_NAME"] = "Example user"
os.environ["GIT_COMMITTER_EMAIL"] = "user@example.com"

View File

@@ -0,0 +1,32 @@
# Compose stack for the end-to-end test suite: a pytest runner container
# plus a mock REST server standing in for the forge (GitHub/GitLab) APIs.
# NOTE(review): indentation was lost in this rendering; restored to the
# canonical compose structure implied by the keys.
version: "3.7"
services:
  pytest:
    build: ./docker
    volumes:
      # Test sources are mounted read-only; all scratch data goes to an
      # in-memory tmpfs, exposed to the tests via TMPDIR.
      - type: bind
        source: ./
        target: /tests
        read_only: true
      - type: tmpfs
        target: /tmp
    environment:
      TMPDIR: /tmp
    depends_on:
      - rest
    # Placeholder command; the actual test command is supplied at run time.
    command:
      - "true"
    networks:
      main:
  rest:
    build: ./docker-rest/
    expose:
      - "5000"
    networks:
      main:
        aliases:
          # Second hostname so tests can address the same server as a
          # different remote.
          - alternate-rest
networks:
  main:

View File

@@ -0,0 +1,19 @@
# Image for the mock REST server used by the e2e tests.
FROM docker.io/debian:11.3
WORKDIR /app
# Tell `flask run` which module to serve.
ENV FLASK_APP=app.py
# Install Flask/Jinja2 from Debian packages; clean apt caches to keep the
# image small.
RUN apt-get update \
    && apt-get install -y \
    dumb-init \
    python3-flask \
    python3-jinja2 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
EXPOSE 5000
COPY flask .
# dumb-init runs as PID 1 so signals are forwarded to flask properly.
CMD ["/usr/bin/dumb-init", "--", "flask", "run", "--port", "5000", "--host", "0.0.0.0"]

View File

@@ -0,0 +1,7 @@
# Entry point of the mock REST server: creates the shared Flask app.
from flask import Flask
app = Flask(__name__)
# Treat /path and /path/ as the same route.
app.url_map.strict_slashes = False
# Imported for their side effect of registering routes on `app`; must come
# after `app` is created.
import github
import gitlab

View File

@@ -0,0 +1,103 @@
import os.path
from app import app
from flask import Flask, request, abort, jsonify, make_response
import jinja2
def check_headers():
    """Validate the GitHub-style accept and authorization headers of the
    current request, aborting with 500 or 401 on a mismatch."""
    accept = request.headers.get("accept")
    if accept != "application/vnd.github.v3+json":
        app.logger.error("Invalid accept header")
        abort(500)
    auth_header = request.headers.get("authorization")
    if auth_header == "token secret-token:myauthtoken":
        return
    app.logger.error("Invalid authorization header: %s", auth_header)
    error_body = jsonify(
        {
            "message": "Bad credentials",
            "documentation_url": "https://docs.example.com/rest",
        }
    )
    abort(make_response(error_body, 401))
def add_pagination(response, page, last_page):
    """Attach a GitHub-style `link` header with next/last page URLs to the
    given response."""
    host = request.headers["host"]

    def page_url(target_page):
        # Rebuild the current query string with the page parameter replaced.
        params = request.args.copy()
        params["page"] = target_page
        query = "&".join(f"{k}={v}" for k, v in params.items())
        return f"{request.scheme}://{host}{request.path}?{query}"

    links = []
    if page < last_page:
        links.append(f'<{page_url(page + 1)}>; rel="next"')
    links.append(f'<{page_url(last_page)}>; rel="last"')
    response.headers["link"] = ", ".join(links)
def read_project_files(namespaces=None):
    """Serve one page of the paginated repository listing.

    Renders ./github_api_page_<page>.json.j2 with the namespace assigned to
    that page (namespaces[page - 1]) and attaches pagination link headers.
    Pages without a template file yield an empty JSON array.
    """
    # Avoid the mutable default argument; build a fresh list per call.
    if namespaces is None:
        namespaces = []
    last_page = 4
    # The original chained `page = username = ...` left a stray, unused
    # `username` binding; only `page` is needed.
    page = int(request.args.get("page", "1"))
    response_file = f"./github_api_page_{page}.json.j2"
    if not os.path.exists(response_file):
        return jsonify([])
    # Close the template file deterministically instead of leaking the handle.
    with open(response_file) as template_file:
        template = jinja2.Template(template_file.read())
    response = make_response(template.render(namespace=namespaces[page - 1]))
    add_pagination(response, page, last_page)
    response.headers["content-type"] = "application/json"
    return response
def single_namespaced_projects(namespace):
    """Project listing where all four result pages share one namespace."""
    pages = [namespace for _ in range(4)]
    return read_project_files(pages)
def mixed_projects(namespaces):
    # One namespace per result page, in order: page N renders namespaces[N-1].
    return read_project_files(namespaces)
@app.route("/github/users/<string:user>/repos/")
def github_user_repos(user):
    """Repositories of a named user; only myuser1 has any in the fixtures."""
    check_headers()
    if user != "myuser1":
        return jsonify([])
    return single_namespaced_projects("myuser1")
@app.route("/github/orgs/<string:group>/repos/")
def github_group_repos(group):
    """Repositories of an organization. Requires the `type=all` query
    parameter; only mygroup1 has fixtures."""
    check_headers()
    if request.args.get("type") != "all":
        abort(500, "wrong arguments")
    if group != "mygroup1":
        return jsonify([])
    return single_namespaced_projects("mygroup1")
@app.route("/github/user/repos/")
def github_own_repos():
    """Repositories of the authenticated user, spread across a mix of user
    and group namespaces (one per page)."""
    check_headers()
    page_namespaces = ["myuser1", "myuser2", "mygroup1", "mygroup2"]
    return mixed_projects(page_namespaces)
@app.route("/github/user/")
def github_user():
    """Profile of the authenticated user, served from a static JSON file."""
    check_headers()
    # Close the fixture file deterministically instead of leaking the handle
    # left by the bare open(...).read().
    with open("./github_api_user.json") as user_file:
        response = make_response(user_file.read())
    response.headers["content-type"] = "application/json"
    return response

View File

@@ -0,0 +1,228 @@
[
{
"id": 1,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject1",
"full_name": "{{ namespace }}/myproject1",
"private": true,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject1",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject1",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject1/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject1/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject1/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject1/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject1/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject1/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject1/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject1/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject1/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject1/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject1/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject1/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject1/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject1/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject1/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject1.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject1.git",
"clone_url": "https://example.com/{{ namespace }}/myproject1.git",
"svn_url": "https://example.com/{{ namespace }}/myproject1",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
},
{
"id": 2,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject2",
"full_name": "{{ namespace }}/myproject2",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject2",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject2",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject2/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject2/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject2/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject2/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject2/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject2/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject2/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject2/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject2/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject2/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject2/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject2/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject2/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject2/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject2/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject2.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject2.git",
"clone_url": "https://example.com/{{ namespace }}/myproject2.git",
"svn_url": "https://example.com/{{ namespace }}/myproject2",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,115 @@
[
{
"id": 3,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject3",
"full_name": "{{ namespace }}/myproject3",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject3",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject3",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject3/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject3/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject3/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject3/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject3/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject3/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject3/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject3/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject3/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject3/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject3/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject3/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject3/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject3/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject3/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject3.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject3.git",
"clone_url": "https://example.com/{{ namespace }}/myproject3.git",
"svn_url": "https://example.com/{{ namespace }}/myproject3",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,115 @@
[
{
    "id": 4,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject4",
"full_name": "{{ namespace }}/myproject4",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject4",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject4",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject4/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject4/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject4/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject4/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject4/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject4/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject4/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject4/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject4/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject4/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject4/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject4/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject4/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject4/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject4/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject4.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject4.git",
"clone_url": "https://example.com/{{ namespace }}/myproject4.git",
"svn_url": "https://example.com/{{ namespace }}/myproject4",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,115 @@
[
{
    "id": 5,
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
"name": "myproject5",
"full_name": "{{ namespace }}/myproject5",
"private": false,
"owner": {
"login": "someuser",
"id": 1,
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
"avatar_url": "https://example.com/u/3748696?v=4",
"gravatar_id": "",
"url": "https://api.example.com/users/{{ namespace }}",
"html_url": "https://example.com/{{ namespace }}",
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
"type": "User",
"site_admin": false
},
"html_url": "https://example.com/{{ namespace }}/myproject5",
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
"fork": false,
"url": "https://api.example.com/repos/{{ namespace }}/myproject5",
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/forks",
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject5/keys{/key_id}",
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject5/collaborators{/collaborator}",
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject5/teams",
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/hooks",
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/events{/number}",
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/events",
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/assignees{/user}",
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject5/branches{/branch}",
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/tags",
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/blobs{/sha}",
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/tags{/sha}",
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/refs{/sha}",
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/trees{/sha}",
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject5/statuses/{sha}",
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject5/languages",
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/stargazers",
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contributors",
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscribers",
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscription",
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/commits{/sha}",
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/commits{/sha}",
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/comments{/number}",
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/comments{/number}",
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contents/{+path}",
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject5/compare/{base}...{head}",
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject5/merges",
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject5/{archive_format}{/ref}",
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject5/downloads",
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues{/number}",
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject5/pulls{/number}",
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject5/milestones{/number}",
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject5/notifications{?since,all,participating}",
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject5/labels{/name}",
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject5/releases{/id}",
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/deployments",
"created_at": "2016-01-07T22:27:54Z",
"updated_at": "2021-11-20T16:15:37Z",
"pushed_at": "2021-11-20T16:15:34Z",
"git_url": "git://example.com/{{ namespace }}/myproject5.git",
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject5.git",
"clone_url": "https://example.com/{{ namespace }}/myproject5.git",
"svn_url": "https://example.com/{{ namespace }}/myproject5",
"homepage": null,
"size": 12,
"stargazers_count": 0,
"watchers_count": 0,
"language": "Shell",
"has_issues": true,
"has_projects": true,
"has_downloads": true,
"has_wiki": true,
"has_pages": false,
"forks_count": 0,
"mirror_url": null,
"archived": false,
"disabled": false,
"open_issues_count": 0,
"license": {
"key": "apache-2.0",
"name": "Apache License 2.0",
"spdx_id": "Apache-2.0",
"url": "https://api.example.com/licenses/apache-2.0",
"node_id": "MDc6TGljZW5zZTI="
},
"allow_forking": true,
"is_template": false,
"topics": [
],
"visibility": "public",
"forks": 0,
"open_issues": 0,
"watchers": 0,
"default_branch": "master",
"permissions": {
"admin": true,
"maintain": true,
"push": true,
"triage": true,
"pull": true
}
}
]

View File

@@ -0,0 +1,46 @@
{
"login": "myuser1",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://example.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.example.com/users/octocat",
"html_url": "https://example.com/octocat",
"followers_url": "https://api.example.com/users/octocat/followers",
"following_url": "https://api.example.com/users/octocat/following{/other_user}",
"gists_url": "https://api.example.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.example.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.example.com/users/octocat/subscriptions",
"organizations_url": "https://api.example.com/users/octocat/orgs",
"repos_url": "https://api.example.com/users/octocat/repos",
"events_url": "https://api.example.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.example.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
"name": "monalisa octocat",
"company": "GitHub",
"blog": "https://example.com/blog",
"location": "San Francisco",
"email": "octocat@example.com",
"hireable": false,
"bio": "There once was...",
"twitter_username": "monatheoctocat",
"public_repos": 2,
"public_gists": 1,
"followers": 20,
"following": 0,
"created_at": "2008-01-14T04:33:35Z",
"updated_at": "2008-01-14T04:33:35Z",
"private_gists": 81,
"total_private_repos": 100,
"owned_private_repos": 100,
"disk_usage": 10000,
"collaborators": 8,
"two_factor_authentication": true,
"plan": {
"name": "Medium",
"space": 400,
"private_repos": 20,
"collaborators": 0
}
}

View File

@@ -0,0 +1,106 @@
import os.path
from app import app
from flask import Flask, request, abort, jsonify, make_response
import jinja2
def check_headers():
    """Validate the request headers the mock API requires.

    Aborts with 500 when the accept header is not JSON, and with a
    GitHub-style 401 JSON error body when the bearer token is wrong.
    """
    accept = request.headers.get("accept")
    if accept != "application/json":
        app.logger.error("Invalid accept header")
        abort(500)
    token = request.headers.get("authorization")
    if token == "bearer secret-token:myauthtoken":
        return
    app.logger.error("Invalid authorization header: %s", token)
    error_body = jsonify(
        {
            "message": "Bad credentials",
            "documentation_url": "https://docs.example.com/rest",
        }
    )
    abort(make_response(error_body, 401))
def add_pagination(response, page, last_page):
    """Attach a GitHub-style `link` header with next/last page URLs."""
    host = request.headers["host"]

    def query_string(target_page):
        # Copy the current query args, overriding only the page number.
        params = request.args.copy()
        params["page"] = target_page
        return "&".join(f"{k}={v}" for k, v in params.items())

    base_url = f"{request.scheme}://{host}{request.path}"
    parts = []
    if page < last_page:
        parts.append(f'<{base_url}?{query_string(page + 1)}>; rel="next"')
    parts.append(f'<{base_url}?{query_string(last_page)}>; rel="last"')
    response.headers["link"] = ", ".join(parts)
def read_project_files(namespaces=None):
    """Render the fixture file for the requested page as a JSON response.

    namespaces: one namespace per page; the page's entry is fed to the
    jinja2 template as the `namespace` variable. Returns an empty JSON
    list when no fixture file exists for the requested page.
    """
    # Avoid a mutable default argument; treat "no namespaces" as empty.
    if namespaces is None:
        namespaces = []
    last_page = 4
    # Pages are 1-indexed; default to the first page.
    # (The original also bound this to an unused `username` variable —
    # dead copy-paste assignment, removed.)
    page = int(request.args.get("page", "1"))
    response_file = f"./gitlab_api_page_{page}.json"
    if not os.path.exists(response_file):
        return jsonify([])
    # Use a context manager so the fixture file handle is closed promptly.
    with open(response_file) as f:
        template = jinja2.Template(f.read())
    response = make_response(template.render(namespace=namespaces[page - 1]))
    add_pagination(response, page, last_page)
    response.headers["content-type"] = "application/json"
    return response
def single_namespaced_projects(namespace):
    """All four fixture pages rendered with the same namespace."""
    pages = [namespace] * 4
    return read_project_files(pages)
def mixed_projects(namespaces):
    """Render the fixture pages, one namespace per page.

    Thin wrapper kept so call sites read naturally next to
    single_namespaced_projects().
    """
    return read_project_files(namespaces)
@app.route("/gitlab/api/v4/users/<string:user>/projects")
def gitlab_user_repos(user):
    """Mock GitLab per-user project listing (only myuser1 has projects)."""
    check_headers()
    if user != "myuser1":
        return jsonify([])
    return single_namespaced_projects("myuser1")
@app.route("/gitlab/api/v4/groups/<string:group>/projects")
def gitlab_group_repos(group):
    """Mock GitLab group project listing, enforcing the expected query args."""
    check_headers()
    include_subgroups = request.args.get("include_subgroups") == "true"
    exclude_archived = request.args.get("archived") == "false"
    if not include_subgroups or not exclude_archived:
        abort(500, "wrong arguments")
    if group == "mygroup1":
        return single_namespaced_projects("mygroup1")
    return jsonify([])
@app.route("/gitlab/api/v4/projects/")
def gitlab_own_repos():
    """Mock the listing of every project visible to the token owner."""
    check_headers()
    namespaces = ["myuser1", "myuser2", "mygroup1", "mygroup2"]
    return mixed_projects(namespaces)
@app.route("/gitlab/api/v4/user/")
def gitlab_user():
    """Mock the GitLab current-user endpoint from a static JSON fixture."""
    check_headers()
    # Use a context manager so the fixture file handle is closed promptly
    # (the original leaked the open file object).
    with open("./gitlab_api_user.json") as f:
        response = make_response(f.read())
    response.headers["content-type"] = "application/json"
    return response

View File

@@ -0,0 +1,236 @@
[
{
"id": 1,
"description": "",
"name": "myproject1",
"name_with_namespace": "{{ namespace }} / myproject1",
"path": "myproject1",
"path_with_namespace": "{{ namespace }}/myproject1",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject1.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject1.git",
"web_url": "https://example.com/{{ namespace }}/myproject1",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject1",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "private",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject1-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
},
{
"id": 2,
"description": "",
"name": "myproject2",
"name_with_namespace": "{{ namespace }} / myproject2",
"path": "myproject2",
"path_with_namespace": "{{ namespace }}/myproject2",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject2.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject2.git",
"web_url": "https://example.com/{{ namespace }}/myproject2",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject2",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject2-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,119 @@
[
{
"id": 3,
"description": "",
"name": "myproject3",
"name_with_namespace": "{{ namespace }} / myproject3",
"path": "myproject3",
"path_with_namespace": "{{ namespace }}/myproject3",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject3.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject3.git",
"web_url": "https://example.com/{{ namespace }}/myproject3",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject3",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject3-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,119 @@
[
{
"id": 4,
"description": "",
"name": "myproject4",
"name_with_namespace": "{{ namespace }} / myproject4",
"path": "myproject4",
"path_with_namespace": "{{ namespace }}/myproject4",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject4.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject4.git",
"web_url": "https://example.com/{{ namespace }}/myproject4",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject4",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject4-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,119 @@
[
{
"id": 5,
"description": "",
"name": "myproject5",
"name_with_namespace": "{{ namespace }} / myproject5",
"path": "myproject5",
"path_with_namespace": "{{ namespace }}/myproject5",
"created_at": "2020-11-26T17:23:39.904Z",
"default_branch": "master",
"tag_list": [],
"topics": [],
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject5.git",
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject5.git",
"web_url": "https://example.com/{{ namespace }}/myproject5",
"readme_url": null,
"avatar_url": null,
"forks_count": 0,
"star_count": 0,
"last_activity_at": "2020-11-26T17:23:39.904Z",
"namespace": {
"id": 3,
"name": "{{ namespace }}",
"path": "{{ namespace }}",
"kind": "group",
"full_path": "{{ namespace }}",
"parent_id": null,
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
"web_url": "https://example.com/groups/{{ namespace }}"
},
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject5",
"_links": {
"self": "https://example.com/api/v4/projects/2",
"issues": "https://example.com/api/v4/projects/2/issues",
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
"labels": "https://example.com/api/v4/projects/2/labels",
"events": "https://example.com/api/v4/projects/2/events",
"members": "https://example.com/api/v4/projects/2/members",
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
},
"packages_enabled": true,
"empty_repo": false,
"archived": false,
"visibility": "public",
"resolve_outdated_diff_discussions": false,
"container_expiration_policy": {
"cadence": "1d",
"enabled": false,
"keep_n": 10,
"older_than": "90d",
"name_regex": ".*",
"name_regex_keep": null,
"next_run_at": "2020-11-27T17:23:39.927Z"
},
"issues_enabled": true,
"merge_requests_enabled": true,
"wiki_enabled": true,
"jobs_enabled": true,
"snippets_enabled": true,
"container_registry_enabled": true,
"service_desk_enabled": true,
"service_desk_address": "contact-for-myproject5-2-issue-@incoming.example.com",
"can_create_merge_request_in": true,
"issues_access_level": "enabled",
"repository_access_level": "enabled",
"merge_requests_access_level": "enabled",
"forking_access_level": "enabled",
"wiki_access_level": "enabled",
"builds_access_level": "enabled",
"snippets_access_level": "enabled",
"pages_access_level": "private",
"operations_access_level": "enabled",
"analytics_access_level": "enabled",
"container_registry_access_level": "enabled",
"security_and_compliance_access_level": "private",
"emails_disabled": null,
"shared_runners_enabled": true,
"lfs_enabled": true,
"creator_id": 1803951,
"import_url": null,
"import_type": null,
"import_status": "none",
"open_issues_count": 0,
"ci_default_git_depth": 50,
"ci_forward_deployment_enabled": true,
"ci_job_token_scope_enabled": false,
"ci_separated_caches": true,
"public_jobs": true,
"build_timeout": 3600,
"auto_cancel_pending_pipelines": "enabled",
"build_coverage_regex": null,
"ci_config_path": "",
"shared_with_groups": [],
"only_allow_merge_if_pipeline_succeeds": false,
"allow_merge_on_skipped_pipeline": null,
"restrict_user_defined_variables": false,
"request_access_enabled": true,
"only_allow_merge_if_all_discussions_are_resolved": false,
"remove_source_branch_after_merge": true,
"printing_merge_request_link_enabled": true,
"merge_method": "merge",
"squash_option": "default_off",
"enforce_auth_checks_on_uploads": true,
"suggestion_commit_message": null,
"merge_commit_template": null,
"squash_commit_template": null,
"auto_devops_enabled": false,
"auto_devops_deploy_strategy": "continuous",
"autoclose_referenced_issues": true,
"keep_latest_artifact": true,
"runner_token_expiration_interval": null,
"external_authorization_classification_label": "",
"requirements_enabled": false,
"requirements_access_level": "enabled",
"security_and_compliance_enabled": true,
"compliance_frameworks": []
}
]

View File

@@ -0,0 +1,42 @@
{
"id": 1,
"username": "myuser1",
"name": "My User",
"state": "active",
"avatar_url": "https://example.com/avatar",
"web_url": "https://example.com/myuser1",
"created_at": "2016-12-10T10:09:11.585Z",
"bio": "",
"location": "",
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": "",
"job_title": "",
"pronouns": "",
"bot": false,
"work_information": null,
"followers": 0,
"following": 0,
"is_followed": false,
"local_time": "11:59 PM",
"last_sign_in_at": "2020-03-14T09:13:44.977Z",
"confirmed_at": "2022-05-19T23:48:47.033Z",
"last_activity_on": "2022-05-19",
"email": "myuser1@example.com",
"theme_id": null,
"color_scheme_id": 1,
"projects_limit": 100000,
"current_sign_in_at": "2022-05-19T23:45:49.661Z",
"identities": [],
"can_create_group": true,
"can_create_project": true,
"two_factor_enabled": false,
"external": false,
"private_profile": false,
"commit_email": "myuser1@example.com",
"shared_runners_minutes_limit": 2000,
"extra_shared_runners_minutes_limit": null
}

View File

@@ -0,0 +1,14 @@
# Test-runner image for the e2e suite: Debian 11 with the Python test
# dependencies installed from the distro archive (no pip needed).
FROM docker.io/debian:11.3
# Install test dependencies and drop the apt lists to keep the image small.
RUN apt-get update \
    && apt-get install -y \
        python3-pytest \
        python3-toml \
        python3-git \
        python3-yaml \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /tests
# Commands are passed as a single shell string at `docker run` time.
ENTRYPOINT ["/bin/sh", "-c", "--"]

View File

@@ -8,9 +8,7 @@ import hashlib
import git import git
binary = os.path.join( binary = os.environ["GRM_BINARY"]
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "target/release/grm"
)
def grm(args, cwd=None, is_invalid=False): def grm(args, cwd=None, is_invalid=False):
@@ -20,6 +18,8 @@ def grm(args, cwd=None, is_invalid=False):
print(f"grmcmd: {args}") print(f"grmcmd: {args}")
print(f"stdout:\n{cmd.stdout}") print(f"stdout:\n{cmd.stdout}")
print(f"stderr:\n{cmd.stderr}") print(f"stderr:\n{cmd.stderr}")
assert "secret-token:" not in cmd.stdout
assert "secret-token:" not in cmd.stderr
assert "panicked" not in cmd.stderr assert "panicked" not in cmd.stderr
return cmd return cmd
@@ -208,7 +208,7 @@ class RepoTree:
""" """
) )
cmd = grm(["repos", "sync", "--config", self.config.name]) cmd = grm(["repos", "sync", "config", "--config", self.config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
return (self.root.name, self.config.name, ["test", "test_worktree"]) return (self.root.name, self.config.name, ["test", "test_worktree"])

View File

@@ -1,12 +0,0 @@
attrs==21.4.0
gitdb==4.0.9
GitPython==3.1.25
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.0.6
pytest==6.2.5
smmap==5.0.0
toml==0.10.2
typing_extensions==4.0.1

View File

@@ -3,13 +3,15 @@
import tempfile import tempfile
import toml import toml
import pytest
import yaml
from helpers import * from helpers import *
def test_repos_find_nonexistent(): def test_repos_find_nonexistent():
with NonExistentPath() as nonexistent_dir: with NonExistentPath() as nonexistent_dir:
cmd = grm(["repos", "find", nonexistent_dir]) cmd = grm(["repos", "find", "local", nonexistent_dir])
assert "does not exist" in cmd.stderr.lower() assert "does not exist" in cmd.stderr.lower()
assert cmd.returncode != 0 assert cmd.returncode != 0
assert not os.path.exists(nonexistent_dir) assert not os.path.exists(nonexistent_dir)
@@ -17,19 +19,30 @@ def test_repos_find_nonexistent():
def test_repos_find_file(): def test_repos_find_file():
with tempfile.NamedTemporaryFile() as tmpfile: with tempfile.NamedTemporaryFile() as tmpfile:
cmd = grm(["repos", "find", tmpfile.name]) cmd = grm(["repos", "find", "local", tmpfile.name])
assert "not a directory" in cmd.stderr.lower() assert "not a directory" in cmd.stderr.lower()
assert cmd.returncode != 0 assert cmd.returncode != 0
def test_repos_find_empty(): def test_repos_find_empty():
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
cmd = grm(["repos", "find", tmpdir]) cmd = grm(["repos", "find", "local", tmpdir])
assert cmd.returncode == 0 assert cmd.returncode == 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert len(cmd.stderr) != 0 assert len(cmd.stderr) != 0
def test_repos_find_invalid_format():
with tempfile.TemporaryDirectory() as tmpdir:
cmd = grm(
["repos", "find", "local", tmpdir, "--format", "invalidformat"],
is_invalid=True,
)
assert cmd.returncode != 0
assert len(cmd.stdout) == 0
assert "isn't a valid value" in cmd.stderr
def test_repos_find_non_git_repos(): def test_repos_find_non_git_repos():
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
shell( shell(
@@ -43,14 +56,16 @@ def test_repos_find_non_git_repos():
""" """
) )
cmd = grm(["repos", "find", tmpdir]) cmd = grm(["repos", "find", "local", tmpdir])
assert cmd.returncode == 0 assert cmd.returncode == 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert len(cmd.stderr) != 0 assert len(cmd.stderr) != 0
def test_repos_find(): @pytest.mark.parametrize("default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_find(configtype, default):
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
shell( shell(
f""" f"""
@@ -69,7 +84,7 @@ def test_repos_find():
( (
cd ./repo2 cd ./repo2
git init git init
git co -b main git checkout -b main
echo test > test echo test > test
git add test git add test
git commit -m "commit1" git commit -m "commit1"
@@ -83,11 +98,19 @@ def test_repos_find():
""" """
) )
cmd = grm(["repos", "find", tmpdir]) args = ["repos", "find", "local", tmpdir]
if not default:
args += ["--format", configtype]
cmd = grm(args)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert len(cmd.stderr) == 0 assert len(cmd.stderr) == 0
if default or configtype == "toml":
output = toml.loads(cmd.stdout) output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict) assert isinstance(output, dict)
assert set(output.keys()) == {"trees"} assert set(output.keys()) == {"trees"}
@@ -125,14 +148,24 @@ def test_repos_find():
assert origin["url"] == "https://example.com/repo2.git" assert origin["url"] == "https://example.com/repo2.git"
def test_repos_find_in_root(): @pytest.mark.parametrize("default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_find_in_root(configtype, default):
with TempGitRepository() as repo_dir: with TempGitRepository() as repo_dir:
cmd = grm(["repos", "find", repo_dir]) args = ["repos", "find", "local", repo_dir]
if not default:
args += ["--format", configtype]
cmd = grm(args)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert len(cmd.stderr) == 0 assert len(cmd.stderr) == 0
if default or configtype == "toml":
output = toml.loads(cmd.stdout) output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict) assert isinstance(output, dict)
assert set(output.keys()) == {"trees"} assert set(output.keys()) == {"trees"}
@@ -160,7 +193,9 @@ def test_repos_find_in_root():
assert someremote["type"] == "file" assert someremote["type"] == "file"
def test_repos_find_with_invalid_repo(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("default", [True, False])
def test_repos_find_with_invalid_repo(configtype, default):
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
shell( shell(
f""" f"""
@@ -179,7 +214,7 @@ def test_repos_find_with_invalid_repo():
( (
cd ./repo2 cd ./repo2
git init git init
git co -b main git checkout -b main
echo test > test echo test > test
git add test git add test
git commit -m "commit1" git commit -m "commit1"
@@ -193,11 +228,19 @@ def test_repos_find_with_invalid_repo():
""" """
) )
cmd = grm(["repos", "find", tmpdir]) args = ["repos", "find", "local", tmpdir]
if not default:
args += ["--format", configtype]
cmd = grm(args)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert "broken" in cmd.stderr assert "broken" in cmd.stderr
if default or configtype == "toml":
output = toml.loads(cmd.stdout) output = toml.loads(cmd.stdout)
elif configtype == "yaml":
output = yaml.safe_load(cmd.stdout)
else:
raise NotImplementedError()
assert isinstance(output, dict) assert isinstance(output, dict)
assert set(output.keys()) == {"trees"} assert set(output.keys()) == {"trees"}

View File

@@ -0,0 +1,950 @@
#!/usr/bin/env python3
import re
import os
import toml
import pytest
import yaml
from helpers import *
ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"]
PROVIDERS = ["github", "gitlab"]
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_invalid_provider(use_config):
    """An unknown provider name must be rejected.

    Exercised both via a config file (`repos find config`) and via direct
    CLI flags (`repos find remote --provider ...`).
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                f.write(
                    """
provider = "thisproviderdoesnotexist"
token_command = "true"
root = "/"
"""
                )
            args = ["repos", "find", "config", "--config", config.name]
            cmd = grm(args, is_invalid=True)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            "thisproviderdoesnotexist",
            "--token-command",
            "true",
            "--root",
            "/",
        ]
        cmd = grm(args, is_invalid=True)
    assert cmd.returncode != 0
    assert len(cmd.stdout) == 0
    if not use_config:
        # `re.match` with a leading ".*" is a redundant anchor, and "." does
        # not cross newlines — a wrapped clap error message would slip past
        # the old assertion. Search with DOTALL instead.
        assert re.search(r"isn't a valid value for.*provider", cmd.stderr, re.DOTALL)
@pytest.mark.parametrize("provider", PROVIDERS)
def test_repos_find_remote_invalid_format(provider):
    """An unknown output format must produce a usage error and no stdout."""
    args = ["repos", "find", "remote"]
    args += ["--provider", provider]
    args += ["--format", "invalidformat"]
    args += ["--token-command", "true"]
    args += ["--root", "/myroot"]
    cmd = grm(args, is_invalid=True)
    assert cmd.returncode != 0
    assert len(cmd.stdout) == 0
    assert "isn't a valid value" in cmd.stderr
@pytest.mark.parametrize("provider", PROVIDERS)
def test_repos_find_remote_token_command_failed(provider):
    """A token command that exits non-zero must fail the run and be reported."""
    flags = {
        "--provider": provider,
        "--format": "yaml",
        "--token-command": "false",
        "--root": "/myroot",
    }
    args = ["repos", "find", "remote"]
    for flag, value in flags.items():
        args += [flag, value]
    cmd = grm(args, is_invalid=True)
    assert cmd.returncode != 0
    assert len(cmd.stdout) == 0
    assert "token command failed" in cmd.stderr.lower()
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_wrong_token(provider, use_config):
    """An invalid auth token must surface the provider's "bad credentials" error.

    Covers both configuration styles: config file and direct CLI flags.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            # Write the provider config; the inner `with` closes the file so
            # grm reads a fully flushed config.
            with open(config.name, "w") as f:
                f.write(
                    f"""
provider = "{provider}"
token_command = "echo wrongtoken"
root = "/myroot"
[filters]
access = true
"""
                )
            args = ["repos", "find", "config", "--config", config.name]
            cmd = grm(args, is_invalid=True)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo wrongtoken",
            "--root",
            "/myroot",
            "--access",
        ]
        cmd = grm(args, is_invalid=True)
    assert cmd.returncode != 0
    assert len(cmd.stdout) == 0
    # Error text comes from the (mock) provider API rejecting the token.
    assert "bad credentials" in cmd.stderr.lower()
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
    """Without any filters the provider query matches nothing.

    The command must still succeed and emit an empty tree list in the
    requested output format.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                f.write(
                    f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                )
            args = ["repos", "find", "config", "--config", config.name]
            # `default` exercises the implicit output format (toml).
            if not default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
        ]
        if not default:
            args += ["--format", configtype]
        cmd = grm(args)
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    # Parse stdout with the parser matching the requested output format.
    if default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 0
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user_empty(
    provider, configtype, configtype_default, use_config
):
    """Filtering for a user that owns no repositories yields an empty tree list.

    The run must still exit successfully in every output format.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
[filters]
users = ["someotheruser"]
"""
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            # configtype_default == True exercises the implicit format (toml).
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--user",
            "someotheruser",
        ]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 0
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("use_owner", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    use_owner,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Query a provider for one user's repositories and validate the tree.

    Checks the five `myuser1` fixture projects, including worktree setup,
    SSH forcing and the alternate API endpoint (gitlab-only).

    Fixes over the previous revision: removed a leftover debug `print(cfg)`
    and a spurious f-prefix on a placeholder-free string.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    cfg += "force_ssh = true\n"
                if use_owner:
                    cfg += """
[filters]
owner = true\n
"""
                else:
                    cfg += """
[filters]
users = ["myuser1"]\n
"""
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
        ]
        if use_owner:
            args += ["--owner"]
        else:
            args += ["--user", "myuser1"]
        if force_ssh:
            args += ["--force-ssh"]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    if use_alternate_endpoint and provider == "github":
        # Overriding the API URL is only implemented for gitlab.
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 1
    assert set(output["trees"][0].keys()) == {"root", "repos"}
    assert isinstance(output["trees"][0]["repos"], list)
    assert len(output["trees"][0]["repos"]) == 5
    for i in range(1, 6):
        repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
            0
        ]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        # myproject1 gets SSH even without --force-ssh — presumably a private
        # fixture repo on the mock provider; TODO confirm.
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_group_empty(
    provider, configtype, configtype_default, use_alternate_endpoint, use_config
):
    """Filtering for a group that owns no repositories yields an empty tree list.

    Also exercises the alternate API endpoint, which must be rejected for
    github (overriding is gitlab-only).
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
[filters]
groups = ["someothergroup"]\n
"""
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--group",
            "someothergroup",
        ]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    if use_alternate_endpoint and provider == "github":
        # github does not support a custom API URL; the run must fail early.
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 0
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_group(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Query a provider for one group's repositories and validate the tree.

    Checks the five `mygroup1` fixture projects, including worktree setup,
    SSH forcing and the alternate API endpoint (gitlab-only).

    Fixes over the previous revision: dropped a spurious f-prefix on a
    placeholder-free string and hoisted the duplicated remote-name assertion
    out of the per-protocol branches (matching test_repos_find_remote_user).
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    cfg += "force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
[filters]
groups = ["mygroup1"]\n
"""
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--group",
            "mygroup1",
        ]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    if use_alternate_endpoint and provider == "github":
        # Overriding the API URL is only implemented for gitlab.
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 1
    assert set(output["trees"][0].keys()) == {"root", "repos"}
    assert isinstance(output["trees"][0]["repos"], list)
    assert len(output["trees"][0]["repos"]) == 5
    for i in range(1, 6):
        repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
            0
        ]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        # myproject1 gets SSH even without --force-ssh — presumably a private
        # fixture repo on the mock provider; TODO confirm.
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("use_owner", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user_and_group(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    use_owner,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Combine a user filter (or --owner) with a group filter.

    Expects exactly two namespaces in the result: /myroot/myuser1 and
    /myroot/mygroup1, each with the five fixture projects.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    cfg += f"force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
[filters]
groups = ["mygroup1"]\n
"""
                # The user/owner filter is appended below the [filters] header.
                if use_owner:
                    cfg += "owner = true\n"
                else:
                    cfg += 'users = ["myuser1"]\n'
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--group",
            "mygroup1",
        ]
        if use_owner:
            args += ["--owner"]
        else:
            args += ["--user", "myuser1"]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    if use_alternate_endpoint and provider == "github":
        # Overriding the API URL is only implemented for gitlab.
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 2
    # User namespace: the five myuser1 fixture projects.
    user_namespace = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]
    assert set(user_namespace.keys()) == {"root", "repos"}
    assert isinstance(user_namespace["repos"], list)
    assert len(user_namespace["repos"]) == 5
    for i in range(1, 6):
        repo = [r for r in user_namespace["repos"] if r["name"] == f"myproject{i}"][0]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        # myproject1 gets SSH even without --force-ssh — presumably a private
        # fixture repo; TODO confirm.
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
    # Group namespace: the five mygroup1 fixture projects.
    group_namespace = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][0]
    assert set(group_namespace.keys()) == {"root", "repos"}
    assert isinstance(group_namespace["repos"], list)
    assert len(group_namespace["repos"]) == 5
    for i in range(1, 6):
        repo = [r for r in group_namespace["repos"] if r["name"] == f"myproject{i}"][0]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("with_user_filter", [True, False])
@pytest.mark.parametrize("with_group_filter", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_owner(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    with_user_filter,
    with_group_filter,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """The `access = true` (--access) filter: everything the token can reach.

    Expects four namespaces (myuser1, myuser2, mygroup1, mygroup2). Extra
    user/group filters widen the myuser1/mygroup1 namespaces from the
    accessible subset to all five fixture projects.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
provider = "{provider}"
token_command = "echo secret-token:myauthtoken"
root = "/myroot"
"""
                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    cfg += f"force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
[filters]
access = true\n
"""
                # Optional extra filters appended under the [filters] header.
                if with_user_filter:
                    cfg += 'users = ["myuser1"]\n'
                if with_group_filter:
                    cfg += 'groups = ["mygroup1"]\n'
                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--access",
        ]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if with_user_filter:
            args += ["--user", "myuser1"]
        if with_group_filter:
            args += ["--group", "mygroup1"]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]
        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    if use_alternate_endpoint and provider == "github":
        # Overriding the API URL is only implemented for gitlab.
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()
    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 4
    # myuser1: all five projects with the user filter, otherwise only the
    # two the token has access to.
    user_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]
    assert set(user_namespace_1.keys()) == {"root", "repos"}
    assert isinstance(user_namespace_1["repos"], list)
    if with_user_filter:
        assert len(user_namespace_1["repos"]) == 5
        for i in range(1, 6):
            repo = [
                r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert repo["worktree_setup"] is (not worktree_default and worktree)
            assert isinstance(repo["remotes"], list)
            assert len(repo["remotes"]) == 1
            assert repo["remotes"][0]["name"] == provider
            if force_ssh or i == 1:
                assert (
                    repo["remotes"][0]["url"]
                    == f"ssh://git@example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "ssh"
            else:
                assert (
                    repo["remotes"][0]["url"]
                    == f"https://example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "https"
    else:
        assert len(user_namespace_1["repos"]) == 2
        for i in range(1, 3):
            repo = [
                r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert repo["worktree_setup"] is (not worktree_default and worktree)
            assert isinstance(repo["remotes"], list)
            assert len(repo["remotes"]) == 1
            assert repo["remotes"][0]["name"] == provider
            if force_ssh or i == 1:
                assert (
                    repo["remotes"][0]["url"]
                    == f"ssh://git@example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "ssh"
            else:
                assert (
                    repo["remotes"][0]["url"]
                    == f"https://example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "https"
    # myuser2: exactly one accessible project (myproject3).
    user_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/myuser2"][0]
    assert set(user_namespace_2.keys()) == {"root", "repos"}
    assert isinstance(user_namespace_2["repos"], list)
    assert len(user_namespace_2["repos"]) == 1
    repo = user_namespace_2["repos"][0]
    assert repo["worktree_setup"] is (not worktree_default and worktree)
    assert isinstance(repo["remotes"], list)
    assert len(repo["remotes"]) == 1
    assert repo["remotes"][0]["name"] == provider
    if force_ssh:
        assert (
            repo["remotes"][0]["url"] == f"ssh://git@example.com/myuser2/myproject3.git"
        )
        assert repo["remotes"][0]["type"] == "ssh"
    else:
        assert (
            repo["remotes"][0]["url"] == f"https://example.com/myuser2/myproject3.git"
        )
        assert repo["remotes"][0]["type"] == "https"
    # mygroup1: all five projects with the group filter, otherwise only the
    # single accessible one (myproject4).
    group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][
        0
    ]
    assert set(group_namespace_1.keys()) == {"root", "repos"}
    assert isinstance(group_namespace_1["repos"], list)
    if with_group_filter:
        assert len(group_namespace_1["repos"]) == 5
        for i in range(1, 6):
            repo = [
                r for r in group_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert repo["worktree_setup"] is (not worktree_default and worktree)
            assert isinstance(repo["remotes"], list)
            assert len(repo["remotes"]) == 1
            assert repo["remotes"][0]["name"] == provider
            if force_ssh or i == 1:
                assert (
                    repo["remotes"][0]["url"]
                    == f"ssh://git@example.com/mygroup1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "ssh"
            else:
                assert (
                    repo["remotes"][0]["url"]
                    == f"https://example.com/mygroup1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "https"
    else:
        assert len(group_namespace_1["repos"]) == 1
        repo = group_namespace_1["repos"][0]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        if force_ssh:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/mygroup1/myproject4.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/mygroup1/myproject4.git"
            )
            assert repo["remotes"][0]["type"] == "https"
    # mygroup2: exactly one accessible project (myproject5).
    group_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup2"][
        0
    ]
    assert set(group_namespace_2.keys()) == {"root", "repos"}
    assert isinstance(group_namespace_2["repos"], list)
    assert len(group_namespace_2["repos"]) == 1
    repo = group_namespace_2["repos"][0]
    assert repo["worktree_setup"] is (not worktree_default and worktree)
    assert isinstance(repo["remotes"], list)
    assert len(repo["remotes"]) == 1
    assert repo["remotes"][0]["name"] == provider
    if force_ssh:
        assert (
            repo["remotes"][0]["url"]
            == f"ssh://git@example.com/mygroup2/myproject5.git"
        )
        assert repo["remotes"][0]["type"] == "ssh"
    else:
        assert (
            repo["remotes"][0]["url"] == f"https://example.com/mygroup2/myproject5.git"
        )
        assert repo["remotes"][0]["type"] == "https"

View File

@@ -2,6 +2,7 @@
import tempfile import tempfile
import re import re
import textwrap
import pytest import pytest
import toml import toml
@@ -9,8 +10,204 @@ import git
from helpers import * from helpers import *
templates = {
"repo_simple": {
"toml": """
[[trees]]
root = "{root}"
def test_repos_sync_config_is_valid_symlink(): [[trees.repos]]
name = "test"
""",
"yaml": """
trees:
- root: "{root}"
repos:
- name: "test"
""",
},
"repo_with_remote": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "{remotename}"
url = "file://{remote}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: test
remotes:
- name: "{remotename}"
url: "file://{remote}"
type: "file"
"""
),
},
"repo_with_two_remotes": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote2}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: "test"
remotes:
- name: "origin"
url: "file://{remote1}"
type: "file"
- name: "origin2"
url: "file://{remote2}"
type: "file"
"""
),
},
"worktree_repo_simple": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "test"
worktree_setup = true
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: test
worktree_setup: true
"""
),
},
"worktree_repo_with_remote": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "test"
worktree_setup = true
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: test
worktree_setup: true
remotes:
- name: origin
url: "file://{remote}"
type: "file"
"""
),
},
"repo_in_subdirectory": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "outer/inner"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: outer/inner
remotes:
- name: origin
url: "file://{remote}"
type: "file"
"""
),
},
"nested_trees": {
"toml": """
[[trees]]
root = "{root}"
[[trees.repos]]
name = "outer"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees]]
root = "{root}/subdir"
[[trees.repos]]
name = "inner"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote2}"
type = "file"
""",
"yaml": textwrap.dedent(
"""
trees:
- root: "{root}"
repos:
- name: outer
remotes:
- name: origin
url: "file://{remote1}"
type: "file"
- root: "{root}/subdir"
repos:
- name: inner
remotes:
- name: origin
url: "file://{remote2}"
type: "file"
"""
),
},
}
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_config_is_valid_symlink(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote, head_commit_sha): with TempGitFileRemote() as (remote, head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
@@ -20,21 +217,14 @@ def test_repos_sync_config_is_valid_symlink():
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config_symlink]) subprocess.run(["cat", config.name])
cmd = grm(["repos", "sync", "config", "--config", config_symlink])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -54,7 +244,7 @@ def test_repos_sync_config_is_invalid_symlink():
config_symlink = os.path.join(config_dir, "cfglink") config_symlink = os.path.join(config_dir, "cfglink")
os.symlink(nonexistent_dir, config_symlink) os.symlink(nonexistent_dir, config_symlink)
cmd = grm(["repos", "sync", "--config", config_symlink]) cmd = grm(["repos", "sync", "config", "--config", config_symlink])
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
@@ -65,7 +255,7 @@ def test_repos_sync_config_is_invalid_symlink():
def test_repos_sync_config_is_directory(): def test_repos_sync_config_is_directory():
with tempfile.TemporaryDirectory() as config: with tempfile.TemporaryDirectory() as config:
cmd = grm(["repos", "sync", "--config", config]) cmd = grm(["repos", "sync", "config", "--config", config])
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
@@ -77,30 +267,22 @@ def test_repos_sync_config_is_unreadable():
config_path = os.path.join(config_dir, "cfg") config_path = os.path.join(config_dir, "cfg")
open(config_path, "w") open(config_path, "w")
os.chmod(config_path, 0o0000) os.chmod(config_path, 0o0000)
cmd = grm(["repos", "sync", "--config", config_path]) cmd = grm(["repos", "sync", "config", "--config", config_path])
assert os.path.exists(config_path) assert os.path.exists(config_path)
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert "permission denied" in cmd.stderr.lower()
def test_repos_sync_unmanaged_repos(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_unmanaged_repos(configtype):
with tempfile.TemporaryDirectory() as root: with tempfile.TemporaryDirectory() as root:
with TempGitRepository(dir=root) as unmanaged_repo: with TempGitRepository(dir=root) as unmanaged_repo:
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(templates["repo_simple"][configtype].format(root=root))
f"""
[[trees]]
root = "{root}"
[[trees.repos]] cmd = grm(["repos", "sync", "config", "--config", config.name])
name = "test"
"""
)
cmd = grm(["repos", "sync", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(root, "test") git_dir = os.path.join(root, "test")
@@ -112,53 +294,33 @@ def test_repos_sync_unmanaged_repos():
assert any([re.match(regex, l) for l in cmd.stderr.lower().split("\n")]) assert any([re.match(regex, l) for l in cmd.stderr.lower().split("\n")])
def test_repos_sync_root_is_file(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_root_is_file(configtype):
with tempfile.NamedTemporaryFile() as target: with tempfile.NamedTemporaryFile() as target:
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(templates["repo_simple"][configtype].format(root=target.name))
f"""
[[trees]]
root = "{target.name}"
[[trees.repos]] cmd = grm(["repos", "sync", "config", "--config", config.name])
name = "test"
"""
)
cmd = grm(["repos", "sync", "--config", config.name])
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert "not a directory" in cmd.stderr.lower() assert "not a directory" in cmd.stderr.lower()
def test_repos_sync_normal_clone(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_clone(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_two_remotes"][configtype].format(
[[trees]] root=target, remote1=remote1, remote2=remote2
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote2}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -183,21 +345,92 @@ def test_repos_sync_normal_clone():
assert urls[0] == f"file://{remote2}" assert urls[0] == f"file://{remote2}"
def test_repos_sync_normal_init(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_repo_in_subdirectory(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote, remote_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_in_subdirectory"][configtype].format(
[[trees]] root=target, remote=remote
root = "{target}" )
[[trees.repos]]
name = "test"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
git_dir = os.path.join(target, "outer", "inner")
assert os.path.exists(git_dir)
with git.Repo(git_dir) as repo:
assert not repo.bare
assert not repo.is_dirty()
assert set([str(r) for r in repo.remotes]) == {"origin"}
assert str(repo.active_branch) == "master"
assert str(repo.head.commit) == remote_head_commit_sha
assert len(repo.remotes) == 1
urls = list(repo.remote("origin").urls)
assert len(urls) == 1
assert urls[0] == f"file://{remote}"
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert not "found unmanaged repository" in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_nested_clone(configtype):
with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
f.write(
templates["nested_trees"][configtype].format(
root=target, remote1=remote1, remote2=remote2
)
)
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0
def validate(git_dir, sha, remote):
assert os.path.exists(git_dir)
with git.Repo(git_dir) as repo:
assert not repo.bare
assert not repo.is_dirty()
assert set([str(r) for r in repo.remotes]) == {"origin"}
assert str(repo.active_branch) == "master"
assert str(repo.head.commit) == sha
assert len(repo.remotes) == 1
urls = list(repo.remote("origin").urls)
assert len(urls) == 1
assert urls[0] == f"file://{remote}"
validate(
os.path.join(target, "outer"), remote1_head_commit_sha, remote1
)
validate(
os.path.join(target, "subdir", "inner"),
remote2_head_commit_sha,
remote2,
)
cmd = grm(["repos", "sync", "config", "--config", config.name])
print(cmd.stdout)
print(cmd.stderr)
assert not "found unmanaged repository" in cmd.stderr.lower()
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_init(configtype):
with tempfile.TemporaryDirectory() as target:
with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f:
f.write(templates["repo_simple"][configtype].format(root=target))
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -210,28 +443,20 @@ def test_repos_sync_normal_init():
assert not repo.head.is_valid() assert not repo.head.is_valid()
def test_repos_sync_normal_add_remote(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_add_remote(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -246,26 +471,12 @@ def test_repos_sync_normal_add_remote():
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_two_remotes"][configtype].format(
[[trees]] root=target, remote1=remote1, remote2=remote2
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote2}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
with git.Repo(git_dir) as repo: with git.Repo(git_dir) as repo:
assert set([str(r) for r in repo.remotes]) == { assert set([str(r) for r in repo.remotes]) == {
@@ -282,33 +493,20 @@ def test_repos_sync_normal_add_remote():
assert urls[0] == f"file://{remote2}" assert urls[0] == f"file://{remote2}"
def test_repos_sync_normal_remove_remote(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_remove_remote(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_two_remotes"][configtype].format(
[[trees]] root=target, remote1=remote1, remote2=remote2
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote2}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -326,21 +524,12 @@ def test_repos_sync_normal_remove_remote():
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote2, remotename="origin2"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote2}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
shell(f"cd {git_dir} && git remote -v") shell(f"cd {git_dir} && git remote -v")
with git.Repo(git_dir) as repo: with git.Repo(git_dir) as repo:
@@ -369,28 +558,20 @@ def test_repos_sync_normal_remove_remote():
assert urls[0] == f"file://{remote2}" assert urls[0] == f"file://{remote2}"
def test_repos_sync_normal_change_remote_url(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_change_remote_url(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -405,21 +586,12 @@ def test_repos_sync_normal_change_remote_url():
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote2, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote2}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
with git.Repo(git_dir) as repo: with git.Repo(git_dir) as repo:
assert set([str(r) for r in repo.remotes]) == {"origin"} assert set([str(r) for r in repo.remotes]) == {"origin"}
@@ -429,28 +601,20 @@ def test_repos_sync_normal_change_remote_url():
assert urls[0] == f"file://{remote2}" assert urls[0] == f"file://{remote2}"
def test_repos_sync_normal_change_remote_name(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_change_remote_name(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
@@ -465,21 +629,12 @@ def test_repos_sync_normal_change_remote_name():
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin2"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
with git.Repo(git_dir) as repo: with git.Repo(git_dir) as repo:
# See the note in `test_repos_sync_normal_remove_remote()` # See the note in `test_repos_sync_normal_remove_remote()`
@@ -492,34 +647,41 @@ def test_repos_sync_normal_change_remote_name():
assert urls[0] == f"file://{remote1}" assert urls[0] == f"file://{remote1}"
def test_repos_sync_worktree_clone(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("init_worktree", [True, False, "default"])
def test_repos_sync_worktree_clone(configtype, init_worktree):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote, head_commit_sha): with TempGitFileRemote() as (remote, head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["worktree_repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
worktree_setup = true
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) args = ["repos", "sync", "config", "--config", config.name]
if init_worktree is True:
args.append("--init-worktree=true")
if init_worktree is False:
args.append("--init-worktree=false")
for i in [1, 2]:
cmd = grm(args)
assert cmd.returncode == 0 assert cmd.returncode == 0
worktree_dir = f"{target}/test" worktree_dir = f"{target}/test"
assert os.path.exists(worktree_dir) assert os.path.exists(worktree_dir)
assert set(os.listdir(worktree_dir)) == {".git-main-working-tree"} if init_worktree is True or init_worktree == "default":
assert set(os.listdir(worktree_dir)) == {
".git-main-working-tree",
"master",
}
else:
assert set(os.listdir(worktree_dir)) == {
".git-main-working-tree"
}
with git.Repo( with git.Repo(
os.path.join(worktree_dir, ".git-main-working-tree") os.path.join(worktree_dir, ".git-main-working-tree")
@@ -530,22 +692,16 @@ def test_repos_sync_worktree_clone():
assert str(repo.head.commit) == head_commit_sha assert str(repo.head.commit) == head_commit_sha
def test_repos_sync_worktree_init(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_worktree_init(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["worktree_repo_simple"][configtype].format(root=target)
[[trees]]
root = "{target}"
[[trees.repos]]
name = "test"
worktree_setup = true
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
worktree_dir = f"{target}/test" worktree_dir = f"{target}/test"
@@ -559,149 +715,112 @@ def test_repos_sync_worktree_init():
assert not repo.head.is_valid() assert not repo.head.is_valid()
def test_repos_sync_invalid_toml(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_invalid_syntax(configtype):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
if configtype == "toml":
f.write( f.write(
f""" f"""
[[trees]] [[trees]]
root = invalid as there are no quotes ;) root = invalid as there are no quotes ;)
""" """
) )
cmd = grm(["repos", "sync", "--config", config.name]) elif configtype == "yaml":
f.write(
f"""
trees:
wrong:
indentation:
"""
)
else:
raise NotImplementedError()
cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0 assert cmd.returncode != 0
def test_repos_sync_unchanged(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_unchanged(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_two_remotes"][configtype].format(
[[trees]] root=target, remote1=remote1, remote2=remote2
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
[[trees.repos.remotes]]
name = "origin2"
url = "file://{remote2}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
before = checksum_directory(target) before = checksum_directory(target)
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
after = checksum_directory(target) after = checksum_directory(target)
assert cmd.returncode == 0 assert cmd.returncode == 0
assert before == after assert before == after
def test_repos_sync_normal_change_to_worktree(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_normal_change_to_worktree(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["worktree_repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
worktree_setup = true
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0 assert cmd.returncode != 0
assert "already exists" in cmd.stderr assert "already exists" in cmd.stderr
assert "not using a worktree setup" in cmd.stderr assert "not using a worktree setup" in cmd.stderr
def test_repos_sync_worktree_change_to_normal(): @pytest.mark.parametrize("configtype", ["toml", "yaml"])
def test_repos_sync_worktree_change_to_normal(configtype):
with tempfile.TemporaryDirectory() as target: with tempfile.TemporaryDirectory() as target:
with TempGitFileRemote() as (remote1, remote1_head_commit_sha): with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
with TempGitFileRemote() as (remote2, remote2_head_commit_sha): with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
with tempfile.NamedTemporaryFile() as config: with tempfile.NamedTemporaryFile() as config:
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["worktree_repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
worktree_setup = true
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode == 0 assert cmd.returncode == 0
git_dir = os.path.join(target, "test") git_dir = os.path.join(target, "test")
with open(config.name, "w") as f: with open(config.name, "w") as f:
f.write( f.write(
f""" templates["repo_with_remote"][configtype].format(
[[trees]] root=target, remote=remote1, remotename="origin"
root = "{target}" )
[[trees.repos]]
name = "test"
[[trees.repos.remotes]]
name = "origin"
url = "file://{remote1}"
type = "file"
"""
) )
cmd = grm(["repos", "sync", "--config", config.name]) cmd = grm(["repos", "sync", "config", "--config", config.name])
assert cmd.returncode != 0 assert cmd.returncode != 0
assert "already exists" in cmd.stderr assert "already exists" in cmd.stderr
assert "using a worktree setup" in cmd.stderr assert "using a worktree setup" in cmd.stderr

View File

@@ -2,6 +2,8 @@
from helpers import * from helpers import *
import re
import pytest import pytest
import git import git
@@ -51,7 +53,9 @@ def test_worktree_fetch():
@pytest.mark.parametrize("rebase", [True, False]) @pytest.mark.parametrize("rebase", [True, False])
@pytest.mark.parametrize("ffable", [True, False]) @pytest.mark.parametrize("ffable", [True, False])
def test_worktree_pull(rebase, ffable): @pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False])
def test_worktree_pull(rebase, ffable, has_changes, stash):
with TempGitRepositoryWorktree() as (base_dir, root_commit): with TempGitRepositoryWorktree() as (base_dir, root_commit):
with TempGitFileRemote() as (remote_path, _remote_sha): with TempGitFileRemote() as (remote_path, _remote_sha):
shell( shell(
@@ -94,20 +98,40 @@ def test_worktree_pull(rebase, ffable):
""" """
) )
if has_changes:
shell(
f"""
cd {base_dir}/master
echo change >> root-commit-in-worktree-1
echo uncommitedchange > uncommitedchange
"""
)
args = ["wt", "pull"] args = ["wt", "pull"]
if rebase: if rebase:
args += ["--rebase"] args += ["--rebase"]
if stash:
args += ["--stash"]
cmd = grm(args, cwd=base_dir) cmd = grm(args, cwd=base_dir)
assert cmd.returncode == 0 if has_changes and not stash:
assert cmd.returncode != 0
assert re.match(r".*master.*contains changes.*", cmd.stderr)
else:
assert repo.commit("upstream/master").hexsha == remote_commit assert repo.commit("upstream/master").hexsha == remote_commit
assert repo.commit("origin/master").hexsha == root_commit assert repo.commit("origin/master").hexsha == root_commit
assert ( assert (
repo.commit("master").hexsha != repo.commit("origin/master").hexsha repo.commit("master").hexsha
!= repo.commit("origin/master").hexsha
) )
if has_changes:
assert ["uncommitedchange"] == repo.untracked_files
assert repo.is_dirty()
else:
assert not repo.is_dirty()
if not rebase: if not rebase:
if ffable: if ffable:
assert cmd.returncode == 0
assert ( assert (
repo.commit("master").hexsha repo.commit("master").hexsha
!= repo.commit("origin/master").hexsha != repo.commit("origin/master").hexsha
@@ -116,16 +140,22 @@ def test_worktree_pull(rebase, ffable):
repo.commit("master").hexsha repo.commit("master").hexsha
== repo.commit("upstream/master").hexsha == repo.commit("upstream/master").hexsha
) )
assert repo.commit("upstream/master").hexsha == remote_commit assert (
repo.commit("upstream/master").hexsha == remote_commit
)
else: else:
assert cmd.returncode != 0
assert "cannot be fast forwarded" in cmd.stderr assert "cannot be fast forwarded" in cmd.stderr
assert ( assert (
repo.commit("master").hexsha repo.commit("master").hexsha
!= repo.commit("origin/master").hexsha != repo.commit("origin/master").hexsha
) )
assert repo.commit("master").hexsha != remote_commit assert repo.commit("master").hexsha != remote_commit
assert repo.commit("upstream/master").hexsha == remote_commit assert (
repo.commit("upstream/master").hexsha == remote_commit
)
else: else:
assert cmd.returncode == 0
if ffable: if ffable:
assert ( assert (
repo.commit("master").hexsha repo.commit("master").hexsha
@@ -135,7 +165,9 @@ def test_worktree_pull(rebase, ffable):
repo.commit("master").hexsha repo.commit("master").hexsha
== repo.commit("upstream/master").hexsha == repo.commit("upstream/master").hexsha
) )
assert repo.commit("upstream/master").hexsha == remote_commit assert (
repo.commit("upstream/master").hexsha == remote_commit
)
else: else:
assert ( assert (
repo.commit("master").message.strip() repo.commit("master").message.strip()

View File

@@ -2,15 +2,18 @@
from helpers import * from helpers import *
import pytest import re
import pytest
import git import git
@pytest.mark.parametrize("pull", [True, False]) @pytest.mark.parametrize("pull", [True, False])
@pytest.mark.parametrize("rebase", [True, False]) @pytest.mark.parametrize("rebase", [True, False])
@pytest.mark.parametrize("ffable", [True, False]) @pytest.mark.parametrize("ffable", [True, False])
def test_worktree_rebase(pull, rebase, ffable): @pytest.mark.parametrize("has_changes", [True, False])
@pytest.mark.parametrize("stash", [True, False])
def test_worktree_rebase(pull, rebase, ffable, has_changes, stash):
with TempGitRepositoryWorktree() as (base_dir, _root_commit): with TempGitRepositoryWorktree() as (base_dir, _root_commit):
with open(os.path.join(base_dir, "grm.toml"), "w") as f: with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write('persistent_branches = ["mybasebranch"]') f.write('persistent_branches = ["mybasebranch"]')
@@ -83,6 +86,14 @@ def test_worktree_rebase(pull, rebase, ffable):
""" """
) )
if has_changes:
shell(
f"""
cd {base_dir}/myfeatbranch
echo uncommitedchange > uncommitedchange
"""
)
grm(["wt", "delete", "--force", "tmp"], cwd=base_dir) grm(["wt", "delete", "--force", "tmp"], cwd=base_dir)
repo = git.Repo(f"{base_dir}/.git-main-working-tree") repo = git.Repo(f"{base_dir}/.git-main-working-tree")
@@ -133,17 +144,23 @@ def test_worktree_rebase(pull, rebase, ffable):
args += ["--pull"] args += ["--pull"]
if rebase: if rebase:
args += ["--rebase"] args += ["--rebase"]
if stash:
args += ["--stash"]
cmd = grm(args, cwd=base_dir) cmd = grm(args, cwd=base_dir)
print(args)
if rebase and not pull: if rebase and not pull:
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stderr) != 0 assert len(cmd.stderr) != 0
elif has_changes and not stash:
assert cmd.returncode != 0
assert re.match(r".*myfeatbranch.*contains changes.*", cmd.stderr)
else: else:
assert cmd.returncode == 0
repo = git.Repo(f"{base_dir}/myfeatbranch") repo = git.Repo(f"{base_dir}/myfeatbranch")
if has_changes:
assert ["uncommitedchange"] == repo.untracked_files
if pull: if pull:
if rebase: if rebase:
assert cmd.returncode == 0
if ffable: if ffable:
assert ( assert (
repo.commit("HEAD").message.strip() repo.commit("HEAD").message.strip()
@@ -190,6 +207,7 @@ def test_worktree_rebase(pull, rebase, ffable):
assert repo.commit("HEAD~6").message.strip() == "commit-root" assert repo.commit("HEAD~6").message.strip() == "commit-root"
else: else:
if ffable: if ffable:
assert cmd.returncode == 0
assert ( assert (
repo.commit("HEAD").message.strip() repo.commit("HEAD").message.strip()
== "commit-in-feat-remote" == "commit-in-feat-remote"
@@ -208,6 +226,7 @@ def test_worktree_rebase(pull, rebase, ffable):
) )
assert repo.commit("HEAD~4").message.strip() == "commit-root" assert repo.commit("HEAD~4").message.strip() == "commit-root"
else: else:
assert cmd.returncode != 0
assert ( assert (
repo.commit("HEAD").message.strip() repo.commit("HEAD").message.strip()
== "commit-in-feat-local-no-ff" == "commit-in-feat-local-no-ff"
@@ -226,6 +245,7 @@ def test_worktree_rebase(pull, rebase, ffable):
) )
assert repo.commit("HEAD~4").message.strip() == "commit-root" assert repo.commit("HEAD~4").message.strip() == "commit-root"
else: else:
assert cmd.returncode == 0
if ffable: if ffable:
assert repo.commit("HEAD").message.strip() == "commit-in-feat-local" assert repo.commit("HEAD").message.strip() == "commit-in-feat-local"
assert ( assert (

View File

@@ -1,10 +1,18 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import re
from helpers import * from helpers import *
import pytest
def test_worktree_status():
@pytest.mark.parametrize("has_config", [True, False])
def test_worktree_status(has_config):
with TempGitRepositoryWorktree() as (base_dir, _commit): with TempGitRepositoryWorktree() as (base_dir, _commit):
if has_config:
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
f.write("")
cmd = grm(["wt", "add", "test"], cwd=base_dir) cmd = grm(["wt", "add", "test"], cwd=base_dir)
assert cmd.returncode == 0 assert cmd.returncode == 0
@@ -40,3 +48,30 @@ def test_worktree_status_non_git():
assert cmd.returncode != 0 assert cmd.returncode != 0
assert len(cmd.stdout) == 0 assert len(cmd.stdout) == 0
assert len(cmd.stderr) != 0 assert len(cmd.stderr) != 0
def test_worktree_status_warn_with_non_worktree_dir():
    # A stray plain directory below the worktree root should produce a
    # warning on stderr without failing `wt status`.
    with TempGitRepositoryWorktree() as (base_dir, _commit):
        cmd = grm(["wt", "add", "test"], cwd=base_dir)
        assert cmd.returncode == 0

        shell(
            f"""
            cd {base_dir}
            mkdir not_a_worktree
            """
        )

        cmd = grm(["wt", "status"], cwd=base_dir)
        assert cmd.returncode == 0
        assert len(cmd.stdout) != 0
        assert len(cmd.stderr) != 0
        # The warning must mention the offending directory by name.
        match = re.match(
            ".*error.*not_a_worktree.*not a valid worktree directory",
            cmd.stderr,
            re.IGNORECASE,
        )
        assert match is not None

View File

@@ -1,18 +0,0 @@
#!/usr/bin/env bash
set -o nounset
set -o errexit
# shellcheck disable=SC1091
source ./venv/bin/activate
pip --disable-pip-version-check install -r ./requirements.txt
pip3 list --outdated --format=freeze | grep -v '^\-e' | cut -d = -f 1 | while read -r package ; do
pip install --upgrade "${package}"
version="$(pip show "${package}" | grep '^Version' | cut -d ' ' -f 2)"
message="e2e_tests/pip: Update ${package} to ${version}"
pip freeze | grep -v '^pkg_resources' > requirements.txt
git add ./requirements.txt
git commit --message "${message}"
done

16
example.config.yaml Normal file
View File

@@ -0,0 +1,16 @@
trees:
- root: "~/example-projects/"
repos:
- name: "git-repo-manager"
remotes:
- name: "origin"
url: "https://code.hkoerber.de/hannes/git-repo-manager.git"
type: "https"
- name: "github"
url: "https://github.com/hakoerber/git-repo-manager.git"
type: "https"
- name: "dotfiles"
remotes:
- name: "origin"
url: "https://github.com/hakoerber/dotfiles.git"
type: "https"

36
src/auth.rs Normal file
View File

@@ -0,0 +1,36 @@
use std::process;
/// Execute `command` via `sh -c` and return the first line of its
/// standard output, intended for retrieving authentication tokens.
///
/// # Errors
///
/// Returns an error when the command cannot be spawned, exits with a
/// non-zero status, writes anything to stderr, produces non-UTF-8
/// output, or prints nothing at all.
pub fn get_token_from_command(command: &str) -> Result<String, String> {
    // Resolve the shell through `/usr/bin/env` instead of hardcoding a path.
    let output = process::Command::new("/usr/bin/env")
        .arg("sh")
        .arg("-c")
        .arg(command)
        .output()
        .map_err(|error| format!("Failed to run token-command: {}", error))?;

    let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?;
    let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?;

    if !output.status.success() {
        // Include the command's stderr in the message when there is any.
        return Err(if stderr.is_empty() {
            String::from("Token command failed.")
        } else {
            format!("Token command failed: {}", stderr)
        });
    }

    // Any stderr output is treated as a failure even on a zero exit status,
    // so secrets-adjacent noise never goes unnoticed.
    if !stderr.is_empty() {
        return Err(format!("Token command produced stderr: {}", stderr));
    }

    if stdout.is_empty() {
        return Err(String::from("Token command did not produce output"));
    }

    // Only the first line counts as the token; trailing output is ignored.
    let first_line = stdout
        .split('\n')
        .next()
        .ok_or_else(|| String::from("Output did not contain any newline"))?;

    Ok(first_line.to_string())
}

View File

@@ -1,51 +1,300 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::process;
use super::repo::RepoConfig; use std::path::Path;
#[derive(Debug, Serialize, Deserialize)] use super::auth;
#[serde(deny_unknown_fields)] use super::output::*;
pub struct Config { use super::path;
pub trees: Trees, use super::provider;
use super::provider::Filter;
use super::provider::Provider;
use super::repo;
use super::tree;
pub type RemoteProvider = provider::RemoteProvider;
pub type RemoteType = repo::RemoteType;
fn worktree_setup_default() -> bool {
false
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct Trees(Vec<Tree>); #[serde(untagged)]
pub enum Config {
ConfigTrees(ConfigTrees),
ConfigProvider(ConfigProvider),
}
impl Trees { #[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ConfigTrees {
pub trees: Vec<ConfigTree>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ConfigProviderFilter {
pub access: Option<bool>,
pub owner: Option<bool>,
pub users: Option<Vec<String>>,
pub groups: Option<Vec<String>>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ConfigProvider {
pub provider: RemoteProvider,
pub token_command: String,
pub root: String,
pub filters: Option<ConfigProviderFilter>,
pub force_ssh: Option<bool>,
pub api_url: Option<String>,
pub worktree: Option<bool>,
pub init_worktree: Option<bool>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RemoteConfig {
pub name: String,
pub url: String,
#[serde(rename = "type")]
pub remote_type: RemoteType,
}
impl RemoteConfig {
pub fn from_remote(remote: repo::Remote) -> Self {
Self {
name: remote.name,
url: remote.url,
remote_type: remote.remote_type,
}
}
pub fn into_remote(self) -> repo::Remote {
repo::Remote {
name: self.name,
url: self.url,
remote_type: self.remote_type,
}
}
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RepoConfig {
pub name: String,
#[serde(default = "worktree_setup_default")]
pub worktree_setup: bool,
pub remotes: Option<Vec<RemoteConfig>>,
}
impl RepoConfig {
pub fn from_repo(repo: repo::Repo) -> Self {
Self {
name: repo.name,
worktree_setup: repo.worktree_setup,
remotes: repo
.remotes
.map(|remotes| remotes.into_iter().map(RemoteConfig::from_remote).collect()),
}
}
pub fn into_repo(self) -> repo::Repo {
let (namespace, name) = if let Some((namespace, name)) = self.name.rsplit_once('/') {
(Some(namespace.to_string()), name.to_string())
} else {
(None, self.name)
};
repo::Repo {
name,
namespace,
worktree_setup: self.worktree_setup,
remotes: self.remotes.map(|remotes| {
remotes
.into_iter()
.map(|remote| remote.into_remote())
.collect()
}),
}
}
}
impl ConfigTrees {
pub fn to_config(self) -> Config { pub fn to_config(self) -> Config {
Config { trees: self } Config::ConfigTrees(self)
} }
pub fn from_vec(vec: Vec<Tree>) -> Self { pub fn from_vec(vec: Vec<ConfigTree>) -> Self {
Trees(vec) ConfigTrees { trees: vec }
} }
pub fn as_vec(self) -> Vec<Tree> { pub fn from_trees(vec: Vec<tree::Tree>) -> Self {
self.0 ConfigTrees {
trees: vec.into_iter().map(ConfigTree::from_tree).collect(),
}
} }
pub fn as_vec_ref(&self) -> &Vec<Tree> { pub fn trees(self) -> Vec<ConfigTree> {
self.0.as_ref() self.trees
}
pub fn trees_mut(&mut self) -> &mut Vec<ConfigTree> {
&mut self.trees
}
pub fn trees_ref(&self) -> &Vec<ConfigTree> {
self.trees.as_ref()
} }
} }
impl Config { impl Config {
pub fn trees(self) -> Result<Vec<ConfigTree>, String> {
match self {
Config::ConfigTrees(config) => Ok(config.trees),
Config::ConfigProvider(config) => {
let token = match auth::get_token_from_command(&config.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filters = config.filters.unwrap_or(ConfigProviderFilter {
access: Some(false),
owner: Some(false),
users: Some(vec![]),
groups: Some(vec![]),
});
let filter = Filter::new(
filters.users.unwrap_or_default(),
filters.groups.unwrap_or_default(),
filters.owner.unwrap_or(false),
filters.access.unwrap_or(false),
);
let repos = match config.provider {
RemoteProvider::Github => {
match provider::Github::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
)?
}
RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
)?
}
};
let mut trees = vec![];
for (namespace, namespace_repos) in repos {
let repos = namespace_repos
.into_iter()
.map(RepoConfig::from_repo)
.collect();
let tree = ConfigTree {
root: if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&config.root).join(namespace))
} else {
path::path_as_string(Path::new(&config.root))
},
repos: Some(repos),
};
trees.push(tree);
}
Ok(trees)
}
}
}
pub fn from_trees(trees: Vec<ConfigTree>) -> Self {
Config::ConfigTrees(ConfigTrees { trees })
}
pub fn normalize(&mut self) {
if let Config::ConfigTrees(config) = self {
let home = path::env_home().display().to_string();
for tree in &mut config.trees_mut().iter_mut() {
if tree.root.starts_with(&home) {
// The tilde is not handled differently, it's just a normal path component for `Path`.
// Therefore we can treat it like that during **output**.
//
// The `unwrap()` is safe here as we are testing via `starts_with()`
// beforehand
let mut path = tree.root.strip_prefix(&home).unwrap();
if path.starts_with('/') {
path = path.strip_prefix('/').unwrap();
}
tree.root = Path::new("~").join(path).display().to_string();
}
}
}
}
pub fn as_toml(&self) -> Result<String, String> { pub fn as_toml(&self) -> Result<String, String> {
match toml::to_string(self) { match toml::to_string(self) {
Ok(toml) => Ok(toml), Ok(toml) => Ok(toml),
Err(error) => Err(error.to_string()), Err(error) => Err(error.to_string()),
} }
} }
pub fn as_yaml(&self) -> Result<String, String> {
serde_yaml::to_string(self).map_err(|e| e.to_string())
}
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)] #[serde(deny_unknown_fields)]
pub struct Tree { pub struct ConfigTree {
pub root: String, pub root: String,
pub repos: Option<Vec<RepoConfig>>, pub repos: Option<Vec<RepoConfig>>,
} }
pub fn read_config(path: &str) -> Result<Config, String> { impl ConfigTree {
pub fn from_repos(root: String, repos: Vec<repo::Repo>) -> Self {
Self {
root,
repos: Some(repos.into_iter().map(RepoConfig::from_repo).collect()),
}
}
pub fn from_tree(tree: tree::Tree) -> Self {
Self {
root: tree.root,
repos: Some(tree.repos.into_iter().map(RepoConfig::from_repo).collect()),
}
}
}
pub fn read_config<'a, T>(path: &str) -> Result<T, String>
where
T: for<'de> serde::Deserialize<'de>,
{
let content = match std::fs::read_to_string(&path) { let content = match std::fs::read_to_string(&path) {
Ok(s) => s, Ok(s) => s,
Err(e) => { Err(e) => {
@@ -60,7 +309,9 @@ pub fn read_config(path: &str) -> Result<Config, String> {
} }
}; };
let config: Config = match toml::from_str(&content) { let config: T = match toml::from_str(&content) {
Ok(c) => c,
Err(_) => match serde_yaml::from_str(&content) {
Ok(c) => c, Ok(c) => c,
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
@@ -68,6 +319,7 @@ pub fn read_config(path: &str) -> Result<Config, String> {
path, e path, e
)) ))
} }
},
}; };
Ok(config) Ok(config)

View File

@@ -7,8 +7,8 @@ use clap::{AppSettings, Parser};
author = clap::crate_authors!("\n"), author = clap::crate_authors!("\n"),
about = clap::crate_description!(), about = clap::crate_description!(),
long_version = clap::crate_version!(), long_version = clap::crate_version!(),
setting = AppSettings::DeriveDisplayOrder, global_setting(AppSettings::DeriveDisplayOrder),
setting = AppSettings::PropagateVersion, propagate_version = true,
)] )]
pub struct Opts { pub struct Opts {
#[clap(subcommand)] #[clap(subcommand)]
@@ -31,20 +31,51 @@ pub struct Repos {
#[derive(Parser)] #[derive(Parser)]
pub enum ReposAction { pub enum ReposAction {
#[clap( #[clap(subcommand)]
visible_alias = "run", Sync(SyncAction),
about = "Synchronize the repositories to the configured values" #[clap(subcommand)]
)] Find(FindAction),
Sync(Sync),
#[clap(about = "Generate a repository configuration from an existing file tree")]
Find(Find),
#[clap(about = "Show status of configured repositories")] #[clap(about = "Show status of configured repositories")]
Status(OptionalConfig), Status(OptionalConfig),
} }
#[derive(Parser)] #[derive(Parser)]
#[clap()] #[clap(about = "Sync local repositories with a configured list")]
pub struct Sync { pub enum SyncAction {
#[clap(about = "Synchronize the repositories to the configured values")]
Config(Config),
#[clap(about = "Synchronize the repositories from a remote provider")]
Remote(SyncRemoteArgs),
}
#[derive(Parser)]
#[clap(about = "Generate a repository configuration from existing repositories")]
pub enum FindAction {
#[clap(about = "Find local repositories")]
Local(FindLocalArgs),
#[clap(about = "Find repositories on remote provider")]
Remote(FindRemoteArgs),
#[clap(about = "Find repositories as defined in the configuration file")]
Config(FindConfigArgs),
}
#[derive(Parser)]
pub struct FindLocalArgs {
#[clap(help = "The path to search through")]
pub path: String,
#[clap(
arg_enum,
short,
long,
help = "Format to produce",
default_value_t = ConfigFormat::Toml,
)]
pub format: ConfigFormat,
}
#[derive(Parser)]
pub struct FindConfigArgs {
#[clap( #[clap(
short, short,
long, long,
@@ -52,6 +83,167 @@ pub struct Sync {
help = "Path to the configuration file" help = "Path to the configuration file"
)] )]
pub config: String, pub config: String,
#[clap(
arg_enum,
short,
long,
help = "Format to produce",
default_value_t = ConfigFormat::Toml,
)]
pub format: ConfigFormat,
}
#[derive(Parser)]
#[clap()]
pub struct FindRemoteArgs {
#[clap(short, long, help = "Path to the configuration file")]
pub config: Option<String>,
#[clap(arg_enum, short, long, help = "Remote provider to use")]
pub provider: RemoteProvider,
#[clap(
multiple_occurrences = true,
name = "user",
long,
help = "Users to get repositories from"
)]
pub users: Vec<String>,
#[clap(
multiple_occurrences = true,
name = "group",
long,
help = "Groups to get repositories from"
)]
pub groups: Vec<String>,
#[clap(long, help = "Get repositories that belong to the requesting user")]
pub owner: bool,
#[clap(long, help = "Get repositories that the requesting user has access to")]
pub access: bool,
#[clap(long, help = "Always use SSH, even for public repositories")]
pub force_ssh: bool,
#[clap(long, help = "Command to get API token")]
pub token_command: String,
#[clap(long, help = "Root of the repo tree to produce")]
pub root: String,
#[clap(
arg_enum,
short,
long,
help = "Format to produce",
default_value_t = ConfigFormat::Toml,
)]
pub format: ConfigFormat,
#[clap(
long,
help = "Use worktree setup for repositories",
possible_values = &["true", "false"],
default_value = "false",
default_missing_value = "true",
min_values = 0,
max_values = 1,
)]
pub worktree: String,
#[clap(long, help = "Base URL for the API")]
pub api_url: Option<String>,
}
#[derive(Parser)]
#[clap()]
pub struct Config {
#[clap(
short,
long,
default_value = "./config.toml",
help = "Path to the configuration file"
)]
pub config: String,
#[clap(
long,
help = "Check out the default worktree after clone",
possible_values = &["true", "false"],
default_value = "true",
default_missing_value = "true",
min_values = 0,
max_values = 1,
)]
pub init_worktree: String,
}
pub type RemoteProvider = super::provider::RemoteProvider;
#[derive(Parser)]
#[clap()]
pub struct SyncRemoteArgs {
#[clap(arg_enum, short, long, help = "Remote provider to use")]
pub provider: RemoteProvider,
#[clap(
multiple_occurrences = true,
name = "user",
long,
help = "Users to get repositories from"
)]
pub users: Vec<String>,
#[clap(
multiple_occurrences = true,
name = "group",
long,
help = "Groups to get repositories from"
)]
pub groups: Vec<String>,
#[clap(long, help = "Get repositories that belong to the requesting user")]
pub owner: bool,
#[clap(long, help = "Get repositories that the requesting user has access to")]
pub access: bool,
#[clap(long, help = "Always use SSH, even for public repositories")]
pub force_ssh: bool,
#[clap(long, help = "Command to get API token")]
pub token_command: String,
#[clap(long, help = "Root of the repo tree to produce")]
pub root: String,
#[clap(
long,
help = "Use worktree setup for repositories",
possible_values = &["true", "false"],
default_value = "false",
default_missing_value = "true",
min_values = 0,
max_values = 1,
)]
pub worktree: String,
#[clap(long, help = "Base URL for the API")]
pub api_url: Option<String>,
#[clap(
long,
help = "Check out the default worktree after clone",
possible_values = &["true", "false"],
default_value = "true",
default_missing_value = "true",
min_values = 0,
max_values = 1,
)]
pub init_worktree: String,
} }
#[derive(Parser)] #[derive(Parser)]
@@ -61,10 +253,10 @@ pub struct OptionalConfig {
pub config: Option<String>, pub config: Option<String>,
} }
#[derive(Parser)] #[derive(clap::ArgEnum, Clone)]
pub struct Find { pub enum ConfigFormat {
#[clap(help = "The path to search through")] Yaml,
pub path: String, Toml,
} }
#[derive(Parser)] #[derive(Parser)]
@@ -132,6 +324,8 @@ pub struct WorktreeFetchArgs {}
pub struct WorktreePullArgs { pub struct WorktreePullArgs {
#[clap(long = "--rebase", help = "Perform a rebase instead of a fast-forward")] #[clap(long = "--rebase", help = "Perform a rebase instead of a fast-forward")]
pub rebase: bool, pub rebase: bool,
#[clap(long = "--stash", help = "Stash & unstash changes before & after pull")]
pub stash: bool,
} }
#[derive(Parser)] #[derive(Parser)]
@@ -140,6 +334,11 @@ pub struct WorktreeRebaseArgs {
pub pull: bool, pub pull: bool,
#[clap(long = "--rebase", help = "Perform a rebase when doing a pull")] #[clap(long = "--rebase", help = "Perform a rebase when doing a pull")]
pub rebase: bool, pub rebase: bool,
#[clap(
long = "--stash",
help = "Stash & unstash changes before & after rebase"
)]
pub stash: bool,
} }
pub fn parse() -> Opts { pub fn parse() -> Opts {

View File

@@ -3,24 +3,33 @@ use std::process;
mod cmd; mod cmd;
use grm::auth;
use grm::config; use grm::config;
use grm::find_in_tree;
use grm::output::*; use grm::output::*;
use grm::path;
use grm::provider;
use grm::provider::Provider;
use grm::repo; use grm::repo;
use grm::table;
use grm::tree;
use grm::worktree;
fn main() { fn main() {
let opts = cmd::parse(); let opts = cmd::parse();
match opts.subcmd { match opts.subcmd {
cmd::SubCommand::Repos(repos) => match repos.action { cmd::SubCommand::Repos(repos) => match repos.action {
cmd::ReposAction::Sync(sync) => { cmd::ReposAction::Sync(sync) => match sync {
let config = match config::read_config(&sync.config) { cmd::SyncAction::Config(args) => {
let config = match config::read_config(&args.config) {
Ok(config) => config, Ok(config) => config,
Err(error) => { Err(error) => {
print_error(&error); print_error(&error);
process::exit(1); process::exit(1);
} }
}; };
match grm::sync_trees(config) { match tree::sync_trees(config, args.init_worktree == "true") {
Ok(success) => { Ok(success) => {
if !success { if !success {
process::exit(1) process::exit(1)
@@ -32,6 +41,79 @@ fn main() {
} }
} }
} }
cmd::SyncAction::Remote(args) => {
let token = match auth::get_token_from_command(&args.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access);
let worktree = args.worktree == "true";
let repos = match args.provider {
cmd::RemoteProvider::Github => {
match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(worktree, args.force_ssh)
}
cmd::RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(worktree, args.force_ssh)
}
};
match repos {
Ok(repos) => {
let mut trees: Vec<config::ConfigTree> = vec![];
for (namespace, repolist) in repos {
let root = if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&args.root).join(namespace))
} else {
path::path_as_string(Path::new(&args.root))
};
let tree = config::ConfigTree::from_repos(root, repolist);
trees.push(tree);
}
let config = config::Config::from_trees(trees);
match tree::sync_trees(config, args.init_worktree == "true") {
Ok(success) => {
if !success {
process::exit(1)
}
}
Err(error) => {
print_error(&format!("Error syncing trees: {}", error));
process::exit(1);
}
}
}
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
}
},
cmd::ReposAction::Status(args) => match &args.config { cmd::ReposAction::Status(args) => match &args.config {
Some(config_path) => { Some(config_path) => {
let config = match config::read_config(config_path) { let config = match config::read_config(config_path) {
@@ -41,7 +123,7 @@ fn main() {
process::exit(1); process::exit(1);
} }
}; };
match grm::table::get_status_table(config) { match table::get_status_table(config) {
Ok((tables, errors)) => { Ok((tables, errors)) => {
for table in tables { for table in tables {
println!("{}", table); println!("{}", table);
@@ -65,7 +147,7 @@ fn main() {
} }
}; };
match grm::table::show_single_repo_status(&dir) { match table::show_single_repo_status(&dir) {
Ok((table, warnings)) => { Ok((table, warnings)) => {
println!("{}", table); println!("{}", table);
for warning in warnings { for warning in warnings {
@@ -79,8 +161,9 @@ fn main() {
} }
} }
}, },
cmd::ReposAction::Find(find) => { cmd::ReposAction::Find(find) => match find {
let path = Path::new(&find.path); cmd::FindAction::Local(args) => {
let path = Path::new(&args.path);
if !path.exists() { if !path.exists() {
print_error(&format!("Path \"{}\" does not exist", path.display())); print_error(&format!("Path \"{}\" does not exist", path.display()));
process::exit(1); process::exit(1);
@@ -102,7 +185,7 @@ fn main() {
} }
}; };
let (found_repos, warnings) = match grm::find_in_tree(&path) { let (found_repos, warnings) = match find_in_tree(&path) {
Ok((repos, warnings)) => (repos, warnings), Ok((repos, warnings)) => (repos, warnings),
Err(error) => { Err(error) => {
print_error(&error); print_error(&error);
@@ -110,29 +193,267 @@ fn main() {
} }
}; };
let trees = grm::config::Trees::from_vec(vec![found_repos]); let trees = config::ConfigTrees::from_trees(vec![found_repos]);
if trees.as_vec_ref().iter().all(|t| match &t.repos { if trees.trees_ref().iter().all(|t| match &t.repos {
None => false, None => false,
Some(r) => r.is_empty(), Some(r) => r.is_empty(),
}) { }) {
print_warning("No repositories found"); print_warning("No repositories found");
} else { } else {
let config = trees.to_config(); let mut config = trees.to_config();
config.normalize();
match args.format {
cmd::ConfigFormat::Toml => {
let toml = match config.as_toml() { let toml = match config.as_toml() {
Ok(toml) => toml, Ok(toml) => toml,
Err(error) => { Err(error) => {
print_error(&format!("Failed converting config to TOML: {}", &error)); print_error(&format!(
"Failed converting config to TOML: {}",
&error
));
process::exit(1); process::exit(1);
} }
}; };
print!("{}", toml); print!("{}", toml);
} }
cmd::ConfigFormat::Yaml => {
let yaml = match config.as_yaml() {
Ok(yaml) => yaml,
Err(error) => {
print_error(&format!(
"Failed converting config to YAML: {}",
&error
));
process::exit(1);
}
};
print!("{}", yaml);
}
}
}
for warning in warnings { for warning in warnings {
print_warning(&warning); print_warning(&warning);
} }
} }
cmd::FindAction::Config(args) => {
let config: config::ConfigProvider = match config::read_config(&args.config) {
Ok(config) => config,
Err(error) => {
print_error(&error);
process::exit(1);
}
};
let token = match auth::get_token_from_command(&config.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filters = config.filters.unwrap_or(config::ConfigProviderFilter {
access: Some(false),
owner: Some(false),
users: Some(vec![]),
groups: Some(vec![]),
});
let filter = provider::Filter::new(
filters.users.unwrap_or_default(),
filters.groups.unwrap_or_default(),
filters.owner.unwrap_or(false),
filters.access.unwrap_or(false),
);
let repos = match config.provider {
provider::RemoteProvider::Github => {
match match provider::Github::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
}
provider::RemoteProvider::Gitlab => {
match match provider::Gitlab::new(filter, token, config.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(
config.worktree.unwrap_or(false),
config.force_ssh.unwrap_or(false),
) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
}
};
let mut trees = vec![];
for (namespace, namespace_repos) in repos {
let tree = config::ConfigTree {
root: if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&config.root).join(namespace))
} else {
path::path_as_string(Path::new(&config.root))
},
repos: Some(
namespace_repos
.into_iter()
.map(config::RepoConfig::from_repo)
.collect(),
),
};
trees.push(tree);
}
let config = config::Config::from_trees(trees);
match args.format {
cmd::ConfigFormat::Toml => {
let toml = match config.as_toml() {
Ok(toml) => toml,
Err(error) => {
print_error(&format!(
"Failed converting config to TOML: {}",
&error
));
process::exit(1);
}
};
print!("{}", toml);
}
cmd::ConfigFormat::Yaml => {
let yaml = match config.as_yaml() {
Ok(yaml) => yaml,
Err(error) => {
print_error(&format!(
"Failed converting config to YAML: {}",
&error
));
process::exit(1);
}
};
print!("{}", yaml);
}
}
}
cmd::FindAction::Remote(args) => {
let token = match auth::get_token_from_command(&args.token_command) {
Ok(token) => token,
Err(error) => {
print_error(&format!("Getting token from command failed: {}", error));
process::exit(1);
}
};
let filter =
provider::Filter::new(args.users, args.groups, args.owner, args.access);
let worktree = args.worktree == "true";
let repos = match args.provider {
cmd::RemoteProvider::Github => {
match provider::Github::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(worktree, args.force_ssh)
}
cmd::RemoteProvider::Gitlab => {
match provider::Gitlab::new(filter, token, args.api_url) {
Ok(provider) => provider,
Err(error) => {
print_error(&format!("Error: {}", error));
process::exit(1);
}
}
.get_repos(worktree, args.force_ssh)
}
};
let repos = repos.unwrap_or_else(|error| {
print_error(&format!("Error: {}", error));
process::exit(1);
});
let mut trees: Vec<config::ConfigTree> = vec![];
for (namespace, repolist) in repos {
let tree = config::ConfigTree {
root: if let Some(namespace) = namespace {
path::path_as_string(&Path::new(&args.root).join(namespace))
} else {
path::path_as_string(Path::new(&args.root))
},
repos: Some(
repolist
.into_iter()
.map(config::RepoConfig::from_repo)
.collect(),
),
};
trees.push(tree);
}
let mut config = config::Config::from_trees(trees);
config.normalize();
match args.format {
cmd::ConfigFormat::Toml => {
let toml = match config.as_toml() {
Ok(toml) => toml,
Err(error) => {
print_error(&format!(
"Failed converting config to TOML: {}",
&error
));
process::exit(1);
}
};
print!("{}", toml);
}
cmd::ConfigFormat::Yaml => {
let yaml = match config.as_yaml() {
Ok(yaml) => yaml,
Err(error) => {
print_error(&format!(
"Failed converting config to YAML: {}",
&error
));
process::exit(1);
}
};
print!("{}", yaml);
}
}
}
},
}, },
cmd::SubCommand::Worktree(args) => { cmd::SubCommand::Worktree(args) => {
let cwd = std::env::current_dir().unwrap_or_else(|error| { let cwd = std::env::current_dir().unwrap_or_else(|error| {
@@ -178,7 +499,13 @@ fn main() {
} }
} }
match grm::add_worktree(&cwd, name, subdirectory, track, action_args.no_track) { match worktree::add_worktree(
&cwd,
name,
subdirectory,
track,
action_args.no_track,
) {
Ok(_) => print_success(&format!("Worktree {} created", &action_args.name)), Ok(_) => print_success(&format!("Worktree {} created", &action_args.name)),
Err(error) => { Err(error) => {
print_error(&format!("Error creating worktree: {}", error)); print_error(&format!("Error creating worktree: {}", error));
@@ -200,7 +527,7 @@ fn main() {
} }
}; };
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
print_error(&format!("Error opening repository: {}", error)); print_error(&format!("Error opening repository: {}", error));
process::exit(1); process::exit(1);
}); });
@@ -214,17 +541,17 @@ fn main() {
Ok(_) => print_success(&format!("Worktree {} deleted", &action_args.name)), Ok(_) => print_success(&format!("Worktree {} deleted", &action_args.name)),
Err(error) => { Err(error) => {
match error { match error {
grm::WorktreeRemoveFailureReason::Error(msg) => { repo::WorktreeRemoveFailureReason::Error(msg) => {
print_error(&msg); print_error(&msg);
process::exit(1); process::exit(1);
} }
grm::WorktreeRemoveFailureReason::Changes(changes) => { repo::WorktreeRemoveFailureReason::Changes(changes) => {
print_warning(&format!( print_warning(&format!(
"Changes in worktree: {}. Refusing to delete", "Changes in worktree: {}. Refusing to delete",
changes changes
)); ));
} }
grm::WorktreeRemoveFailureReason::NotMerged(message) => { repo::WorktreeRemoveFailureReason::NotMerged(message) => {
print_warning(&message); print_warning(&message);
} }
} }
@@ -233,12 +560,12 @@ fn main() {
} }
} }
cmd::WorktreeAction::Status(_args) => { cmd::WorktreeAction::Status(_args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
print_error(&format!("Error opening repository: {}", error)); print_error(&format!("Error opening repository: {}", error));
process::exit(1); process::exit(1);
}); });
match grm::table::get_worktree_status_table(&repo, &cwd) { match table::get_worktree_status_table(&repo, &cwd) {
Ok((table, errors)) => { Ok((table, errors)) => {
println!("{}", table); println!("{}", table);
for error in errors { for error in errors {
@@ -258,8 +585,8 @@ fn main() {
// * Remove all files // * Remove all files
// * Set `core.bare` to `true` // * Set `core.bare` to `true`
let repo = grm::Repo::open(&cwd, false).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, false).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository"); print_error("Directory does not contain a git repository");
} else { } else {
print_error(&format!("Opening repository failed: {}", error)); print_error(&format!("Opening repository failed: {}", error));
@@ -286,8 +613,8 @@ fn main() {
} }
} }
cmd::WorktreeAction::Clean(_args) => { cmd::WorktreeAction::Clean(_args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository"); print_error("Directory does not contain a git repository");
} else { } else {
print_error(&format!("Opening repository failed: {}", error)); print_error(&format!("Opening repository failed: {}", error));
@@ -320,8 +647,8 @@ fn main() {
} }
} }
cmd::WorktreeAction::Fetch(_args) => { cmd::WorktreeAction::Fetch(_args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository"); print_error("Directory does not contain a git repository");
} else { } else {
print_error(&format!("Opening repository failed: {}", error)); print_error(&format!("Opening repository failed: {}", error));
@@ -336,8 +663,8 @@ fn main() {
print_success("Fetched from all remotes"); print_success("Fetched from all remotes");
} }
cmd::WorktreeAction::Pull(args) => { cmd::WorktreeAction::Pull(args) => {
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository"); print_error("Directory does not contain a git repository");
} else { } else {
print_error(&format!("Opening repository failed: {}", error)); print_error(&format!("Opening repository failed: {}", error));
@@ -350,34 +677,35 @@ fn main() {
process::exit(1); process::exit(1);
}); });
let mut failures = false;
for worktree in repo.get_worktrees().unwrap_or_else(|error| { for worktree in repo.get_worktrees().unwrap_or_else(|error| {
print_error(&format!("Error getting worktrees: {}", error)); print_error(&format!("Error getting worktrees: {}", error));
process::exit(1); process::exit(1);
}) { }) {
if let Some(warning) = if let Some(warning) = worktree
worktree .forward_branch(args.rebase, args.stash)
.forward_branch(args.rebase)
.unwrap_or_else(|error| { .unwrap_or_else(|error| {
print_error(&format!( print_error(&format!("Error updating worktree branch: {}", error));
"Error updating worktree branch: {}",
error
));
process::exit(1); process::exit(1);
}) })
{ {
print_warning(&format!("{}: {}", worktree.name(), warning)); print_warning(&format!("{}: {}", worktree.name(), warning));
failures = true;
} else { } else {
print_success(&format!("{}: Done", worktree.name())); print_success(&format!("{}: Done", worktree.name()));
} }
} }
if failures {
process::exit(1);
}
} }
cmd::WorktreeAction::Rebase(args) => { cmd::WorktreeAction::Rebase(args) => {
if args.rebase && !args.pull { if args.rebase && !args.pull {
print_error("There is no point in using --rebase without --pull"); print_error("There is no point in using --rebase without --pull");
process::exit(1); process::exit(1);
} }
let repo = grm::Repo::open(&cwd, true).unwrap_or_else(|error| { let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
if error.kind == grm::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
print_error("Directory does not contain a git repository"); print_error("Directory does not contain a git repository");
} else { } else {
print_error(&format!("Opening repository failed: {}", error)); print_error(&format!("Opening repository failed: {}", error));
@@ -392,12 +720,8 @@ fn main() {
}); });
} }
let config = let config = repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
grm::repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| { print_error(&format!("Failed to read worktree configuration: {}", error));
print_error(&format!(
"Failed to read worktree configuration: {}",
error
));
process::exit(1); process::exit(1);
}); });
@@ -406,10 +730,12 @@ fn main() {
process::exit(1); process::exit(1);
}); });
let mut failures = false;
for worktree in &worktrees { for worktree in &worktrees {
if args.pull { if args.pull {
if let Some(warning) = worktree if let Some(warning) = worktree
.forward_branch(args.rebase) .forward_branch(args.rebase, args.stash)
.unwrap_or_else(|error| { .unwrap_or_else(|error| {
print_error(&format!( print_error(&format!(
"Error updating worktree branch: {}", "Error updating worktree branch: {}",
@@ -418,28 +744,29 @@ fn main() {
process::exit(1); process::exit(1);
}) })
{ {
failures = true;
print_warning(&format!("{}: {}", worktree.name(), warning)); print_warning(&format!("{}: {}", worktree.name(), warning));
} }
} }
} }
for worktree in &worktrees { for worktree in &worktrees {
if let Some(warning) = if let Some(warning) = worktree
worktree .rebase_onto_default(&config, args.stash)
.rebase_onto_default(&config)
.unwrap_or_else(|error| { .unwrap_or_else(|error| {
print_error(&format!( print_error(&format!("Error rebasing worktree branch: {}", error));
"Error rebasing worktree branch: {}",
error
));
process::exit(1); process::exit(1);
}) })
{ {
failures = true;
print_warning(&format!("{}: {}", worktree.name(), warning)); print_warning(&format!("{}: {}", worktree.name(), warning));
} else { } else {
print_success(&format!("{}: Done", worktree.name())); print_success(&format!("{}: Done", worktree.name()));
} }
} }
if failures {
process::exit(1);
}
} }
} }
} }

View File

@@ -1,341 +1,37 @@
#![feature(io_error_more)] #![feature(io_error_more)]
#![feature(const_option_ext)]
use std::fs; use std::path::Path;
use std::path::{Path, PathBuf};
use std::process;
pub mod auth;
pub mod config; pub mod config;
pub mod output; pub mod output;
pub mod path;
pub mod provider;
pub mod repo; pub mod repo;
pub mod table; pub mod table;
pub mod tree;
pub mod worktree;
use config::{Config, Tree};
use output::*;
use repo::{clone_repo, detect_remote_type, Remote, RepoConfig};
pub use repo::{RemoteTrackingStatus, Repo, RepoErrorKind, WorktreeRemoveFailureReason};
const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
const BRANCH_NAMESPACE_SEPARATOR: &str = "/"; const BRANCH_NAMESPACE_SEPARATOR: &str = "/";
const GIT_CONFIG_BARE_KEY: &str = "core.bare";
const GIT_CONFIG_PUSH_DEFAULT: &str = "push.default";
#[cfg(test)]
mod tests {
    use super::*;

    // Pin $HOME so the expansion tests below are deterministic.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    #[test]
    fn check_expand_tilde() {
        setup();
        assert_eq!(
            expand_path(Path::new("~/file")),
            Path::new("/home/test/file")
        );
    }

    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        // A tilde that is not the leading component is left untouched.
        assert_eq!(
            expand_path(Path::new("/home/~/file")),
            Path::new("/home/~/file")
        );
    }

    #[test]
    fn check_expand_home() {
        setup();
        // Both $HOME and ${HOME} spellings must expand.
        assert_eq!(
            expand_path(Path::new("$HOME/file")),
            Path::new("/home/test/file")
        );
        assert_eq!(
            expand_path(Path::new("${HOME}/file")),
            Path::new("/home/test/file")
        );
    }
}
/// Converts a path into an owned `String`.
///
/// Panics if the path is not valid UTF-8 (same contract as before:
/// the `unwrap` is on the `OsString -> String` conversion).
pub fn path_as_string(path: &Path) -> String {
    PathBuf::from(path)
        .into_os_string()
        .into_string()
        .unwrap()
}
pub fn env_home() -> PathBuf {
match std::env::var("HOME") {
Ok(path) => Path::new(&path).to_path_buf(),
Err(e) => {
print_error(&format!("Unable to read HOME: {}", e));
process::exit(1);
}
}
}
/// Expands `~` and `$HOME`/`${HOME}` in the given path.
///
/// Only the `HOME` variable is substituted; any other `$VAR` is left
/// in place (the context closure answers `Ok(None)` for unknown names).
/// Exits the process if expansion fails.
fn expand_path(path: &Path) -> PathBuf {
    // shellexpand wants a callback that yields the home directory.
    fn home_dir() -> Option<PathBuf> {
        Some(env_home())
    }

    let expanded_path = match shellexpand::full_with_context(
        &path_as_string(path),
        home_dir,
        |name| -> Result<Option<String>, &'static str> {
            match name {
                "HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
                _ => Ok(None),
            }
        },
    ) {
        // Cow::Borrowed: nothing was expanded; Cow::Owned carries the
        // substituted string.
        Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
        Ok(std::borrow::Cow::Owned(path)) => path,
        Err(e) => {
            print_error(&format!("Unable to expand root: {}", e));
            process::exit(1);
        }
    };

    Path::new(&expanded_path).to_path_buf()
}
/// Brings a single repository in line with its configuration:
/// clones or initializes it when missing, then reconciles its remotes
/// (add missing, update changed URLs, delete unconfigured ones).
fn sync_repo(root_path: &Path, repo: &RepoConfig) -> Result<(), String> {
    let repo_path = root_path.join(&repo.name);
    // For worktree setups the real git directory lives in a subdirectory
    // of the repo path.
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut repo_handle = None;

    if repo_path.exists() {
        // Guard against a mismatch between configured and on-disk layout.
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        }
        repo_handle = match Repo::open(&repo_path, repo.worktree_setup) {
            Ok(repo) => Some(repo),
            Err(error) => {
                // Opening failed: check whether the layout is inverted
                // (worktree on disk, non-worktree in config) for a
                // clearer message.
                if !repo.worktree_setup && Repo::open(&repo_path, true).is_ok() {
                    return Err(String::from(
                        "Repo already exists, but is using a worktree setup",
                    ));
                } else {
                    return Err(format!("Opening repository failed: {}", error));
                }
            }
        };
    } else if matches!(&repo.remotes, None) || repo.remotes.as_ref().unwrap().is_empty() {
        // No remotes configured: nothing to clone from, init fresh.
        print_repo_action(
            &repo.name,
            "Repository does not have remotes configured, initializing new",
        );
        repo_handle = match Repo::init(&repo_path, repo.worktree_setup) {
            Ok(r) => {
                print_repo_success(&repo.name, "Repository created");
                Some(r)
            }
            Err(e) => {
                return Err(format!("Repository failed during init: {}", e));
            }
        }
    } else {
        // Clone from the first configured remote; the remaining remotes
        // are added in the reconciliation pass below.
        let first = repo.remotes.as_ref().unwrap().first().unwrap();
        match clone_repo(first, &repo_path, repo.worktree_setup) {
            Ok(_) => {
                print_repo_success(&repo.name, "Repository successfully cloned");
            }
            Err(e) => {
                return Err(format!("Repository failed during clone: {}", e));
            }
        };
    }

    if let Some(remotes) = &repo.remotes {
        // The clone path above did not keep a handle; reopen if needed.
        let repo_handle = repo_handle.unwrap_or_else(|| {
            Repo::open(&repo_path, repo.worktree_setup).unwrap_or_else(|_| process::exit(1))
        });

        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Pass 1: ensure every configured remote exists with the right URL.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;
            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();
                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        };
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }

        // Pass 2: remove remotes that are no longer configured.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }

    Ok(())
}
/// Returns the names (paths relative to `root_path`) of all repositories
/// found under `root_path` that are not present in `managed_repos`.
pub fn find_unmanaged_repos(
    root_path: &Path,
    managed_repos: &[RepoConfig],
) -> Result<Vec<String>, String> {
    Ok(find_repo_paths(root_path)?
        .into_iter()
        .map(|repo_path| path_as_string(repo_path.strip_prefix(&root_path).unwrap()))
        .filter(|name| !managed_repos.iter().any(|managed| &managed.name == name))
        .collect())
}
/// Synchronizes every repository in every configured tree.
///
/// Returns `Ok(true)` when everything succeeded and `Ok(false)` when at
/// least one repository failed; unmanaged repositories only produce a
/// warning and do not count as failure by themselves (but an error while
/// *detecting* them does).
pub fn sync_trees(config: Config) -> Result<bool, String> {
    let mut failures = false;
    for tree in config.trees.as_vec() {
        let repos = tree.repos.unwrap_or_default();

        let root_path = expand_path(Path::new(&tree.root));

        for repo in &repos {
            match sync_repo(&root_path, repo) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        // Report repositories on disk that are not in the configuration.
        match find_unmanaged_repos(&root_path, &repos) {
            Ok(unmanaged_repos) => {
                for name in unmanaged_repos {
                    print_warning(&format!("Found unmanaged repository: {}", name));
                }
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    Ok(!failures)
}
/// Finds repositories recursively, returning their path
///
/// A directory counts as a repository root when it contains either a
/// plain `.git` directory or the main-worktree directory used by
/// worktree setups. Recursion stops at repository roots, so nested
/// repositories inside one are not reported. Symlinks are skipped.
fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
    let mut repos = Vec::new();

    let git_dir = path.join(".git");
    let git_worktree = path.join(GIT_MAIN_WORKTREE_DIRECTORY);

    if git_dir.exists() || git_worktree.exists() {
        repos.push(path.to_path_buf());
    } else {
        match fs::read_dir(path) {
            Ok(contents) => {
                for content in contents {
                    match content {
                        Ok(entry) => {
                            let path = entry.path();
                            // Avoid symlink loops and duplicate hits.
                            if path.is_symlink() {
                                continue;
                            }
                            if path.is_dir() {
                                match find_repo_paths(&path) {
                                    Ok(ref mut r) => repos.append(r),
                                    Err(error) => return Err(error),
                                }
                            }
                        }
                        Err(e) => {
                            return Err(format!("Error accessing directory: {}", e));
                        }
                    };
                }
            }
            Err(e) => {
                // Map the most common I/O failures to friendlier text.
                // NotADirectory requires the nightly io_error_more feature.
                return Err(format!(
                    "Failed to open \"{}\": {}",
                    &path.display(),
                    match e.kind() {
                        std::io::ErrorKind::NotADirectory =>
                            String::from("directory expected, but path is not a directory"),
                        std::io::ErrorKind::NotFound => String::from("not found"),
                        _ => format!("{:?}", e.kind()),
                    }
                ));
            }
        };
    }

    Ok(repos)
}
/// Returns the location of the actual git directory for a repository:
/// worktree setups keep it in a dedicated subdirectory, plain checkouts
/// use the repository path itself.
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
    if is_worktree {
        path.join(GIT_MAIN_WORKTREE_DIRECTORY)
    } else {
        path.to_path_buf()
    }
}
/// Find all git repositories under root, recursively /// Find all git repositories under root, recursively
/// ///
/// The bool in the return value specifies whether there is a repository /// The bool in the return value specifies whether there is a repository
/// in root itself. /// in root itself.
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)>, String> { fn find_repos(root: &Path) -> Result<Option<(Vec<repo::Repo>, Vec<String>, bool)>, String> {
let mut repos: Vec<RepoConfig> = Vec::new(); let mut repos: Vec<repo::Repo> = Vec::new();
let mut repo_in_root = false; let mut repo_in_root = false;
let mut warnings = Vec::new(); let mut warnings = Vec::new();
for path in find_repo_paths(root)? { for path in tree::find_repo_paths(root)? {
let is_worktree = Repo::detect_worktree(&path); let is_worktree = repo::RepoHandle::detect_worktree(&path);
if path == root { if path == root {
repo_in_root = true; repo_in_root = true;
} }
match Repo::open(&path, is_worktree) { match repo::RepoHandle::open(&path, is_worktree) {
Err(error) => { Err(error) => {
warnings.push(format!( warnings.push(format!(
"Error opening repo {}{}: {}", "Error opening repo {}{}: {}",
@@ -354,32 +50,32 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
Err(error) => { Err(error) => {
warnings.push(format!( warnings.push(format!(
"{}: Error getting remotes: {}", "{}: Error getting remotes: {}",
&path_as_string(&path), &path::path_as_string(&path),
error error
)); ));
continue; continue;
} }
}; };
let mut results: Vec<Remote> = Vec::new(); let mut results: Vec<repo::Remote> = Vec::new();
for remote_name in remotes.iter() { for remote_name in remotes.iter() {
match repo.find_remote(remote_name)? { match repo.find_remote(remote_name)? {
Some(remote) => { Some(remote) => {
let name = remote.name(); let name = remote.name();
let url = remote.url(); let url = remote.url();
let remote_type = match detect_remote_type(&url) { let remote_type = match repo::detect_remote_type(&url) {
Some(t) => t, Some(t) => t,
None => { None => {
warnings.push(format!( warnings.push(format!(
"{}: Could not detect remote type of \"{}\"", "{}: Could not detect remote type of \"{}\"",
&path_as_string(&path), &path::path_as_string(&path),
&url &url
)); ));
continue; continue;
} }
}; };
results.push(Remote { results.push(repo::Remote {
name, name,
url, url,
remote_type, remote_type,
@@ -388,7 +84,7 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
None => { None => {
warnings.push(format!( warnings.push(format!(
"{}: Remote {} not found", "{}: Remote {} not found",
&path_as_string(&path), &path::path_as_string(&path),
remote_name remote_name
)); ));
continue; continue;
@@ -397,17 +93,35 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
} }
let remotes = results; let remotes = results;
repos.push(RepoConfig { let (namespace, name) = if path == root {
name: match path == root { (
true => match &root.parent() { None,
Some(parent) => path_as_string(path.strip_prefix(parent).unwrap()), match &root.parent() {
Some(parent) => {
path::path_as_string(path.strip_prefix(parent).unwrap())
}
None => { None => {
warnings.push(String::from("Getting name of the search root failed. Do you have a git repository in \"/\"?")); warnings.push(String::from("Getting name of the search root failed. Do you have a git repository in \"/\"?"));
continue continue;
},
} }
false => path_as_string(path.strip_prefix(&root).unwrap()),
}, },
)
} else {
let name = path.strip_prefix(&root).unwrap();
let namespace = name.parent().unwrap();
(
if namespace != Path::new("") {
Some(path::path_as_string(namespace).to_string())
} else {
None
},
path::path_as_string(name),
)
};
repos.push(repo::Repo {
name,
namespace,
remotes: Some(remotes), remotes: Some(remotes),
worktree_setup: is_worktree, worktree_setup: is_worktree,
}); });
@@ -417,10 +131,10 @@ fn find_repos(root: &Path) -> Result<Option<(Vec<RepoConfig>, Vec<String>, bool)
Ok(Some((repos, warnings, repo_in_root))) Ok(Some((repos, warnings, repo_in_root)))
} }
pub fn find_in_tree(path: &Path) -> Result<(Tree, Vec<String>), String> { pub fn find_in_tree(path: &Path) -> Result<(tree::Tree, Vec<String>), String> {
let mut warnings = Vec::new(); let mut warnings = Vec::new();
let (repos, repo_in_root): (Vec<RepoConfig>, bool) = match find_repos(path)? { let (repos, repo_in_root): (Vec<repo::Repo>, bool) = match find_repos(path)? {
Some((vec, mut repo_warnings, repo_in_root)) => { Some((vec, mut repo_warnings, repo_in_root)) => {
warnings.append(&mut repo_warnings); warnings.append(&mut repo_warnings);
(vec, repo_in_root) (vec, repo_in_root)
@@ -439,182 +153,12 @@ pub fn find_in_tree(path: &Path) -> Result<(Tree, Vec<String>), String> {
} }
} }
} }
let home = env_home();
if root.starts_with(&home) {
// The tilde is not handled differently, it's just a normal path component for `Path`.
// Therefore we can treat it like that during **output**.
//
// The `unwrap()` is safe here as we are testing via `starts_with()`
// beforehand
root = Path::new("~").join(root.strip_prefix(&home).unwrap());
}
Ok(( Ok((
Tree { tree::Tree {
root: root.into_os_string().into_string().unwrap(), root: root.into_os_string().into_string().unwrap(),
repos: Some(repos), repos,
}, },
warnings, warnings,
)) ))
} }
/// Creates a new worktree called `name` inside the worktree setup at
/// `directory`, optionally below `subdirectory`.
///
/// Tracking resolution (unless `no_track` is set): an explicit `track`
/// pair wins; otherwise the per-tree `grm.toml` config may request
/// tracking a default remote. The checkout starts from the tracked
/// remote branch when it exists, else from the default branch. When the
/// remote branch does not exist yet, the local branch is pushed and its
/// upstream is set.
pub fn add_worktree(
    directory: &Path,
    name: &str,
    subdirectory: Option<&Path>,
    track: Option<(&str, &str)>,
    no_track: bool,
) -> Result<(), String> {
    let repo = Repo::open(directory, true).map_err(|error| match error.kind {
        RepoErrorKind::NotFound => {
            String::from("Current directory does not contain a worktree setup")
        }
        _ => format!("Error opening repo: {}", error),
    })?;

    // Optional per-tree configuration (grm.toml); `None` when absent.
    let config = repo::read_worktree_root_config(directory)?;

    let path = match subdirectory {
        Some(dir) => dir.join(name),
        None => Path::new(name).to_path_buf(),
    };

    if repo.find_worktree(&path).is_ok() {
        return Err(format!("Worktree {} already exists", &name));
    }

    // Tracks whether the remote branch we want to track already exists,
    // which decides between set-upstream-only and push-then-set-upstream.
    let mut remote_branch_exists = false;

    let default_checkout = || repo.default_branch()?.to_commit();

    // Decide which commit the new branch starts from.
    let checkout_commit;
    if no_track {
        checkout_commit = default_checkout()?;
    } else {
        match track {
            Some((remote_name, remote_branch_name)) => {
                let remote_branch = repo.find_remote_branch(remote_name, remote_branch_name);
                match remote_branch {
                    Ok(branch) => {
                        remote_branch_exists = true;
                        checkout_commit = branch.to_commit()?;
                    }
                    Err(_) => {
                        remote_branch_exists = false;
                        checkout_commit = default_checkout()?;
                    }
                }
            }
            None => match &config {
                None => checkout_commit = default_checkout()?,
                Some(config) => match &config.track {
                    None => checkout_commit = default_checkout()?,
                    Some(track_config) => {
                        if track_config.default {
                            // Config-driven default tracking: look for a
                            // same-named branch on the default remote.
                            let remote_branch =
                                repo.find_remote_branch(&track_config.default_remote, name);
                            match remote_branch {
                                Ok(branch) => {
                                    remote_branch_exists = true;
                                    checkout_commit = branch.to_commit()?;
                                }
                                Err(_) => {
                                    checkout_commit = default_checkout()?;
                                }
                            }
                        } else {
                            checkout_commit = default_checkout()?;
                        }
                    }
                },
            },
        };
    }

    // Reuse an existing local branch of that name, or create it.
    let mut target_branch = match repo.find_local_branch(name) {
        Ok(branchref) => branchref,
        Err(_) => repo.create_branch(name, &checkout_commit)?,
    };

    // Helper: refuse to push to remotes that cannot be pushed to.
    fn push(
        remote: &mut repo::RemoteHandle,
        branch_name: &str,
        remote_branch_name: &str,
        repo: &repo::Repo,
    ) -> Result<(), String> {
        if !remote.is_pushable()? {
            return Err(format!(
                "Cannot push to non-pushable remote {}",
                remote.url()
            ));
        }
        remote.push(branch_name, remote_branch_name, repo)
    }

    if !no_track {
        if let Some((remote_name, remote_branch_name)) = track {
            if remote_branch_exists {
                target_branch.set_upstream(remote_name, remote_branch_name)?;
            } else {
                let mut remote = repo
                    .find_remote(remote_name)
                    .map_err(|error| format!("Error getting remote {}: {}", remote_name, error))?
                    .ok_or_else(|| format!("Remote {} not found", remote_name))?;
                push(
                    &mut remote,
                    &target_branch.name()?,
                    remote_branch_name,
                    &repo,
                )?;
                target_branch.set_upstream(remote_name, remote_branch_name)?;
            }
        } else if let Some(config) = config {
            if let Some(track_config) = config.track {
                if track_config.default {
                    let remote_name = track_config.default_remote;
                    if remote_branch_exists {
                        target_branch.set_upstream(&remote_name, name)?;
                    } else {
                        // Optional namespace prefix for the remote branch
                        // name (e.g. "user/branch").
                        let remote_branch_name = match track_config.default_remote_prefix {
                            Some(prefix) => {
                                format!("{}{}{}", &prefix, BRANCH_NAMESPACE_SEPARATOR, &name)
                            }
                            None => name.to_string(),
                        };
                        let mut remote = repo
                            .find_remote(&remote_name)
                            .map_err(|error| {
                                format!("Error getting remote {}: {}", remote_name, error)
                            })?
                            .ok_or_else(|| format!("Remote {} not found", remote_name))?;
                        // NOTE(review): this pushability check duplicates
                        // the one inside `push()` below — likely redundant.
                        if !remote.is_pushable()? {
                            return Err(format!(
                                "Cannot push to non-pushable remote {}",
                                remote.url()
                            ));
                        }
                        push(
                            &mut remote,
                            &target_branch.name()?,
                            &remote_branch_name,
                            &repo,
                        )?;
                        target_branch.set_upstream(&remote_name, &remote_branch_name)?;
                    }
                }
            }
        }
    }

    if let Some(subdirectory) = subdirectory {
        std::fs::create_dir_all(subdirectory).map_err(|error| error.to_string())?;
    }

    repo.new_worktree(name, &path, &target_branch)?;

    Ok(())
}

84
src/path.rs Normal file
View File

@@ -0,0 +1,84 @@
use std::path::{Path, PathBuf};
use std::process;
use super::output::*;
#[cfg(test)]
mod tests {
    use super::*;

    /// Points `$HOME` at a fixed location so expansions are predictable.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    /// Asserts that expanding `input` yields `expected`.
    fn assert_expands_to(input: &str, expected: &str) {
        assert_eq!(expand_path(Path::new(input)), Path::new(expected));
    }

    #[test]
    fn check_expand_tilde() {
        setup();
        assert_expands_to("~/file", "/home/test/file");
    }

    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        // A tilde that is not the first component stays as-is.
        assert_expands_to("/home/~/file", "/home/~/file");
    }

    #[test]
    fn check_expand_home() {
        setup();
        assert_expands_to("$HOME/file", "/home/test/file");
        assert_expands_to("${HOME}/file", "/home/test/file");
    }
}
/// Converts a path into an owned `String`.
///
/// Panics if the path contains invalid UTF-8 (the `unwrap` is on the
/// `OsString -> String` conversion).
pub fn path_as_string(path: &Path) -> String {
    path.as_os_str()
        .to_os_string()
        .into_string()
        .unwrap()
}
pub fn env_home() -> PathBuf {
match std::env::var("HOME") {
Ok(path) => Path::new(&path).to_path_buf(),
Err(e) => {
print_error(&format!("Unable to read HOME: {}", e));
process::exit(1);
}
}
}
/// Expands `~` and `$HOME`/`${HOME}` in the given path.
///
/// Only the `HOME` variable is substituted; any other `$VAR` is left in
/// place (the context closure answers `Ok(None)` for unknown names).
/// Exits the process if expansion fails.
pub fn expand_path(path: &Path) -> PathBuf {
    // shellexpand wants a callback that yields the home directory.
    fn home_dir() -> Option<PathBuf> {
        Some(env_home())
    }

    let expanded_path = match shellexpand::full_with_context(
        &path_as_string(path),
        home_dir,
        |name| -> Result<Option<String>, &'static str> {
            match name {
                "HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
                _ => Ok(None),
            }
        },
    ) {
        // Cow::Borrowed: nothing was expanded; Cow::Owned carries the
        // substituted string.
        Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
        Ok(std::borrow::Cow::Owned(path)) => path,
        Err(e) => {
            print_error(&format!("Unable to expand root: {}", e));
            process::exit(1);
        }
    };

    Path::new(&expanded_path).to_path_buf()
}

145
src/provider/github.rs Normal file
View File

@@ -0,0 +1,145 @@
use serde::Deserialize;
use super::escape;
use super::ApiErrorResponse;
use super::Filter;
use super::JsonError;
use super::Project;
use super::Provider;
use super::SecretToken;
const PROVIDER_NAME: &str = "github";
const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json";
const GITHUB_API_BASEURL: &str =
option_env!("GITHUB_API_BASEURL").unwrap_or("https://api.github.com");
/// Subset of Github's repository JSON payload that we care about.
#[derive(Deserialize)]
pub struct GithubProject {
    pub name: String,
    // "owner/name" — used to derive the namespace.
    pub full_name: String,
    pub clone_url: String,
    pub ssh_url: String,
    pub private: bool,
}
/// Minimal user payload; only the login name is needed.
#[derive(Deserialize)]
struct GithubUser {
    #[serde(rename = "login")]
    pub username: String,
}
impl Project for GithubProject {
    /// Repository name without the owner part.
    fn name(&self) -> String {
        self.name.clone()
    }

    /// Everything before the last `/` of `full_name` (the owner or
    /// organization); `None` when there is no `/` at all.
    fn namespace(&self) -> Option<String> {
        self.full_name
            .rsplit_once('/')
            .map(|(namespace, _name)| namespace.to_string())
    }

    fn ssh_url(&self) -> String {
        self.ssh_url.clone()
    }

    fn http_url(&self) -> String {
        self.clone_url.clone()
    }

    fn private(&self) -> bool {
        self.private
    }
}
/// Error body Github returns for failed API calls.
#[derive(Deserialize)]
pub struct GithubApiErrorResponse {
    pub message: String,
}

impl JsonError for GithubApiErrorResponse {
    fn to_string(self) -> String {
        self.message
    }
}
/// Github provider: lists repositories via the v3 REST API.
pub struct Github {
    filter: Filter,
    secret_token: SecretToken,
}
impl Provider for Github {
    type Project = GithubProject;
    type Error = GithubApiErrorResponse;

    /// Creates a Github provider. Overriding the API URL is not
    /// supported; only api.github.com is targeted.
    fn new(
        filter: Filter,
        secret_token: SecretToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        if api_url_override.is_some() {
            return Err("API URL overriding is not supported for Github".to_string());
        }
        Ok(Self {
            filter,
            secret_token,
        })
    }

    fn name(&self) -> String {
        String::from(PROVIDER_NAME)
    }

    fn filter(&self) -> Filter {
        self.filter.clone()
    }

    fn secret_token(&self) -> SecretToken {
        self.secret_token.clone()
    }

    // Github expects `Authorization: token <secret>`.
    fn auth_header_key() -> String {
        "token".to_string()
    }

    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/users/{}/repos", escape(user)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        // "Groups" map to Github organizations.
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/orgs/{}/repos?type=all", escape(group)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/user/repos"),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> {
        Ok(super::call::<GithubUser, GithubApiErrorResponse>(
            &format!("{GITHUB_API_BASEURL}/user"),
            &Self::auth_header_key(),
            &self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?
        .username)
    }
}

166
src/provider/gitlab.rs Normal file
View File

@@ -0,0 +1,166 @@
use serde::Deserialize;
use super::escape;
use super::ApiErrorResponse;
use super::Filter;
use super::JsonError;
use super::Project;
use super::Provider;
use super::SecretToken;
const PROVIDER_NAME: &str = "gitlab";
const ACCEPT_HEADER_JSON: &str = "application/json";
const GITLAB_API_BASEURL: &str = option_env!("GITLAB_API_BASEURL").unwrap_or("https://gitlab.com");
/// Gitlab project visibility levels, as serialized by the API
/// (lowercase strings).
#[derive(Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum GitlabVisibility {
    Private,
    Internal,
    Public,
}
/// Subset of Gitlab's project JSON payload that we care about.
#[derive(Deserialize)]
pub struct GitlabProject {
    // "path" is the URL-safe project slug.
    #[serde(rename = "path")]
    pub name: String,
    // "namespace/path" — used to derive the namespace.
    pub path_with_namespace: String,
    pub http_url_to_repo: String,
    pub ssh_url_to_repo: String,
    pub visibility: GitlabVisibility,
}
/// Minimal user payload; only the username is needed.
#[derive(Deserialize)]
struct GitlabUser {
    pub username: String,
}
impl Project for GitlabProject {
    /// The project slug (Gitlab "path").
    fn name(&self) -> String {
        self.name.clone()
    }

    /// Everything before the last `/` of `path_with_namespace`; `None`
    /// when there is no `/` at all.
    fn namespace(&self) -> Option<String> {
        self.path_with_namespace
            .rsplit_once('/')
            .map(|(namespace, _name)| namespace.to_string())
    }

    fn ssh_url(&self) -> String {
        self.ssh_url_to_repo.clone()
    }

    fn http_url(&self) -> String {
        self.http_url_to_repo.clone()
    }

    /// Internal projects count as private too: anything that is not
    /// fully public requires authentication to clone via HTTPS.
    fn private(&self) -> bool {
        match self.visibility {
            GitlabVisibility::Public => false,
            GitlabVisibility::Private | GitlabVisibility::Internal => true,
        }
    }
}
/// Error body Gitlab returns for failed API calls.
///
/// Depending on the endpoint the message arrives under "message",
/// "error" or "error_description" — the aliases cover all three.
#[derive(Deserialize)]
pub struct GitlabApiErrorResponse {
    #[serde(alias = "error_description", alias = "error")]
    pub message: String,
}

impl JsonError for GitlabApiErrorResponse {
    fn to_string(self) -> String {
        self.message
    }
}
/// Gitlab provider: lists projects via the v4 REST API.
pub struct Gitlab {
    filter: Filter,
    secret_token: SecretToken,
    // Allows pointing at a self-hosted instance instead of gitlab.com.
    api_url_override: Option<String>,
}
impl Gitlab {
    /// Returns the API base URL: the configured override if present,
    /// otherwise the default instance, with any trailing slashes
    /// stripped so URLs can be built by simple concatenation.
    fn api_url(&self) -> String {
        // `as_deref` lets us fall back to the &'static str default
        // without allocating a temporary String on every call.
        self.api_url_override
            .as_deref()
            .unwrap_or(GITLAB_API_BASEURL)
            .trim_end_matches('/')
            .to_string()
    }
}
impl Provider for Gitlab {
    type Project = GitlabProject;
    type Error = GitlabApiErrorResponse;

    fn new(
        filter: Filter,
        secret_token: SecretToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        Ok(Self {
            filter,
            secret_token,
            api_url_override,
        })
    }

    fn name(&self) -> String {
        String::from(PROVIDER_NAME)
    }

    fn filter(&self) -> Filter {
        self.filter.clone()
    }

    fn secret_token(&self) -> SecretToken {
        self.secret_token.clone()
    }

    // Gitlab expects `Authorization: bearer <token>`.
    fn auth_header_key() -> String {
        "bearer".to_string()
    }

    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!("{}/api/v4/users/{}/projects", self.api_url(), escape(user)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        // Includes subgroup projects, skips archived ones.
        self.call_list(
            &format!(
                "{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false",
                self.api_url(),
                escape(group),
            ),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!("{}/api/v4/projects", self.api_url(),),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> {
        Ok(super::call::<GitlabUser, GitlabApiErrorResponse>(
            &format!("{}/api/v4/user", self.api_url()),
            &Self::auth_header_key(),
            &self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?
        .username)
    }
}

349
src/provider/mod.rs Normal file
View File

@@ -0,0 +1,349 @@
use serde::{Deserialize, Serialize};
// Required to use the `json()` method from the trait
use isahc::ReadResponseExt;
pub mod github;
pub mod gitlab;
pub use github::Github;
pub use gitlab::Gitlab;
use super::repo;
use std::collections::HashMap;
/// The kind of remote forge a configuration talks to.
///
/// Accepts alternative capitalizations when deserialized and can be
/// selected on the command line via clap's ArgEnum.
#[derive(Debug, Deserialize, Serialize, clap::ArgEnum, Clone)]
pub enum RemoteProvider {
    #[serde(alias = "github", alias = "GitHub")]
    Github,
    #[serde(alias = "gitlab", alias = "GitLab")]
    Gitlab,
}
/// API response that is either a list of projects or an error body.
/// Untagged: serde tries `Success` first, then `Failure`.
// NOTE(review): not referenced anywhere in this part of the file —
// confirm it is used elsewhere before removing.
#[derive(Deserialize)]
#[serde(untagged)]
enum ProjectResponse<T, U> {
    Success(Vec<T>),
    Failure(U),
}
/// Percent-encodes `s` so it is safe to embed as a single URL
/// path or query component.
pub fn escape(s: &str) -> String {
    url_escape::encode_component(s).into_owned()
}
/// A project as reported by a remote forge (Github/Gitlab/...).
pub trait Project {
    /// Converts this provider project into the internal repo
    /// representation, with a single remote named after the provider.
    fn into_repo_config(
        self,
        provider_name: &str,
        worktree_setup: bool,
        force_ssh: bool,
    ) -> repo::Repo
    where
        Self: Sized,
    {
        repo::Repo {
            name: self.name(),
            namespace: self.namespace(),
            worktree_setup,
            remotes: Some(vec![repo::Remote {
                name: String::from(provider_name),
                // Private projects are always cloned via SSH (HTTPS
                // would require credentials); `force_ssh` extends that
                // to public projects as well.
                url: if force_ssh || self.private() {
                    self.ssh_url()
                } else {
                    self.http_url()
                },
                remote_type: if force_ssh || self.private() {
                    repo::RemoteType::Ssh
                } else {
                    repo::RemoteType::Https
                },
            }]),
        }
    }

    fn name(&self) -> String;
    // Owner/organization part of the project path, if any.
    fn namespace(&self) -> Option<String>;
    fn ssh_url(&self) -> String;
    fn http_url(&self) -> String;
    fn private(&self) -> bool;
}
// Secret used to authenticate API calls (token / bearer secret).
type SecretToken = String;

/// Selects which projects to fetch from a provider.
#[derive(Clone)]
pub struct Filter {
    users: Vec<String>,  // fetch projects of these users
    groups: Vec<String>, // fetch projects of these groups/organizations
    owner: bool,         // include projects owned by the current user
    access: bool,        // include every project the token can access
}

impl Filter {
    pub fn new(users: Vec<String>, groups: Vec<String>, owner: bool, access: bool) -> Self {
        Filter {
            users,
            groups,
            owner,
            access,
        }
    }
}
/// Error from a provider API call: either a structured JSON error body
/// (`Json`) or a plain message from the transport/serialization layer
/// (`String`).
pub enum ApiErrorResponse<T>
where
    T: JsonError,
{
    Json(T),
    String(String),
}

// Lets `?` convert plain `String` errors into `ApiErrorResponse`.
impl<T> From<String> for ApiErrorResponse<T>
where
    T: JsonError,
{
    fn from(s: String) -> ApiErrorResponse<T> {
        ApiErrorResponse::String(s)
    }
}

/// Converts a provider-specific JSON error body into a display string.
pub trait JsonError {
    fn to_string(self) -> String;
}
pub trait Provider {
type Project: serde::de::DeserializeOwned + Project;
type Error: serde::de::DeserializeOwned + JsonError;
fn new(
filter: Filter,
secret_token: SecretToken,
api_url_override: Option<String>,
) -> Result<Self, String>
where
Self: Sized;
fn name(&self) -> String;
fn filter(&self) -> Filter;
fn secret_token(&self) -> SecretToken;
fn auth_header_key() -> String;
fn get_user_projects(
&self,
user: &str,
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
fn get_group_projects(
&self,
group: &str,
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
fn get_own_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
self.get_user_projects(&self.get_current_user()?)
}
fn get_accessible_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
fn get_current_user(&self) -> Result<String, ApiErrorResponse<Self::Error>>;
///
/// Calls the API at specific uri and expects a successful response of Vec<T> back, or an error
/// response U
///
/// Handles paging with "link" HTTP headers properly and reads all pages to
/// the end.
fn call_list(
&self,
uri: &str,
accept_header: Option<&str>,
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
let mut results = vec![];
let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;
let request = isahc::Request::builder()
.uri(uri)
.method("GET")
.header("accept", accept_header.unwrap_or("application/json"))
.header(
"authorization",
format!("{} {}", Self::auth_header_key(), &self.secret_token()),
)
.body(())
.map_err(|error| error.to_string())?;
let mut response = client
.send(request)
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
if !response.status().is_success() {
let r: Self::Error = response
.json()
.map_err(|error| format!("Failed deserializing error response: {}", error))?;
return Err(ApiErrorResponse::Json(r));
}
let result: Vec<Self::Project> = response
.json()
.map_err(|error| format!("Failed deserializing response: {}", error))?;
results.extend(result);
if let Some(link_header) = response.headers().get("link") {
let link_header = link_header.to_str().map_err(|error| error.to_string())?;
let link_header =
parse_link_header::parse(link_header).map_err(|error| error.to_string())?;
let next_page = link_header.get(&Some(String::from("next")));
if let Some(page) = next_page {
let following_repos = self.call_list(&page.raw_uri, accept_header)?;
results.extend(following_repos);
}
}
Ok(results)
}
fn get_repos(
&self,
worktree_setup: bool,
force_ssh: bool,
) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> {
let mut repos = vec![];
if self.filter().owner {
repos.extend(self.get_own_projects().map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?);
}
if self.filter().access {
let accessible_projects =
self.get_accessible_projects()
.map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?;
for accessible_project in accessible_projects {
let mut already_present = false;
for repo in &repos {
if repo.name() == accessible_project.name()
&& repo.namespace() == accessible_project.namespace()
{
already_present = true;
}
}
if !already_present {
repos.push(accessible_project);
}
}
}
for user in &self.filter().users {
let user_projects = self.get_user_projects(user).map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?;
for user_project in user_projects {
let mut already_present = false;
for repo in &repos {
if repo.name() == user_project.name()
&& repo.namespace() == user_project.namespace()
{
already_present = true;
}
}
if !already_present {
repos.push(user_project);
}
}
}
for group in &self.filter().groups {
let group_projects = self
.get_group_projects(group)
.map_err(|error| match error {
ApiErrorResponse::Json(x) => x.to_string(),
ApiErrorResponse::String(s) => s,
})?;
for group_project in group_projects {
let mut already_present = false;
for repo in &repos {
if repo.name() == group_project.name()
&& repo.namespace() == group_project.namespace()
{
already_present = true;
}
}
if !already_present {
repos.push(group_project);
}
}
}
let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new();
for repo in repos {
let namespace = repo.namespace();
let mut repo = repo.into_repo_config(&self.name(), worktree_setup, force_ssh);
// Namespace is already part of the hashmap key. I'm not too happy
// about the data exchange format here.
repo.remove_namespace();
ret.entry(namespace).or_insert(vec![]).push(repo);
}
Ok(ret)
}
}
fn call<T, U>(
uri: &str,
auth_header_key: &str,
secret_token: &str,
accept_header: Option<&str>,
) -> Result<T, ApiErrorResponse<U>>
where
T: serde::de::DeserializeOwned,
U: serde::de::DeserializeOwned + JsonError,
{
let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;
let request = isahc::Request::builder()
.uri(uri)
.header("accept", accept_header.unwrap_or("application/json"))
.header(
"authorization",
format!("{} {}", &auth_header_key, &secret_token),
)
.body(())
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
let mut response = client
.send(request)
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
let success = response.status().is_success();
if !success {
let response: U = response
.json()
.map_err(|error| format!("Failed deserializing error response: {}", error))?;
return Err(ApiErrorResponse::Json(response));
}
let response: T = response
.json()
.map_err(|error| format!("Failed deserializing response: {}", error))?;
Ok(response)
}

View File

@@ -3,11 +3,15 @@ use std::path::Path;
use git2::Repository; use git2::Repository;
use crate::output::*; use super::output::*;
use super::path;
use super::worktree;
const WORKTREE_CONFIG_FILE_NAME: &str = "grm.toml"; const WORKTREE_CONFIG_FILE_NAME: &str = "grm.toml";
const GIT_CONFIG_BARE_KEY: &str = "core.bare";
const GIT_CONFIG_PUSH_DEFAULT: &str = "push.default";
#[derive(Debug, Serialize, Deserialize, PartialEq)] #[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
pub enum RemoteType { pub enum RemoteType {
Ssh, Ssh,
@@ -31,7 +35,7 @@ pub enum GitPushDefaultSetting {
Upstream, Upstream,
} }
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq, Eq)]
pub enum RepoErrorKind { pub enum RepoErrorKind {
NotFound, NotFound,
Unknown(String), Unknown(String),
@@ -104,28 +108,32 @@ impl std::fmt::Display for RepoError {
} }
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug)]
#[serde(deny_unknown_fields)]
pub struct Remote { pub struct Remote {
pub name: String, pub name: String,
pub url: String, pub url: String,
#[serde(rename = "type")]
pub remote_type: RemoteType, pub remote_type: RemoteType,
} }
fn worktree_setup_default() -> bool { #[derive(Debug)]
false pub struct Repo {
pub name: String,
pub namespace: Option<String>,
pub worktree_setup: bool,
pub remotes: Option<Vec<Remote>>,
} }
#[derive(Debug, Serialize, Deserialize)] impl Repo {
#[serde(deny_unknown_fields)] pub fn fullname(&self) -> String {
pub struct RepoConfig { match &self.namespace {
pub name: String, Some(namespace) => format!("{}/{}", namespace, self.name),
None => self.name.clone(),
}
}
#[serde(default = "worktree_setup_default")] pub fn remove_namespace(&mut self) {
pub worktree_setup: bool, self.namespace = None
}
pub remotes: Option<Vec<Remote>>,
} }
pub struct RepoChanges { pub struct RepoChanges {
@@ -181,16 +189,29 @@ impl Worktree {
&self.name &self.name
} }
pub fn forward_branch(&self, rebase: bool) -> Result<Option<String>, String> { pub fn forward_branch(&self, rebase: bool, stash: bool) -> Result<Option<String>, String> {
let repo = Repo::open(Path::new(&self.name), false) let repo = RepoHandle::open(Path::new(&self.name), false)
.map_err(|error| format!("Error opening worktree: {}", error))?; .map_err(|error| format!("Error opening worktree: {}", error))?;
if let Ok(remote_branch) = repo.find_local_branch(&self.name)?.upstream() { if let Ok(remote_branch) = repo.find_local_branch(&self.name)?.upstream() {
let status = repo.status(false)?; let status = repo.status(false)?;
let mut stashed_changes = false;
if !status.clean() { if !status.clean() {
if stash {
repo.stash()?;
stashed_changes = true;
} else {
return Ok(Some(String::from("Worktree contains changes"))); return Ok(Some(String::from("Worktree contains changes")));
} }
}
let unstash = || -> Result<(), String> {
if stashed_changes {
repo.stash_pop()?;
}
Ok(())
};
let remote_annotated_commit = repo let remote_annotated_commit = repo
.0 .0
@@ -231,6 +252,7 @@ impl Worktree {
continue; continue;
} }
rebase.abort().map_err(convert_libgit2_error)?; rebase.abort().map_err(convert_libgit2_error)?;
unstash()?;
return Err(convert_libgit2_error(error)); return Err(convert_libgit2_error(error));
} }
} }
@@ -243,9 +265,11 @@ impl Worktree {
.map_err(convert_libgit2_error)?; .map_err(convert_libgit2_error)?;
if analysis.is_up_to_date() { if analysis.is_up_to_date() {
unstash()?;
return Ok(None); return Ok(None);
} }
if !analysis.is_fast_forward() { if !analysis.is_fast_forward() {
unstash()?;
return Ok(Some(String::from("Worktree cannot be fast forwarded"))); return Ok(Some(String::from("Worktree cannot be fast forwarded")));
} }
@@ -257,17 +281,20 @@ impl Worktree {
) )
.map_err(convert_libgit2_error)?; .map_err(convert_libgit2_error)?;
} }
unstash()?;
} else { } else {
return Ok(Some(String::from("No remote branch to rebase onto"))); return Ok(Some(String::from("No remote branch to rebase onto")));
}; };
Ok(None) Ok(None)
} }
pub fn rebase_onto_default( pub fn rebase_onto_default(
&self, &self,
config: &Option<WorktreeRootConfig>, config: &Option<WorktreeRootConfig>,
stash: bool,
) -> Result<Option<String>, String> { ) -> Result<Option<String>, String> {
let repo = Repo::open(Path::new(&self.name), false) let repo = RepoHandle::open(Path::new(&self.name), false)
.map_err(|error| format!("Error opening worktree: {}", error))?; .map_err(|error| format!("Error opening worktree: {}", error))?;
let guess_default_branch = || { let guess_default_branch = || {
@@ -291,6 +318,25 @@ impl Worktree {
}, },
}; };
let status = repo.status(false)?;
let mut stashed_changes = false;
if !status.clean() {
if stash {
repo.stash()?;
stashed_changes = true;
} else {
return Ok(Some(String::from("Worktree contains changes")));
}
}
let unstash = || -> Result<(), String> {
if stashed_changes {
repo.stash_pop()?;
}
Ok(())
};
let base_branch = repo.find_local_branch(&default_branch_name)?; let base_branch = repo.find_local_branch(&default_branch_name)?;
let base_annotated_commit = repo let base_annotated_commit = repo
.0 .0
@@ -330,11 +376,13 @@ impl Worktree {
continue; continue;
} }
rebase.abort().map_err(convert_libgit2_error)?; rebase.abort().map_err(convert_libgit2_error)?;
unstash()?;
return Err(convert_libgit2_error(error)); return Err(convert_libgit2_error(error));
} }
} }
rebase.finish(None).map_err(convert_libgit2_error)?; rebase.finish(None).map_err(convert_libgit2_error)?;
unstash()?;
Ok(None) Ok(None)
} }
} }
@@ -403,6 +451,26 @@ mod tests {
fn check_unsupported_protocol_git() { fn check_unsupported_protocol_git() {
detect_remote_type("git://example.com"); detect_remote_type("git://example.com");
} }
#[test]
fn repo_check_fullname() {
    // A namespaced repo joins namespace and name with a slash.
    let namespaced = Repo {
        name: "name".to_string(),
        namespace: Some("namespace".to_string()),
        worktree_setup: false,
        remotes: None,
    };
    assert_eq!(namespaced.fullname(), "namespace/name");

    // Without a namespace, the full name is just the repo name.
    let plain = Repo {
        name: "name".to_string(),
        namespace: None,
        worktree_setup: false,
        remotes: None,
    };
    assert_eq!(plain.fullname(), "name");
}
} }
pub fn detect_remote_type(remote_url: &str) -> Option<RemoteType> { pub fn detect_remote_type(remote_url: &str) -> Option<RemoteType> {
@@ -428,21 +496,21 @@ pub fn detect_remote_type(remote_url: &str) -> Option<RemoteType> {
None None
} }
pub struct Repo(git2::Repository); pub struct RepoHandle(git2::Repository);
pub struct Branch<'a>(git2::Branch<'a>); pub struct Branch<'a>(git2::Branch<'a>);
fn convert_libgit2_error(error: git2::Error) -> String { fn convert_libgit2_error(error: git2::Error) -> String {
error.message().to_string() error.message().to_string()
} }
impl Repo { impl RepoHandle {
pub fn open(path: &Path, is_worktree: bool) -> Result<Self, RepoError> { pub fn open(path: &Path, is_worktree: bool) -> Result<Self, RepoError> {
let open_func = match is_worktree { let open_func = match is_worktree {
true => Repository::open_bare, true => Repository::open_bare,
false => Repository::open, false => Repository::open,
}; };
let path = match is_worktree { let path = match is_worktree {
true => path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY), true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
false => path.to_path_buf(), false => path.to_path_buf(),
}; };
match open_func(path) { match open_func(path) {
@@ -456,6 +524,35 @@ impl Repo {
} }
} }
/// Stashes all local modifications, including untracked files.
///
/// The signature recorded for the stash entry is the author of the current
/// HEAD commit.
pub fn stash(&self) -> Result<(), String> {
    let head_branch = self.head_branch()?;
    let head = head_branch.commit()?;
    let author = head.author();
    // This is honestly quite horrible. The problem is that all stash operations expect a
    // mutable reference (as they, well, mutate the repo after all). But we are heavily using
    // immutable references a lot with this struct. I'm really not sure how to best solve this.
    // Right now, we just open the repo AGAIN. It is safe, as we are only accessing the stash
    // with the second reference, so there are no cross effects. But it just smells. Also,
    // using `unwrap()` here as we are already sure that the repo is openable(?).
    let mut repo = RepoHandle::open(self.0.path(), false).unwrap();
    repo.0
        .stash_save2(&author, None, Some(git2::StashFlags::INCLUDE_UNTRACKED))
        .map_err(convert_libgit2_error)?;
    Ok(())
}
/// Pops the most recent stash entry (index 0), reinstating the index state
/// that was recorded when the stash was created.
pub fn stash_pop(&self) -> Result<(), String> {
    // Reopen the repository to obtain a mutable handle; see the rationale
    // in `stash()`.
    let mut repo = RepoHandle::open(self.0.path(), false).unwrap();
    repo.0
        .stash_pop(
            0,
            Some(git2::StashApplyOptions::new().reinstantiate_index()),
        )
        .map_err(convert_libgit2_error)?;
    Ok(())
}
pub fn rename_remote(&self, remote: &RemoteHandle, new_name: &str) -> Result<(), String> { pub fn rename_remote(&self, remote: &RemoteHandle, new_name: &str) -> Result<(), String> {
let failed_refspecs = self let failed_refspecs = self
.0 .0
@@ -586,11 +683,11 @@ impl Repo {
pub fn init(path: &Path, is_worktree: bool) -> Result<Self, String> { pub fn init(path: &Path, is_worktree: bool) -> Result<Self, String> {
let repo = match is_worktree { let repo = match is_worktree {
false => Repository::init(path).map_err(convert_libgit2_error)?, false => Repository::init(path).map_err(convert_libgit2_error)?,
true => Repository::init_bare(path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY)) true => Repository::init_bare(path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY))
.map_err(convert_libgit2_error)?, .map_err(convert_libgit2_error)?,
}; };
let repo = Repo(repo); let repo = RepoHandle(repo);
if is_worktree { if is_worktree {
repo.set_config_push(GitPushDefaultSetting::Upstream)?; repo.set_config_push(GitPushDefaultSetting::Upstream)?;
@@ -603,10 +700,8 @@ impl Repo {
self.0.config().map_err(convert_libgit2_error) self.0.config().map_err(convert_libgit2_error)
} }
pub fn find_worktree(&self, path: &Path) -> Result<(), String> { pub fn find_worktree(&self, name: &str) -> Result<(), String> {
self.0 self.0.find_worktree(name).map_err(convert_libgit2_error)?;
.find_worktree(path.to_str().expect("Worktree path is not valid utf-8"))
.map_err(convert_libgit2_error)?;
Ok(()) Ok(())
} }
@@ -651,8 +746,8 @@ impl Repo {
let mut config = self.config()?; let mut config = self.config()?;
config config
.set_bool(crate::GIT_CONFIG_BARE_KEY, value) .set_bool(GIT_CONFIG_BARE_KEY, value)
.map_err(|error| format!("Could not set {}: {}", crate::GIT_CONFIG_BARE_KEY, error)) .map_err(|error| format!("Could not set {}: {}", GIT_CONFIG_BARE_KEY, error))
} }
pub fn convert_to_worktree( pub fn convert_to_worktree(
@@ -675,7 +770,7 @@ impl Repo {
return Err(WorktreeConversionFailureReason::Ignored); return Err(WorktreeConversionFailureReason::Ignored);
} }
std::fs::rename(".git", crate::GIT_MAIN_WORKTREE_DIRECTORY).map_err(|error| { std::fs::rename(".git", worktree::GIT_MAIN_WORKTREE_DIRECTORY).map_err(|error| {
WorktreeConversionFailureReason::Error(format!( WorktreeConversionFailureReason::Error(format!(
"Error moving .git directory: {}", "Error moving .git directory: {}",
error error
@@ -695,7 +790,7 @@ impl Repo {
Ok(entry) => { Ok(entry) => {
let path = entry.path(); let path = entry.path();
// unwrap is safe here, the path will ALWAYS have a file component // unwrap is safe here, the path will ALWAYS have a file component
if path.file_name().unwrap() == crate::GIT_MAIN_WORKTREE_DIRECTORY { if path.file_name().unwrap() == worktree::GIT_MAIN_WORKTREE_DIRECTORY {
continue; continue;
} }
if path.is_file() || path.is_symlink() { if path.is_file() || path.is_symlink() {
@@ -721,7 +816,7 @@ impl Repo {
} }
} }
let worktree_repo = Repo::open(root_dir, true).map_err(|error| { let worktree_repo = RepoHandle::open(root_dir, true).map_err(|error| {
WorktreeConversionFailureReason::Error(format!( WorktreeConversionFailureReason::Error(format!(
"Opening newly converted repository failed: {}", "Opening newly converted repository failed: {}",
error error
@@ -744,18 +839,12 @@ impl Repo {
config config
.set_str( .set_str(
crate::GIT_CONFIG_PUSH_DEFAULT, GIT_CONFIG_PUSH_DEFAULT,
match value { match value {
GitPushDefaultSetting::Upstream => "upstream", GitPushDefaultSetting::Upstream => "upstream",
}, },
) )
.map_err(|error| { .map_err(|error| format!("Could not set {}: {}", GIT_CONFIG_PUSH_DEFAULT, error))
format!(
"Could not set {}: {}",
crate::GIT_CONFIG_PUSH_DEFAULT,
error
)
})
} }
pub fn has_untracked_files(&self, is_worktree: bool) -> Result<bool, String> { pub fn has_untracked_files(&self, is_worktree: bool) -> Result<bool, String> {
@@ -1001,7 +1090,7 @@ impl Repo {
name name
))); )));
} }
let worktree_repo = Repo::open(worktree_dir, false).map_err(|error| { let worktree_repo = RepoHandle::open(worktree_dir, false).map_err(|error| {
WorktreeRemoveFailureReason::Error(format!("Error opening repo: {}", error)) WorktreeRemoveFailureReason::Error(format!("Error opening repo: {}", error))
})?; })?;
@@ -1014,7 +1103,7 @@ impl Repo {
})?; })?;
if branch_name != name if branch_name != name
&& !branch_name.ends_with(&format!("{}{}", crate::BRANCH_NAMESPACE_SEPARATOR, name)) && !branch_name.ends_with(&format!("{}{}", super::BRANCH_NAMESPACE_SEPARATOR, name))
{ {
return Err(WorktreeRemoveFailureReason::Error(format!( return Err(WorktreeRemoveFailureReason::Error(format!(
"Branch {} is checked out in worktree, this does not look correct", "Branch {} is checked out in worktree, this does not look correct",
@@ -1184,7 +1273,7 @@ impl Repo {
let mut unmanaged_worktrees = Vec::new(); let mut unmanaged_worktrees = Vec::new();
for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? { for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? {
let dirname = crate::path_as_string( let dirname = path::path_as_string(
entry entry
.map_err(|error| error.to_string())? .map_err(|error| error.to_string())?
.path() .path()
@@ -1204,28 +1293,30 @@ impl Repo {
}; };
let default_branch_name = match &config { let default_branch_name = match &config {
None => guess_default_branch()?, None => guess_default_branch().ok(),
Some(config) => match &config.persistent_branches { Some(config) => match &config.persistent_branches {
None => guess_default_branch()?, None => guess_default_branch().ok(),
Some(persistent_branches) => { Some(persistent_branches) => {
if persistent_branches.is_empty() { if persistent_branches.is_empty() {
guess_default_branch()? guess_default_branch().ok()
} else { } else {
persistent_branches[0].clone() Some(persistent_branches[0].clone())
} }
} }
}, },
}; };
if dirname == crate::GIT_MAIN_WORKTREE_DIRECTORY { if dirname == worktree::GIT_MAIN_WORKTREE_DIRECTORY {
continue; continue;
} }
if dirname == WORKTREE_CONFIG_FILE_NAME { if dirname == WORKTREE_CONFIG_FILE_NAME {
continue; continue;
} }
if let Some(default_branch_name) = default_branch_name {
if dirname == default_branch_name { if dirname == default_branch_name {
continue; continue;
} }
}
if !&worktrees.iter().any(|worktree| worktree.name() == dirname) { if !&worktrees.iter().any(|worktree| worktree.name() == dirname) {
unmanaged_worktrees.push(dirname); unmanaged_worktrees.push(dirname);
} }
@@ -1234,7 +1325,7 @@ impl Repo {
} }
pub fn detect_worktree(path: &Path) -> bool { pub fn detect_worktree(path: &Path) -> bool {
path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY).exists() path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY).exists()
} }
} }
@@ -1253,6 +1344,10 @@ impl Commit<'_> {
pub fn id(&self) -> Oid { pub fn id(&self) -> Oid {
Oid(self.0.id()) Oid(self.0.id())
} }
pub(self) fn author(&self) -> git2::Signature {
self.0.author()
}
} }
impl<'a> Branch<'a> { impl<'a> Branch<'a> {
@@ -1323,9 +1418,7 @@ fn get_remote_callbacks() -> git2::RemoteCallbacks<'static> {
Some(username) => username, Some(username) => username,
None => panic!("Could not get username. This is a bug"), None => panic!("Could not get username. This is a bug"),
}; };
git2::Cred::ssh_key_from_agent(username).or_else(|_| { git2::Cred::ssh_key_from_agent(username)
git2::Cred::ssh_key(username, None, &crate::env_home().join(".ssh/id_rsa"), None)
})
}); });
callbacks callbacks
@@ -1356,7 +1449,7 @@ impl RemoteHandle<'_> {
&mut self, &mut self,
local_branch_name: &str, local_branch_name: &str,
remote_branch_name: &str, remote_branch_name: &str,
_repo: &Repo, _repo: &RepoHandle,
) -> Result<(), String> { ) -> Result<(), String> {
if !self.is_pushable()? { if !self.is_pushable()? {
return Err(String::from("Trying to push to a non-pushable remote")); return Err(String::from("Trying to push to a non-pushable remote"));
@@ -1391,7 +1484,7 @@ pub fn clone_repo(
) -> Result<(), Box<dyn std::error::Error>> { ) -> Result<(), Box<dyn std::error::Error>> {
let clone_target = match is_worktree { let clone_target = match is_worktree {
false => path.to_path_buf(), false => path.to_path_buf(),
true => path.join(crate::GIT_MAIN_WORKTREE_DIRECTORY), true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
}; };
print_action(&format!( print_action(&format!(
@@ -1422,7 +1515,7 @@ pub fn clone_repo(
} }
} }
let repo = Repo::open(&clone_target, false)?; let repo = RepoHandle::open(&clone_target, false)?;
if is_worktree { if is_worktree {
repo.set_config_push(GitPushDefaultSetting::Upstream)?; repo.set_config_push(GitPushDefaultSetting::Upstream)?;
@@ -1436,8 +1529,11 @@ pub fn clone_repo(
repo.rename_remote(&origin, &remote.name)?; repo.rename_remote(&origin, &remote.name)?;
} }
let mut active_branch = repo.head_branch()?; // If there is no head_branch, we most likely cloned an empty repository and
// there is no point in setting any upstreams.
if let Ok(mut active_branch) = repo.head_branch() {
active_branch.set_upstream(&remote.name, &active_branch.name()?)?; active_branch.set_upstream(&remote.name, &active_branch.name()?)?;
};
Ok(()) Ok(())
} }

View File

@@ -1,3 +1,7 @@
use super::config;
use super::path;
use super::repo;
use comfy_table::{Cell, Table}; use comfy_table::{Cell, Table};
use std::path::Path; use std::path::Path;
@@ -19,7 +23,7 @@ fn add_table_header(table: &mut Table) {
fn add_repo_status( fn add_repo_status(
table: &mut Table, table: &mut Table,
repo_name: &str, repo_name: &str,
repo_handle: &crate::Repo, repo_handle: &repo::RepoHandle,
is_worktree: bool, is_worktree: bool,
) -> Result<(), String> { ) -> Result<(), String> {
let repo_status = repo_handle.status(is_worktree)?; let repo_status = repo_handle.status(is_worktree)?;
@@ -63,11 +67,11 @@ fn add_repo_status(
" <{}>{}", " <{}>{}",
remote_branch_name, remote_branch_name,
&match remote_tracking_status { &match remote_tracking_status {
crate::RemoteTrackingStatus::UpToDate => repo::RemoteTrackingStatus::UpToDate =>
String::from(" \u{2714}"), String::from(" \u{2714}"),
crate::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d), repo::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
crate::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d), repo::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
crate::RemoteTrackingStatus::Diverged(d1, d2) => repo::RemoteTrackingStatus::Diverged(d1, d2) =>
format!(" [+{}/-{}]", &d1, &d2), format!(" [+{}/-{}]", &d1, &d2),
} }
) )
@@ -97,7 +101,7 @@ fn add_repo_status(
// Don't return table, return a type that implements Display(?) // Don't return table, return a type that implements Display(?)
pub fn get_worktree_status_table( pub fn get_worktree_status_table(
repo: &crate::Repo, repo: &repo::RepoHandle,
directory: &Path, directory: &Path,
) -> Result<(impl std::fmt::Display, Vec<String>), String> { ) -> Result<(impl std::fmt::Display, Vec<String>), String> {
let worktrees = repo.get_worktrees()?; let worktrees = repo.get_worktrees()?;
@@ -109,7 +113,7 @@ pub fn get_worktree_status_table(
for worktree in &worktrees { for worktree in &worktrees {
let worktree_dir = &directory.join(&worktree.name()); let worktree_dir = &directory.join(&worktree.name());
if worktree_dir.exists() { if worktree_dir.exists() {
let repo = match crate::Repo::open(worktree_dir, false) { let repo = match repo::RepoHandle::open(worktree_dir, false) {
Ok(repo) => repo, Ok(repo) => repo,
Err(error) => { Err(error) => {
errors.push(format!( errors.push(format!(
@@ -130,36 +134,22 @@ pub fn get_worktree_status_table(
)); ));
} }
} }
for entry in std::fs::read_dir(&directory).map_err(|error| error.to_string())? { for worktree in repo::RepoHandle::find_unmanaged_worktrees(repo, directory)? {
let dirname = crate::path_as_string(
entry
.map_err(|error| error.to_string())?
.path()
.strip_prefix(&directory)
// this unwrap is safe, as we can be sure that each subentry of
// &directory also has the prefix &dir
.unwrap(),
);
if dirname == crate::GIT_MAIN_WORKTREE_DIRECTORY {
continue;
}
if !&worktrees.iter().any(|worktree| worktree.name() == dirname) {
errors.push(format!( errors.push(format!(
"Found {}, which is not a valid worktree directory!", "Found {}, which is not a valid worktree directory!",
&dirname &worktree
)); ));
} }
}
Ok((table, errors)) Ok((table, errors))
} }
pub fn get_status_table(config: crate::Config) -> Result<(Vec<Table>, Vec<String>), String> { pub fn get_status_table(config: config::Config) -> Result<(Vec<Table>, Vec<String>), String> {
let mut errors = Vec::new(); let mut errors = Vec::new();
let mut tables = Vec::new(); let mut tables = Vec::new();
for tree in config.trees.as_vec() { for tree in config.trees()? {
let repos = tree.repos.unwrap_or_default(); let repos = tree.repos.unwrap_or_default();
let root_path = crate::expand_path(Path::new(&tree.root)); let root_path = path::expand_path(Path::new(&tree.root));
let mut table = Table::new(); let mut table = Table::new();
add_table_header(&mut table); add_table_header(&mut table);
@@ -175,12 +165,12 @@ pub fn get_status_table(config: crate::Config) -> Result<(Vec<Table>, Vec<String
continue; continue;
} }
let repo_handle = crate::Repo::open(&repo_path, repo.worktree_setup); let repo_handle = repo::RepoHandle::open(&repo_path, repo.worktree_setup);
let repo_handle = match repo_handle { let repo_handle = match repo_handle {
Ok(repo) => repo, Ok(repo) => repo,
Err(error) => { Err(error) => {
if error.kind == crate::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
errors.push(format!( errors.push(format!(
"{}: No git repository found. Run sync?", "{}: No git repository found. Run sync?",
&repo.name &repo.name
@@ -218,8 +208,8 @@ fn add_worktree_table_header(table: &mut Table) {
fn add_worktree_status( fn add_worktree_status(
table: &mut Table, table: &mut Table,
worktree: &crate::repo::Worktree, worktree: &repo::Worktree,
repo: &crate::Repo, repo: &repo::RepoHandle,
) -> Result<(), String> { ) -> Result<(), String> {
let repo_status = repo.status(false)?; let repo_status = repo.status(false)?;
@@ -284,13 +274,13 @@ pub fn show_single_repo_status(
let mut table = Table::new(); let mut table = Table::new();
let mut warnings = Vec::new(); let mut warnings = Vec::new();
let is_worktree = crate::Repo::detect_worktree(path); let is_worktree = repo::RepoHandle::detect_worktree(path);
add_table_header(&mut table); add_table_header(&mut table);
let repo_handle = crate::Repo::open(path, is_worktree); let repo_handle = repo::RepoHandle::open(path, is_worktree);
if let Err(error) = repo_handle { if let Err(error) = repo_handle {
if error.kind == crate::RepoErrorKind::NotFound { if error.kind == repo::RepoErrorKind::NotFound {
return Err(String::from("Directory is not a git directory")); return Err(String::from("Directory is not a git directory"));
} else { } else {
return Err(format!("Opening repository failed: {}", error)); return Err(format!("Opening repository failed: {}", error));

268
src/tree.rs Normal file
View File

@@ -0,0 +1,268 @@
use std::fs;
use std::path::{Path, PathBuf};
use super::config;
use super::output::*;
use super::path;
use super::repo;
use super::worktree;
/// A configured tree: a root directory together with the repositories
/// managed beneath it.
pub struct Tree {
    // Root directory of the tree; callers expand it via
    // `path::expand_path()` before use.
    pub root: String,
    // Repositories that belong to this tree.
    pub repos: Vec<repo::Repo>,
}
/// Returns the paths of all repositories found on disk under `root_path`
/// that do not correspond to any entry in `managed_repos`.
pub fn find_unmanaged_repos(
    root_path: &Path,
    managed_repos: &[repo::Repo],
) -> Result<Vec<PathBuf>, String> {
    // A path is managed when some configured repo's full name resolves to it.
    let is_managed = |repo_path: &PathBuf| {
        managed_repos
            .iter()
            .any(|repo| Path::new(root_path).join(repo.fullname()) == *repo_path)
    };

    Ok(find_repo_paths(root_path)?
        .into_iter()
        .filter(|repo_path| !is_managed(repo_path))
        .collect())
}
/// Synchronizes every configured tree: clones or initializes missing
/// repositories, reconciles remotes, and warns about repositories found on
/// disk that are not part of the configuration.
///
/// Returns `Ok(false)` when at least one repository failed to sync, so the
/// caller can translate that into a non-zero exit status.
pub fn sync_trees(config: config::Config, init_worktree: bool) -> Result<bool, String> {
    let mut failures = false;

    let mut unmanaged_repos_absolute_paths = vec![];
    let mut managed_repos_absolute_paths = vec![];

    for tree in config.trees()? {
        let repos: Vec<repo::Repo> = tree
            .repos
            .unwrap_or_default()
            .into_iter()
            .map(|repo| repo.into_repo())
            .collect();

        let root_path = path::expand_path(Path::new(&tree.root));

        for repo in &repos {
            managed_repos_absolute_paths.push(root_path.join(repo.fullname()));
            match sync_repo(&root_path, repo, init_worktree) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        match find_unmanaged_repos(&root_path, &repos) {
            Ok(repos) => unmanaged_repos_absolute_paths.extend(repos),
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    for unmanaged_repo_absolute_path in &unmanaged_repos_absolute_paths {
        // A repository may be "unmanaged" relative to one tree but managed
        // by another (e.g. with nested trees); only warn when no tree at all
        // manages it.
        if managed_repos_absolute_paths.contains(unmanaged_repo_absolute_path) {
            continue;
        }
        print_warning(&format!(
            "Found unmanaged repository: \"{}\"",
            path::path_as_string(unmanaged_repo_absolute_path)
        ));
    }

    Ok(!failures)
}
/// Finds repositories recursively below `path`, returning their paths.
///
/// A directory counts as a repository when it contains either a `.git`
/// directory (normal checkout) or the main-worktree directory (worktree
/// setup). Recursion stops at a repository — nested directories inside it
/// are not searched — and symlinks are never followed.
pub fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
    let git_dir = path.join(".git");
    let git_worktree = path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY);

    if git_dir.exists() || git_worktree.exists() {
        // `path` itself is a repository; do not descend into it.
        return Ok(vec![path.to_path_buf()]);
    }

    let contents = fs::read_dir(path).map_err(|e| {
        format!(
            "Failed to open \"{}\": {}",
            &path.display(),
            match e.kind() {
                std::io::ErrorKind::NotADirectory =>
                    String::from("directory expected, but path is not a directory"),
                std::io::ErrorKind::NotFound => String::from("not found"),
                _ => format!("{:?}", e.kind()),
            }
        )
    })?;

    let mut repos = Vec::new();
    for content in contents {
        let entry = content.map_err(|e| format!("Error accessing directory: {}", e))?;
        let path = entry.path();
        // Skip symlinks to avoid cycles and double-counting.
        if path.is_symlink() {
            continue;
        }
        if path.is_dir() {
            repos.extend(find_repo_paths(&path)?);
        }
    }

    Ok(repos)
}
/// Synchronizes a single repository located at `root_path`/`repo.fullname()`:
/// clones or initializes it when missing and reconciles its configured
/// remotes (adding, updating and deleting remotes as necessary).
///
/// When `init_worktree` is set and a worktree-setup repository was freshly
/// cloned, a worktree for the default branch is checked out as well.
fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result<(), String> {
    let repo_path = root_path.join(&repo.fullname());
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut newly_created = false;

    if repo_path.exists() {
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        }
    } else {
        match repo.remotes.as_deref() {
            // Without remotes there is nothing to clone from; initialize an
            // empty repository instead.
            // NOTE(review): `newly_created` deliberately stays false here,
            // matching the previous behavior (no initial worktree is checked
            // out for freshly initialized repositories) — confirm intended.
            None | Some([]) => {
                print_repo_action(
                    &repo.name,
                    "Repository does not have remotes configured, initializing new",
                );
                match repo::RepoHandle::init(&repo_path, repo.worktree_setup) {
                    Ok(_) => print_repo_success(&repo.name, "Repository created"),
                    Err(e) => return Err(format!("Repository failed during init: {}", e)),
                }
            }
            // Clone from the first configured remote.
            Some([first, ..]) => {
                match repo::clone_repo(first, &repo_path, repo.worktree_setup) {
                    Ok(_) => print_repo_success(&repo.name, "Repository successfully cloned"),
                    Err(e) => return Err(format!("Repository failed during clone: {}", e)),
                }
                newly_created = true;
            }
        }
    }

    let repo_handle = match repo::RepoHandle::open(&repo_path, repo.worktree_setup) {
        Ok(repo) => repo,
        Err(error) => {
            // Give a more helpful message when the on-disk layout does not
            // match the configured one.
            if !repo.worktree_setup && repo::RepoHandle::open(&repo_path, true).is_ok() {
                return Err(String::from(
                    "Repo already exists, but is using a worktree setup",
                ));
            } else {
                return Err(format!("Opening repository failed: {}", error));
            }
        }
    };

    if newly_created && repo.worktree_setup && init_worktree {
        match repo_handle.default_branch() {
            Ok(branch) => {
                worktree::add_worktree(&repo_path, &branch.name()?, None, None, false)?;
            }
            Err(_error) => print_repo_error(
                &repo.name,
                "Could not determine default branch, skipping worktree initialization",
            ),
        }
    }

    if let Some(remotes) = &repo.remotes {
        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Create missing remotes and fix up URLs of existing ones.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;

            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();

                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        }
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }

        // Delete remotes that are no longer in the configuration.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }

    Ok(())
}
/// Returns the directory that actually holds the git data for a repository
/// at `path`: the path itself for a regular checkout, or the main-worktree
/// subdirectory for a worktree setup.
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
    if is_worktree {
        path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY)
    } else {
        path.to_path_buf()
    }
}

166
src/worktree.rs Normal file
View File

@@ -0,0 +1,166 @@
use std::path::Path;
use super::repo;
/// Name of the hidden directory that holds the bare repository data inside
/// a worktree-setup checkout.
pub const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
/// Creates a new worktree checkout called `name` inside `directory`.
///
/// `directory` must already contain a worktree setup (i.e. a bare repository
/// under [`GIT_MAIN_WORKTREE_DIRECTORY`]); otherwise an error is returned.
/// The checkout is placed directly below `directory`, or below
/// `directory/<subdirectory>` when `subdirectory` is given.
///
/// Branch selection and upstream tracking:
/// * `no_track` — start from the default branch's commit, configure no
///   upstream.
/// * explicit `track = (remote, branch)` — start from that remote branch if
///   it exists; otherwise start from the default branch and create the
///   remote branch by pushing. Either way the upstream is set.
/// * neither — consult the optional per-tree configuration
///   (`read_worktree_root_config`): when `track.default` is set, try to
///   track a same-named branch on the configured default remote.
///
/// Returns `Err(String)` with a human-readable message on any failure.
pub fn add_worktree(
    directory: &Path,
    name: &str,
    subdirectory: Option<&Path>,
    track: Option<(&str, &str)>,
    no_track: bool,
) -> Result<(), String> {
    // Open as a worktree setup; a missing repository gets a dedicated,
    // user-facing message instead of the generic open error.
    let repo = repo::RepoHandle::open(directory, true).map_err(|error| match error.kind {
        repo::RepoErrorKind::NotFound => {
            String::from("Current directory does not contain a worktree setup")
        }
        _ => format!("Error opening repo: {}", error),
    })?;

    // Optional per-tree configuration; may define default tracking behavior.
    let config = repo::read_worktree_root_config(directory)?;

    // Refuse to clobber an existing worktree of the same name.
    if repo.find_worktree(name).is_ok() {
        return Err(format!("Worktree {} already exists", &name));
    }

    // Filesystem location of the new checkout.
    let path = match subdirectory {
        Some(dir) => directory.join(dir).join(name),
        None => directory.join(Path::new(name)),
    };

    // Tracks whether the remote branch we intend to follow already exists,
    // so the tracking phase below knows whether it must push first.
    let mut remote_branch_exists = false;

    // Fallback starting point: head commit of the repository's default branch.
    let default_checkout = || repo.default_branch()?.to_commit();

    // Decide which commit the new branch starts from. `checkout_commit` is
    // assigned exactly once along each of the paths below.
    let checkout_commit;
    if no_track {
        checkout_commit = default_checkout()?;
    } else {
        match track {
            // Explicit tracking target given by the caller.
            Some((remote_name, remote_branch_name)) => {
                let remote_branch = repo.find_remote_branch(remote_name, remote_branch_name);
                match remote_branch {
                    Ok(branch) => {
                        remote_branch_exists = true;
                        checkout_commit = branch.to_commit()?;
                    }
                    // Remote branch missing: fall back to the default branch
                    // (it will be pushed into existence later).
                    Err(_) => {
                        remote_branch_exists = false;
                        checkout_commit = default_checkout()?;
                    }
                }
            }
            // No explicit target: consult per-tree configuration, if any.
            None => match &config {
                None => checkout_commit = default_checkout()?,
                Some(config) => match &config.track {
                    None => checkout_commit = default_checkout()?,
                    Some(track_config) => {
                        if track_config.default {
                            // Track a branch of the same name on the
                            // configured default remote, if it exists.
                            let remote_branch =
                                repo.find_remote_branch(&track_config.default_remote, name);
                            match remote_branch {
                                Ok(branch) => {
                                    remote_branch_exists = true;
                                    checkout_commit = branch.to_commit()?;
                                }
                                Err(_) => {
                                    checkout_commit = default_checkout()?;
                                }
                            }
                        } else {
                            checkout_commit = default_checkout()?;
                        }
                    }
                },
            },
        };
    }

    // Reuse an existing local branch of that name, or create one at the
    // commit selected above.
    let mut target_branch = match repo.find_local_branch(name) {
        Ok(branchref) => branchref,
        Err(_) => repo.create_branch(name, &checkout_commit)?,
    };

    // Helper: push `branch_name` to `remote_branch_name` on `remote`,
    // refusing remotes that cannot be pushed to.
    fn push(
        remote: &mut repo::RemoteHandle,
        branch_name: &str,
        remote_branch_name: &str,
        repo: &repo::RepoHandle,
    ) -> Result<(), String> {
        if !remote.is_pushable()? {
            return Err(format!(
                "Cannot push to non-pushable remote {}",
                remote.url()
            ));
        }
        remote.push(branch_name, remote_branch_name, repo)
    }

    // Tracking phase: wire up (and, if needed, create) the upstream branch.
    if !no_track {
        if let Some((remote_name, remote_branch_name)) = track {
            if remote_branch_exists {
                target_branch.set_upstream(remote_name, remote_branch_name)?;
            } else {
                // Remote branch does not exist yet: create it by pushing,
                // then record it as upstream.
                let mut remote = repo
                    .find_remote(remote_name)
                    .map_err(|error| format!("Error getting remote {}: {}", remote_name, error))?
                    .ok_or_else(|| format!("Remote {} not found", remote_name))?;
                push(
                    &mut remote,
                    &target_branch.name()?,
                    remote_branch_name,
                    &repo,
                )?;
                target_branch.set_upstream(remote_name, remote_branch_name)?;
            }
        } else if let Some(config) = config {
            if let Some(track_config) = config.track {
                if track_config.default {
                    let remote_name = track_config.default_remote;
                    if remote_branch_exists {
                        target_branch.set_upstream(&remote_name, name)?;
                    } else {
                        // Optional namespace prefix for the remote branch,
                        // e.g. "user" -> "user<SEP>name".
                        let remote_branch_name = match track_config.default_remote_prefix {
                            Some(prefix) => {
                                format!("{}{}{}", &prefix, super::BRANCH_NAMESPACE_SEPARATOR, &name)
                            }
                            None => name.to_string(),
                        };
                        let mut remote = repo
                            .find_remote(&remote_name)
                            .map_err(|error| {
                                format!("Error getting remote {}: {}", remote_name, error)
                            })?
                            .ok_or_else(|| format!("Remote {} not found", remote_name))?;
                        // NOTE(review): this pushability check is repeated
                        // inside `push()` just below — one of the two looks
                        // redundant.
                        if !remote.is_pushable()? {
                            return Err(format!(
                                "Cannot push to non-pushable remote {}",
                                remote.url()
                            ));
                        }
                        push(
                            &mut remote,
                            &target_branch.name()?,
                            &remote_branch_name,
                            &repo,
                        )?;
                        target_branch.set_upstream(&remote_name, &remote_branch_name)?;
                    }
                }
            }
        }
    }

    if let Some(subdirectory) = subdirectory {
        // NOTE(review): this creates `subdirectory` relative to the current
        // working directory, while the worktree path above is built from
        // `directory.join(subdirectory)` — confirm these agree when the
        // caller's cwd is not `directory`.
        std::fs::create_dir_all(subdirectory).map_err(|error| error.to_string())?;
    }

    // Finally create the worktree checkout pointing at the target branch.
    repo.new_worktree(name, &path, &target_branch)?;

    Ok(())
}

View File

@@ -8,13 +8,13 @@ use helpers::*;
fn open_empty_repo() {
    let tmpdir = init_tmpdir();
    assert!(matches!(
        RepoHandle::open(tmpdir.path(), true),
        Err(RepoError {
            kind: RepoErrorKind::NotFound
        })
    ));
    assert!(matches!(
        RepoHandle::open(tmpdir.path(), false),
        Err(RepoError {
            kind: RepoErrorKind::NotFound
        })
    ));
@@ -25,7 +25,7 @@ fn open_empty_repo() {
#[test]
fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
    let tmpdir = init_tmpdir();
    let repo = RepoHandle::init(tmpdir.path(), false)?;
    assert!(!repo.is_bare());
    assert!(repo.is_empty()?);
    cleanup_tmpdir(tmpdir);
@@ -35,7 +35,7 @@ fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
#[test]
fn create_repo_with_worktree() -> Result<(), Box<dyn std::error::Error>> {
    let tmpdir = init_tmpdir();
    let repo = RepoHandle::init(tmpdir.path(), true)?;
    assert!(repo.is_bare());
    assert!(repo.is_empty()?);
    cleanup_tmpdir(tmpdir);