diff --git a/Cargo.lock b/Cargo.lock index b45a897..c356d4e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11,6 +11,17 @@ dependencies = [ "memchr", ] +[[package]] +name = "async-channel" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + [[package]] name = "atty" version = "0.2.14" @@ -34,6 +45,24 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + +[[package]] +name = "cache-padded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c" + +[[package]] +name = "castaway" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" + [[package]] name = "cc" version = "1.0.73" @@ -100,6 +129,15 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "concurrent-queue" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3" +dependencies = [ + "cache-padded", +] + [[package]] name = "console" version = "0.15.0" @@ -115,6 +153,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +dependencies = [ + "cfg-if", + "lazy_static", +] + [[package]] name = "crossterm" version = 
"0.23.2" @@ -140,6 +188,37 @@ dependencies = [ "winapi", ] +[[package]] +name = "curl" +version = "0.4.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d855aeef205b43f65a5001e0997d81f8efca7badad4fad7d897aa7f0d0651f" +dependencies = [ + "curl-sys", + "libc", + "openssl-probe", + "openssl-sys", + "schannel", + "socket2", + "winapi", +] + +[[package]] +name = "curl-sys" +version = "0.4.55+curl-7.83.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23734ec77368ec583c2e61dd3f0b0e5c98b93abe6d2a004ca06b91dd7e3e2762" +dependencies = [ + "cc", + "libc", + "libnghttp2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", + "winapi", +] + [[package]] name = "dirs-next" version = "2.0.0" @@ -167,6 +246,36 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +[[package]] +name = "encoding_rs" +version = "0.8.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "event-listener" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" + +[[package]] +name = "fastrand" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +dependencies = [ + "instant", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "form_urlencoded" version = "1.0.1" @@ -183,6 +292,33 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" +[[package]] +name = "futures-core" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" + +[[package]] +name = "futures-io" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" + +[[package]] +name = "futures-lite" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + [[package]] name = "getrandom" version = "0.2.6" @@ -202,8 +338,11 @@ dependencies = [ "comfy-table", "console", "git2", + "isahc", + "parse_link_header", "regex", "serde", + "serde_json", "serde_yaml", "shellexpand", "tempdir", @@ -255,6 +394,17 @@ dependencies = [ "libc", ] +[[package]] +name = "http" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff8670570af52249509a86f5e3e18a08c60b177071826898fde8997cf5f6bfbb" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.2", +] + [[package]] name = "idna" version = "0.2.3" @@ -276,6 +426,56 @@ dependencies = [ "hashbrown", ] +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "isahc" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "480d9158c9977bff0bc024a11dcad04efcd3955c1e55301092b13fc439d41720" +dependencies = [ + "async-channel", + "castaway", + "crossbeam-utils", + "curl", + "curl-sys", + "encoding_rs", + "event-listener", + 
"futures-lite", + "http", + "log", + "mime", + "once_cell", + "polling", + "serde", + "serde_json", + "slab", + "sluice", + "tracing", + "tracing-futures", + "url", + "waker-fn", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" + [[package]] name = "jobserver" version = "0.1.24" @@ -311,6 +511,16 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "libnghttp2-sys" +version = "0.1.7+1.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "libssh2-sys" version = "0.2.23" @@ -374,6 +584,12 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + [[package]] name = "mio" version = "0.8.3" @@ -417,6 +633,12 @@ version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa" +[[package]] +name = "parking" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" + [[package]] name = "parking_lot" version = "0.12.0" @@ -440,18 +662,68 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "parse_link_header" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "40728c9c01de984c45f49385ab054fdc31cd3322658a6934347887e72cb48df9" +dependencies = [ + "http", + "lazy_static", + "regex", +] + [[package]] name = "percent-encoding" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +[[package]] +name = "pin-project" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + [[package]] name = "pkg-config" version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" +[[package]] +name = "polling" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" +dependencies = [ + "cfg-if", + "libc", + "log", + "wepoll-ffi", + "winapi", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -589,6 +861,16 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +[[package]] +name = "schannel" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" +dependencies = [ + "lazy_static", + "windows-sys", +] + [[package]] name = "scopeguard" version = "1.1.0" @@ -615,6 +897,17 @@ dependencies = [ "syn", ] +[[package]] +name = "serde_json" +version = "1.0.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95" +dependencies = [ + "itoa 0.4.8", + "ryu", + "serde", +] + [[package]] name = "serde_yaml" version = "0.8.24" @@ -666,12 +959,39 @@ dependencies = [ "libc", ] +[[package]] +name = "slab" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" + +[[package]] +name = "sluice" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" +dependencies = [ + "async-channel", + "futures-core", + "futures-io", +] + [[package]] name = "smallvec" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +[[package]] +name = "socket2" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "strsim" version = "0.10.0" @@ -787,6 +1107,49 @@ dependencies = [ "serde", ] +[[package]] +name = "tracing" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.21" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f54c8ca710e81886d498c2fd3331b56c93aa248d49de2222ad2742247c60072f" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + [[package]] name = "unicode-bidi" version = "0.3.8" @@ -844,6 +1207,12 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + [[package]] name = "wasi" version = "0.10.2+wasi-snapshot-preview1" @@ -856,6 +1225,15 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wepoll-ffi" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" +dependencies = [ + "cc", +] + [[package]] name = "winapi" version = "0.3.9" diff --git a/Cargo.toml b/Cargo.toml index 974bbe0..25fed11 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,9 @@ rust-version = "1.57" license = "GPL-3.0-only" +[profile.e2e-tests] +inherits = "release" + [lib] name = "grm" path = "src/lib.rs" @@ -65,5 +68,15 @@ version = "=5.0.1" [dependencies.serde_yaml] version = 
"=0.8.24" +[dependencies.serde_json] +version = "=1.0.59" + +[dependencies.isahc] +version = "=1.7.1" +features = ["json"] + +[dependencies.parse_link_header] +version = "=0.3.2" + [dev-dependencies.tempdir] version = "=0.3.7" diff --git a/Justfile b/Justfile index bf74f55..84e7886 100644 --- a/Justfile +++ b/Justfile @@ -1,4 +1,6 @@ -check: check-cargo-lock check-pip-requirements test +set positional-arguments + +check: check-cargo-lock test cargo check cargo fmt --check cargo clippy --no-deps -- -Dwarnings @@ -12,6 +14,18 @@ lint-fix: release: cargo build --release +test-binary-docker: + env \ + GITHUB_API_BASEURL=http://rest:5000/github \ + GITLAB_API_BASEURL=http://rest:5000/gitlab \ + cargo build --profile e2e-tests + +test-binary: + env \ + GITHUB_API_BASEURL=http://localhost:5000/github \ + GITLAB_API_BASEURL=http://localhost:5000/gitlab \ + cargo build --profile e2e-tests + install: cargo install --path . @@ -23,19 +37,26 @@ test-unit: test-integration: cargo test --test "*" -e2e-venv: +test-e2e-docker +tests=".": test-binary-docker cd ./e2e_tests \ - && python3 -m venv venv \ - && . ./venv/bin/activate \ - && pip --disable-pip-version-check install -r ./requirements.txt >/dev/null + && docker-compose rm --stop -f \ + && docker-compose build \ + && docker-compose run \ + --rm \ + -v $PWD/../target/e2e-tests/grm:/grm \ + pytest \ + "GRM_BINARY=/grm python3 ALTERNATE_DOMAIN=alternate-rest -m pytest -p no:cacheprovider --color=yes "$@"" \ + && docker-compose rm --stop -f - -test-e2e +tests=".": e2e-venv release +test-e2e +tests=".": test-binary cd ./e2e_tests \ - && . 
./venv/bin/activate \ && TMPDIR=/dev/shm python -m pytest --color=yes {{tests}} + && docker-compose rm --stop -f \ + && docker-compose build \ + && docker-compose up -d rest \ + && GRM_BINARY={{justfile_directory()}}/target/e2e-tests/grm ALTERNATE_DOMAIN=127.0.0.1 python3 -m pytest -p no:cacheprovider --color=yes {{tests}} \ + && docker-compose rm --stop -f -update-dependencies: update-cargo-dependencies update-pip-requirements +update-dependencies: update-cargo-dependencies update-cargo-dependencies: @cd ./depcheck \ @@ -43,15 +64,3 @@ update-cargo-dependencies: && . ./venv/bin/activate \ && pip --disable-pip-version-check install -r ./requirements.txt > /dev/null \ && ./update-cargo-dependencies.py - -update-pip-requirements: e2e-venv - @cd ./e2e_tests \ - && ./update_requirementstxt.sh - -check-pip-requirements: e2e-venv - @cd ./e2e_tests \ - && . ./venv/bin/activate \ - && pip list --outdated | grep -q '.' && exit 1 || exit 0 - -clean: - cargo clean diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 5af1148..cd7b635 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -4,5 +4,6 @@ - [Getting started](./getting_started.md) - [Repository trees](./repos.md) - [Git Worktrees](./worktrees.md) +- [Forge Integrations](./forge_integration.md) - [FAQ](./faq.md) - [Contributing](./contributing.md) diff --git a/docs/src/forge_integration.md b/docs/src/forge_integration.md new file mode 100644 index 0000000..f9382a7 --- /dev/null +++ b/docs/src/forge_integration.md @@ -0,0 +1,205 @@ +# Forge Integrations + +In addition to managing repositories locally, `grm` also integrates with source +code hosting platforms. Right now, the following platforms are supported: + +* [GitHub](https://github.com/) +* [GitLab](https://gitlab.com/) + +Imagine you are just starting out with `grm` and want to clone all your repositories +from GitHub.
This is as simple as: + +```bash +$ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --root ~/projects +``` + +You will end up with your projects cloned into `~/projects/{your_github_username}/` + +## Authentication + +The only currently supported authentication option is using personal access +token. + +### GitHub + +See the GitHub documentation for personal access tokens: +[Link](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token). + +The only required permission is the "repo" scope. + +### GitLab + +See the GitLab documentation for personal access tokens: +[Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html). + +The required scopes are a bit weird. Actually, the following should suffice: + +* `read_user` to get user information (required to get the current authenticated + user name for the `--owner` filter). +* A scope that allows reading private repositories. (`read_repository` is just + for *cloning* private repos). This unfortunately does not exist. + +So currently, you'll need to select the `read_api` scope. + +## Filters + +By default, `grm` will sync **nothing**. This is quite boring, so you have to +tell the command what repositories to include. They are all inclusive (i.e. act +as a logical OR), so you can easily chain many filters to clone a bunch of +repositories. It's quite simple: + +* `--user <user>` syncs all repositories of that remote user +* `--group <group>` syncs all repositories of that remote group/organization +* `--owner` syncs all repositories of the user that is used for authentication. + This is effectively a shortcut for `--user $YOUR_USER` +* `--access` syncs all repositories that the current user has access to + +Easiest to see in an example: + +```bash +$ grm repos sync remote --provider github --user torvals --owner --group zalando [...]
+``` + +This would sync all of Torvald's repositories, all of my own repositories and +all (public) repositories in the "zalando" group. + +## Strategies + +There are generally three ways how you can use `grm` with forges: + +### Ad-hoc cloning + +This is the easiest, there are no local files involved. You just run the +command, `grm` clones the repos, that's it. If you run the command again, `grm` +will figure out the differences between local and remote repositories and +resolve them locally. + +### Create a file + +This is effectively `grm repos find local`, but using the forge instead of the +local file system. You will end up with a normal repository file that you can +commit to git. To update the list of repositories, just run the command again +and commit the new file. + +### Define options in a file + +This is a hybrid approach: You define filtering options in a file that you can +commit to source control. Effectively, you are persisting the options you gave +to `grm` on the command line with the ad-hoc approach. Similarly, `grm` will +figure out differences between local and remote and resolve them. + +A file would look like this: + +```toml +provider = "github" +token_command = "cat ~/.github_token" +root = "~/projects" + +[filters] +owner = true +groups = [ + "zalando" +] +``` + +The options in the file map to the command line options of the `grm repos sync +remote` command. + +You'd then run the `grm repos sync` command the same way as with a list of +repositories in a config: + +```bash +$ grm repos sync --config example.config.toml +``` + +You can even use that file to generate a repository list that you can feed into +`grm repos sync`: + +```bash +$ grm repos find config --config example.config.toml > repos.toml +$ grm repos sync config --config repos.toml +``` + +## Using with selfhosted GitLab + +By default, `grm` uses the default GitLab API endpoint +([https://gitlab.com](https://gitlab.com)). 
You can override the +endpoint by specifying the `--api-url` parameter. Like this: + +```bash +$ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...] +``` + +## The cloning protocol + +By default, `grm` will use HTTPS for public repositories and SSH otherwise. This +can be overridden with the `--force-ssh` switch. + +## About the token command + +To ensure maximum flexibility, `grm` has a single way to get the token it uses +to authenticate: Specify a command that returns the token via stdout. This easily +integrates with password managers like [`pass`](https://www.passwordstore.org/). + +Of course, you are also free to specify something like `echo mytoken` as the +command, as long as you are ok with the security implications (like having the +token in cleartext in your shell history). It may be better to have the token +in a file instead and read it: `cat ~/.gitlab_token`. + +Generally, use whatever you want. The command just has to return successfully and +return the token as the first line of stdout. + +## Examples + +Maybe you just want to locally clone all repos from your github user? + +```bash +$ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token" +``` + +This will clone all repositories into `~/github_projects/{your_github_username}`. + +If instead you want to clone **all** repositories you have access to (e.g. via +organizations or other users' private repos you have access to), just change the +filter a little bit: + +```bash +$ grm repos sync remote --provider github --access --root ~/github_projects --token-command "pass show github_grm_access_token" +``` + +## Limitations + +### GitHub + +Unfortunately, GitHub does not have a nice API endpoint to get **private** +repositories for a certain user ([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user) only returns public +repositories).
+ +Therefore, using `--user {user}` will only show public repositories for GitHub. +Note that this does not apply to `--access`: If you have access to another user's +private repository, it will be listed. + +## Adding integrations + +Adding a new integration involves writing some Rust code. Most of the logic is +generic, so you will not have to reinvent the wheel. Generally, you will need to +gather the following information: + +* A list of repositories for a single user +* A list of repositories for a group (or any similar concept if applicable) +* A list of repositories for the user that the API token belongs to +* The username of the currently authenticated user + +Authentication currently only works via a bearer token passed via the +`Authorization` HTTP header. + +Each repo has to have the following properties: + +* A name (which also acts as the identifier for diff between local and remote + repositories) +* An SSH url to push to +* An HTTPS url to clone and fetch from +* A flag that marks the repository as private + +If you plan to implement another forge, please first open an issue so we can +go through the required setup. I'm happy to help! diff --git a/docs/src/repos.md b/docs/src/repos.md index 69004f8..cd8fe10 100644 --- a/docs/src/repos.md +++ b/docs/src/repos.md @@ -17,7 +17,7 @@ Then, you're ready to run the first sync. This will clone all configured reposit and set up the remotes. 
```bash -$ grm repos sync --config example.config.toml +$ grm repos sync config --config example.config.toml [⚙] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git" [✔] git-repo-manager: Repository successfully cloned [⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git" @@ -30,7 +30,7 @@ $ grm repos sync --config example.config.toml If you run it again, it will report no changes: ``` -$ grm repos sync --config example.config.toml +$ grm repos sync config -c example.config.toml [✔] git-repo-manager: OK [✔] dotfiles: OK ``` @@ -42,7 +42,7 @@ a configuration from scratch. Luckily, GRM has a way to generate a configuration from an existing file tree: ```bash -$ grm repos find ~/your/project/root > config.toml +$ grm repos find local ~/your/project/root > config.toml ``` This will detect all repositories and remotes and write them to `config.toml`. diff --git a/e2e_tests/.gitignore b/e2e_tests/.gitignore deleted file mode 100644 index e79509f..0000000 --- a/e2e_tests/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/venv/ -/__pycache__/ diff --git a/e2e_tests/conftest.py b/e2e_tests/conftest.py new file mode 100644 index 0000000..ac3ba25 --- /dev/null +++ b/e2e_tests/conftest.py @@ -0,0 +1,8 @@ +import os + + +def pytest_configure(config): + os.environ["GIT_AUTHOR_NAME"] = "Example user" + os.environ["GIT_AUTHOR_EMAIL"] = "user@example.com" + os.environ["GIT_COMMITTER_NAME"] = "Example user" + os.environ["GIT_COMMITTER_EMAIL"] = "user@example.com" diff --git a/e2e_tests/docker-compose.yml b/e2e_tests/docker-compose.yml new file mode 100644 index 0000000..f75f12a --- /dev/null +++ b/e2e_tests/docker-compose.yml @@ -0,0 +1,34 @@ +version: "3.7" + +services: + pytest: + build: ./docker + volumes: + - type: bind + source: ./ + target: /tests + read_only: true + - type: tmpfs + target: /tmp + environment: + TMPDIR: /tmp + depends_on: + - rest + command: + - "true" + 
networks: + main: + + rest: + build: ./docker-rest/ + expose: + - "5000" + ports: + - "5000:5000" + networks: + main: + aliases: + - alternate-rest + +networks: + main: diff --git a/e2e_tests/docker-rest/Dockerfile b/e2e_tests/docker-rest/Dockerfile new file mode 100644 index 0000000..b4abb91 --- /dev/null +++ b/e2e_tests/docker-rest/Dockerfile @@ -0,0 +1,19 @@ +FROM docker.io/debian:11.3 + +WORKDIR /app + +ENV FLASK_APP=app.py + +RUN apt-get update \ + && apt-get install -y \ + dumb-init \ + python3-flask \ + python3-jinja2 \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +EXPOSE 5000 + +COPY flask . + +CMD ["/usr/bin/dumb-init", "--", "flask", "run", "--port", "5000", "--host", "0.0.0.0"] diff --git a/e2e_tests/docker-rest/flask/app.py b/e2e_tests/docker-rest/flask/app.py new file mode 100644 index 0000000..e22c533 --- /dev/null +++ b/e2e_tests/docker-rest/flask/app.py @@ -0,0 +1,7 @@ +from flask import Flask + +app = Flask(__name__) +app.url_map.strict_slashes = False + +import github +import gitlab diff --git a/e2e_tests/docker-rest/flask/github.py b/e2e_tests/docker-rest/flask/github.py new file mode 100644 index 0000000..6a1f29d --- /dev/null +++ b/e2e_tests/docker-rest/flask/github.py @@ -0,0 +1,103 @@ +import os.path + +from app import app + +from flask import Flask, request, abort, jsonify, make_response + +import jinja2 + + +def check_headers(): + if request.headers.get("accept") != "application/vnd.github.v3+json": + app.logger.error("Invalid accept header") + abort(500) + auth_header = request.headers.get("authorization") + if auth_header != "token authtoken": + app.logger.error("Invalid authorization header: %s", auth_header) + abort( + make_response( + jsonify( + { + "message": "Bad credentials", + "documentation_url": "https://docs.example.com/rest", + } + ), + 401, + ) + ) + + +def add_pagination(response, page, last_page): + host = request.headers["host"] + link_header = "" + + def args(page): + args = request.args.copy() + args["page"] = 
page + return "&".join([f"{k}={v}" for k, v in args.items()]) + + if page < last_page: + link_header += ( + f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", ' + ) + link_header += ( + f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"' + ) + response.headers["link"] = link_header + + +def read_project_files(namespaces=[]): + last_page = 4 + page = username = int(request.args.get("page", "1")) + response_file = f"./github_api_page_{page}.json.j2" + if not os.path.exists(response_file): + return jsonify([]) + + response = make_response( + jinja2.Template(open(response_file).read()).render( + namespace=namespaces[page - 1] + ) + ) + add_pagination(response, page, last_page) + response.headers["content-type"] = "application/json" + return response + + +def single_namespaced_projects(namespace): + return read_project_files([namespace] * 4) + + +def mixed_projects(namespaces): + return read_project_files(namespaces) + + +@app.route("/github/users//repos/") +def github_user_repos(user): + check_headers() + if user == "myuser1": + return single_namespaced_projects("myuser1") + return jsonify([]) + + +@app.route("/github/orgs//repos/") +def github_group_repos(group): + check_headers() + if not (request.args.get("type") == "all"): + abort(500, "wrong arguments") + if group == "mygroup1": + return single_namespaced_projects("mygroup1") + return jsonify([]) + + +@app.route("/github/user/repos/") +def github_own_repos(): + check_headers() + return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"]) + + +@app.route("/github/user/") +def github_user(): + check_headers() + response = make_response(open("./github_api_user.json").read()) + response.headers["content-type"] = "application/json" + return response diff --git a/e2e_tests/docker-rest/flask/github_api_page_1.json.j2 b/e2e_tests/docker-rest/flask/github_api_page_1.json.j2 new file mode 100644 index 0000000..9606852 --- /dev/null +++ 
b/e2e_tests/docker-rest/flask/github_api_page_1.json.j2 @@ -0,0 +1,228 @@ +[ + { + "id": 1, + "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", + "name": "myproject1", + "full_name": "{{ namespace }}/myproject1", + "private": true, + "owner": { + "login": "someuser", + "id": 1, + "node_id": "MDQ6VXNlcjM3NDg2OTY=", + "avatar_url": "https://example.com/u/3748696?v=4", + "gravatar_id": "", + "url": "https://api.example.com/users/{{ namespace }}", + "html_url": "https://example.com/{{ namespace }}", + "followers_url": "https://api.example.com/users/{{ namespace }}/followers", + "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", + "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", + "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", + "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", + "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", + "repos_url": "https://api.example.com/users/{{ namespace }}/repos", + "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", + "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", + "type": "User", + "site_admin": false + }, + "html_url": "https://example.com/{{ namespace }}/myproject1", + "description": "Shell script for automatically building ACI containers from scratch using acbuild.", + "fork": false, + "url": "https://api.example.com/repos/{{ namespace }}/myproject1", + "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/forks", + "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject1/keys{/key_id}", + "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject1/collaborators{/collaborator}", + "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject1/teams", + "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/hooks", + 
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/events{/number}", + "events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/events", + "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/assignees{/user}", + "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject1/branches{/branch}", + "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/tags", + "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/blobs{/sha}", + "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/tags{/sha}", + "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/refs{/sha}", + "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/trees{/sha}", + "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject1/statuses/{sha}", + "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject1/languages", + "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/stargazers", + "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contributors", + "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscribers", + "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscription", + "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/commits{/sha}", + "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/commits{/sha}", + "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/comments{/number}", + "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/comments{/number}", + "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contents/{+path}", + "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject1/compare/{base}...{head}", + "merges_url": 
"https://api.example.com/repos/{{ namespace }}/myproject1/merges", + "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject1/{archive_format}{/ref}", + "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject1/downloads", + "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues{/number}", + "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject1/pulls{/number}", + "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject1/milestones{/number}", + "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject1/notifications{?since,all,participating}", + "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject1/labels{/name}", + "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject1/releases{/id}", + "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/deployments", + "created_at": "2016-01-07T22:27:54Z", + "updated_at": "2021-11-20T16:15:37Z", + "pushed_at": "2021-11-20T16:15:34Z", + "git_url": "git://example.com/{{ namespace }}/myproject1.git", + "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject1.git", + "clone_url": "https://example.com/{{ namespace }}/myproject1.git", + "svn_url": "https://example.com/{{ namespace }}/myproject1", + "homepage": null, + "size": 12, + "stargazers_count": 0, + "watchers_count": 0, + "language": "Shell", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 0, + "license": { + "key": "apache-2.0", + "name": "Apache License 2.0", + "spdx_id": "Apache-2.0", + "url": "https://api.example.com/licenses/apache-2.0", + "node_id": "MDc6TGljZW5zZTI=" + }, + "allow_forking": true, + "is_template": false, + "topics": [ + + ], + "visibility": "public", + "forks": 0, + "open_issues": 0, + "watchers": 0, + 
"default_branch": "master", + "permissions": { + "admin": true, + "maintain": true, + "push": true, + "triage": true, + "pull": true + } + }, + { + "id": 2, + "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", + "name": "myproject2", + "full_name": "{{ namespace }}/myproject2", + "private": false, + "owner": { + "login": "someuser", + "id": 1, + "node_id": "MDQ6VXNlcjM3NDg2OTY=", + "avatar_url": "https://example.com/u/3748696?v=4", + "gravatar_id": "", + "url": "https://api.example.com/users/{{ namespace }}", + "html_url": "https://example.com/{{ namespace }}", + "followers_url": "https://api.example.com/users/{{ namespace }}/followers", + "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", + "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", + "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", + "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", + "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", + "repos_url": "https://api.example.com/users/{{ namespace }}/repos", + "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", + "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", + "type": "User", + "site_admin": false + }, + "html_url": "https://example.com/{{ namespace }}/myproject2", + "description": "Shell script for automatically building ACI containers from scratch using acbuild.", + "fork": false, + "url": "https://api.example.com/repos/{{ namespace }}/myproject2", + "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/forks", + "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject2/keys{/key_id}", + "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject2/collaborators{/collaborator}", + "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject2/teams", + "hooks_url": 
"https://api.example.com/repos/{{ namespace }}/myproject2/hooks", + "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/events{/number}", + "events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/events", + "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/assignees{/user}", + "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject2/branches{/branch}", + "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/tags", + "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/blobs{/sha}", + "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/tags{/sha}", + "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/refs{/sha}", + "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/trees{/sha}", + "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject2/statuses/{sha}", + "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject2/languages", + "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/stargazers", + "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contributors", + "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscribers", + "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscription", + "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/commits{/sha}", + "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/commits{/sha}", + "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/comments{/number}", + "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/comments{/number}", + "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contents/{+path}", + "compare_url": "https://api.example.com/repos/{{ namespace 
}}/myproject2/compare/{base}...{head}", + "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject2/merges", + "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject2/{archive_format}{/ref}", + "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject2/downloads", + "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues{/number}", + "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject2/pulls{/number}", + "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject2/milestones{/number}", + "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject2/notifications{?since,all,participating}", + "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject2/labels{/name}", + "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject2/releases{/id}", + "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/deployments", + "created_at": "2016-01-07T22:27:54Z", + "updated_at": "2021-11-20T16:15:37Z", + "pushed_at": "2021-11-20T16:15:34Z", + "git_url": "git://example.com/{{ namespace }}/myproject2.git", + "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject2.git", + "clone_url": "https://example.com/{{ namespace }}/myproject2.git", + "svn_url": "https://example.com/{{ namespace }}/myproject2", + "homepage": null, + "size": 12, + "stargazers_count": 0, + "watchers_count": 0, + "language": "Shell", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 0, + "license": { + "key": "apache-2.0", + "name": "Apache License 2.0", + "spdx_id": "Apache-2.0", + "url": "https://api.example.com/licenses/apache-2.0", + "node_id": "MDc6TGljZW5zZTI=" + }, + "allow_forking": true, + "is_template": false, + "topics": [ + + ], + "visibility": "public", + 
"forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "master", + "permissions": { + "admin": true, + "maintain": true, + "push": true, + "triage": true, + "pull": true + } + } +] diff --git a/e2e_tests/docker-rest/flask/github_api_page_2.json.j2 b/e2e_tests/docker-rest/flask/github_api_page_2.json.j2 new file mode 100644 index 0000000..c195453 --- /dev/null +++ b/e2e_tests/docker-rest/flask/github_api_page_2.json.j2 @@ -0,0 +1,115 @@ +[ + { + "id": 3, + "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", + "name": "myproject3", + "full_name": "{{ namespace }}/myproject3", + "private": false, + "owner": { + "login": "someuser", + "id": 1, + "node_id": "MDQ6VXNlcjM3NDg2OTY=", + "avatar_url": "https://example.com/u/3748696?v=4", + "gravatar_id": "", + "url": "https://api.example.com/users/{{ namespace }}", + "html_url": "https://example.com/{{ namespace }}", + "followers_url": "https://api.example.com/users/{{ namespace }}/followers", + "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", + "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", + "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", + "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", + "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", + "repos_url": "https://api.example.com/users/{{ namespace }}/repos", + "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", + "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", + "type": "User", + "site_admin": false + }, + "html_url": "https://example.com/{{ namespace }}/myproject3", + "description": "Shell script for automatically building ACI containers from scratch using acbuild.", + "fork": false, + "url": "https://api.example.com/repos/{{ namespace }}/myproject3", + "forks_url": "https://api.example.com/repos/{{ namespace 
}}/myproject3/forks", + "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject3/keys{/key_id}", + "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject3/collaborators{/collaborator}", + "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject3/teams", + "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/hooks", + "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/events{/number}", + "events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/events", + "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/assignees{/user}", + "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject3/branches{/branch}", + "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/tags", + "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/blobs{/sha}", + "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/tags{/sha}", + "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/refs{/sha}", + "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/trees{/sha}", + "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject3/statuses/{sha}", + "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject3/languages", + "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/stargazers", + "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contributors", + "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscribers", + "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscription", + "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/commits{/sha}", + "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/commits{/sha}", + "comments_url": 
"https://api.example.com/repos/{{ namespace }}/myproject3/comments{/number}", + "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/comments{/number}", + "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contents/{+path}", + "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject3/compare/{base}...{head}", + "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject3/merges", + "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject3/{archive_format}{/ref}", + "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject3/downloads", + "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues{/number}", + "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject3/pulls{/number}", + "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject3/milestones{/number}", + "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject3/notifications{?since,all,participating}", + "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject3/labels{/name}", + "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject3/releases{/id}", + "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/deployments", + "created_at": "2016-01-07T22:27:54Z", + "updated_at": "2021-11-20T16:15:37Z", + "pushed_at": "2021-11-20T16:15:34Z", + "git_url": "git://example.com/{{ namespace }}/myproject3.git", + "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject3.git", + "clone_url": "https://example.com/{{ namespace }}/myproject3.git", + "svn_url": "https://example.com/{{ namespace }}/myproject3", + "homepage": null, + "size": 12, + "stargazers_count": 0, + "watchers_count": 0, + "language": "Shell", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, 
+ "disabled": false, + "open_issues_count": 0, + "license": { + "key": "apache-2.0", + "name": "Apache License 2.0", + "spdx_id": "Apache-2.0", + "url": "https://api.example.com/licenses/apache-2.0", + "node_id": "MDc6TGljZW5zZTI=" + }, + "allow_forking": true, + "is_template": false, + "topics": [ + + ], + "visibility": "public", + "forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "master", + "permissions": { + "admin": true, + "maintain": true, + "push": true, + "triage": true, + "pull": true + } + } +] diff --git a/e2e_tests/docker-rest/flask/github_api_page_3.json.j2 b/e2e_tests/docker-rest/flask/github_api_page_3.json.j2 new file mode 100644 index 0000000..003f331 --- /dev/null +++ b/e2e_tests/docker-rest/flask/github_api_page_3.json.j2 @@ -0,0 +1,115 @@ +[ + { + "id": 3, + "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", + "name": "myproject4", + "full_name": "{{ namespace }}/myproject4", + "private": false, + "owner": { + "login": "someuser", + "id": 1, + "node_id": "MDQ6VXNlcjM3NDg2OTY=", + "avatar_url": "https://example.com/u/3748696?v=4", + "gravatar_id": "", + "url": "https://api.example.com/users/{{ namespace }}", + "html_url": "https://example.com/{{ namespace }}", + "followers_url": "https://api.example.com/users/{{ namespace }}/followers", + "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", + "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", + "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", + "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", + "organizations_url": "https://api.example.com/users/{{ namespace }}/orgs", + "repos_url": "https://api.example.com/users/{{ namespace }}/repos", + "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", + "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", + "type": "User", + "site_admin": false + 
}, + "html_url": "https://example.com/{{ namespace }}/myproject4", + "description": "Shell script for automatically building ACI containers from scratch using acbuild.", + "fork": false, + "url": "https://api.example.com/repos/{{ namespace }}/myproject4", + "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/forks", + "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject4/keys{/key_id}", + "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject4/collaborators{/collaborator}", + "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject4/teams", + "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/hooks", + "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/events{/number}", + "events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/events", + "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/assignees{/user}", + "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject4/branches{/branch}", + "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/tags", + "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/blobs{/sha}", + "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/tags{/sha}", + "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/refs{/sha}", + "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/trees{/sha}", + "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject4/statuses/{sha}", + "languages_url": "https://api.example.com/repos/{{ namespace }}/myproject4/languages", + "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/stargazers", + "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contributors", + "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscribers", + 
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscription", + "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/commits{/sha}", + "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/commits{/sha}", + "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/comments{/number}", + "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/comments{/number}", + "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contents/{+path}", + "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject4/compare/{base}...{head}", + "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject4/merges", + "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject4/{archive_format}{/ref}", + "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject4/downloads", + "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues{/number}", + "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject4/pulls{/number}", + "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject4/milestones{/number}", + "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject4/notifications{?since,all,participating}", + "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject4/labels{/name}", + "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject4/releases{/id}", + "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/deployments", + "created_at": "2016-01-07T22:27:54Z", + "updated_at": "2021-11-20T16:15:37Z", + "pushed_at": "2021-11-20T16:15:34Z", + "git_url": "git://example.com/{{ namespace }}/myproject4.git", + "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject4.git", + "clone_url": "https://example.com/{{ namespace }}/myproject4.git", + "svn_url": "https://example.com/{{ 
namespace }}/myproject4", + "homepage": null, + "size": 12, + "stargazers_count": 0, + "watchers_count": 0, + "language": "Shell", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 0, + "license": { + "key": "apache-2.0", + "name": "Apache License 2.0", + "spdx_id": "Apache-2.0", + "url": "https://api.example.com/licenses/apache-2.0", + "node_id": "MDc6TGljZW5zZTI=" + }, + "allow_forking": true, + "is_template": false, + "topics": [ + + ], + "visibility": "public", + "forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "master", + "permissions": { + "admin": true, + "maintain": true, + "push": true, + "triage": true, + "pull": true + } + } +] diff --git a/e2e_tests/docker-rest/flask/github_api_page_4.json.j2 b/e2e_tests/docker-rest/flask/github_api_page_4.json.j2 new file mode 100644 index 0000000..e755be2 --- /dev/null +++ b/e2e_tests/docker-rest/flask/github_api_page_4.json.j2 @@ -0,0 +1,115 @@ +[ + { + "id": 3, + "node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==", + "name": "myproject5", + "full_name": "{{ namespace }}/myproject5", + "private": false, + "owner": { + "login": "someuser", + "id": 1, + "node_id": "MDQ6VXNlcjM3NDg2OTY=", + "avatar_url": "https://example.com/u/3748696?v=4", + "gravatar_id": "", + "url": "https://api.example.com/users/{{ namespace }}", + "html_url": "https://example.com/{{ namespace }}", + "followers_url": "https://api.example.com/users/{{ namespace }}/followers", + "following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}", + "gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}", + "starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}", + "subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions", + "organizations_url": 
"https://api.example.com/users/{{ namespace }}/orgs", + "repos_url": "https://api.example.com/users/{{ namespace }}/repos", + "events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}", + "received_events_url": "https://api.example.com/users/{{ namespace }}/received_events", + "type": "User", + "site_admin": false + }, + "html_url": "https://example.com/{{ namespace }}/myproject5", + "description": "Shell script for automatically building ACI containers from scratch using acbuild.", + "fork": false, + "url": "https://api.example.com/repos/{{ namespace }}/myproject5", + "forks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/forks", + "keys_url": "https://api.example.com/repos/{{ namespace }}/myproject5/keys{/key_id}", + "collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject5/collaborators{/collaborator}", + "teams_url": "https://api.example.com/repos/{{ namespace }}/myproject5/teams", + "hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/hooks", + "issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/events{/number}", + "events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/events", + "assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/assignees{/user}", + "branches_url": "https://api.example.com/repos/{{ namespace }}/myproject5/branches{/branch}", + "tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/tags", + "blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/blobs{/sha}", + "git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/tags{/sha}", + "git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/refs{/sha}", + "trees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/trees{/sha}", + "statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject5/statuses/{sha}", + "languages_url": 
"https://api.example.com/repos/{{ namespace }}/myproject5/languages", + "stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/stargazers", + "contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contributors", + "subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscribers", + "subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscription", + "commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/commits{/sha}", + "git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/commits{/sha}", + "comments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/comments{/number}", + "issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/comments{/number}", + "contents_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contents/{+path}", + "compare_url": "https://api.example.com/repos/{{ namespace }}/myproject5/compare/{base}...{head}", + "merges_url": "https://api.example.com/repos/{{ namespace }}/myproject5/merges", + "archive_url": "https://api.example.com/repos/{{ namespace }}/myproject5/{archive_format}{/ref}", + "downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject5/downloads", + "issues_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues{/number}", + "pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject5/pulls{/number}", + "milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject5/milestones{/number}", + "notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject5/notifications{?since,all,participating}", + "labels_url": "https://api.example.com/repos/{{ namespace }}/myproject5/labels{/name}", + "releases_url": "https://api.example.com/repos/{{ namespace }}/myproject5/releases{/id}", + "deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/deployments", + 
"created_at": "2016-01-07T22:27:54Z", + "updated_at": "2021-11-20T16:15:37Z", + "pushed_at": "2021-11-20T16:15:34Z", + "git_url": "git://example.com/{{ namespace }}/myproject5.git", + "ssh_url": "ssh://git@example.com/{{ namespace }}/myproject5.git", + "clone_url": "https://example.com/{{ namespace }}/myproject5.git", + "svn_url": "https://example.com/{{ namespace }}/myproject5", + "homepage": null, + "size": 12, + "stargazers_count": 0, + "watchers_count": 0, + "language": "Shell", + "has_issues": true, + "has_projects": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "archived": false, + "disabled": false, + "open_issues_count": 0, + "license": { + "key": "apache-2.0", + "name": "Apache License 2.0", + "spdx_id": "Apache-2.0", + "url": "https://api.example.com/licenses/apache-2.0", + "node_id": "MDc6TGljZW5zZTI=" + }, + "allow_forking": true, + "is_template": false, + "topics": [ + + ], + "visibility": "public", + "forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "master", + "permissions": { + "admin": true, + "maintain": true, + "push": true, + "triage": true, + "pull": true + } + } +] diff --git a/e2e_tests/docker-rest/flask/github_api_user.json b/e2e_tests/docker-rest/flask/github_api_user.json new file mode 100644 index 0000000..03df406 --- /dev/null +++ b/e2e_tests/docker-rest/flask/github_api_user.json @@ -0,0 +1,46 @@ +{ + "login": "myuser1", + "id": 1, + "node_id": "MDQ6VXNlcjE=", + "avatar_url": "https://example.com/images/error/octocat_happy.gif", + "gravatar_id": "", + "url": "https://api.example.com/users/octocat", + "html_url": "https://example.com/octocat", + "followers_url": "https://api.example.com/users/octocat/followers", + "following_url": "https://api.example.com/users/octocat/following{/other_user}", + "gists_url": "https://api.example.com/users/octocat/gists{/gist_id}", + "starred_url": "https://api.example.com/users/octocat/starred{/owner}{/repo}", + 
"subscriptions_url": "https://api.example.com/users/octocat/subscriptions", + "organizations_url": "https://api.example.com/users/octocat/orgs", + "repos_url": "https://api.example.com/users/octocat/repos", + "events_url": "https://api.example.com/users/octocat/events{/privacy}", + "received_events_url": "https://api.example.com/users/octocat/received_events", + "type": "User", + "site_admin": false, + "name": "monalisa octocat", + "company": "GitHub", + "blog": "https://example.com/blog", + "location": "San Francisco", + "email": "octocat@example.com", + "hireable": false, + "bio": "There once was...", + "twitter_username": "monatheoctocat", + "public_repos": 2, + "public_gists": 1, + "followers": 20, + "following": 0, + "created_at": "2008-01-14T04:33:35Z", + "updated_at": "2008-01-14T04:33:35Z", + "private_gists": 81, + "total_private_repos": 100, + "owned_private_repos": 100, + "disk_usage": 10000, + "collaborators": 8, + "two_factor_authentication": true, + "plan": { + "name": "Medium", + "space": 400, + "private_repos": 20, + "collaborators": 0 + } +} diff --git a/e2e_tests/docker-rest/flask/gitlab.py b/e2e_tests/docker-rest/flask/gitlab.py new file mode 100644 index 0000000..12f070c --- /dev/null +++ b/e2e_tests/docker-rest/flask/gitlab.py @@ -0,0 +1,106 @@ +import os.path + +from app import app + +from flask import Flask, request, abort, jsonify, make_response + +import jinja2 + + +def check_headers(): + if request.headers.get("accept") != "application/json": + app.logger.error("Invalid accept header") + abort(500) + auth_header = request.headers.get("authorization") + if auth_header != "bearer authtoken": + app.logger.error("Invalid authorization header: %s", auth_header) + abort( + make_response( + jsonify( + { + "message": "Bad credentials", + "documentation_url": "https://docs.example.com/rest", + } + ), + 401, + ) + ) + + +def add_pagination(response, page, last_page): + host = request.headers["host"] + link_header = "" + + def args(page): + args = 
request.args.copy() + args["page"] = page + return "&".join([f"{k}={v}" for k, v in args.items()]) + + if page < last_page: + link_header += ( + f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", ' + ) + link_header += ( + f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"' + ) + response.headers["link"] = link_header + + + def read_project_files(namespaces=[]): + last_page = 4 + page = username = int(request.args.get("page", "1")) + response_file = f"./gitlab_api_page_{page}.json" + if not os.path.exists(response_file): + return jsonify([]) + + response = make_response( + jinja2.Template(open(response_file).read()).render( + namespace=namespaces[page - 1] + ) + ) + add_pagination(response, page, last_page) + response.headers["content-type"] = "application/json" + return response + + + def single_namespaced_projects(namespace): + return read_project_files([namespace] * 4) + + + def mixed_projects(namespaces): + return read_project_files(namespaces) + + + @app.route("/gitlab/api/v4/users/<user>/projects") + def gitlab_user_repos(user): + check_headers() + if user == "myuser1": + return single_namespaced_projects("myuser1") + return jsonify([]) + + + @app.route("/gitlab/api/v4/groups/<group>/projects") + def gitlab_group_repos(group): + check_headers() + if not ( + request.args.get("include_subgroups") == "true" + and request.args.get("archived") == "false" + ): + abort(500, "wrong arguments") + if group == "mygroup1": + return single_namespaced_projects("mygroup1") + return jsonify([]) + + + @app.route("/gitlab/api/v4/projects/") + def gitlab_own_repos(): + check_headers() + return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"]) + + + @app.route("/gitlab/api/v4/user/") + def gitlab_user(): + check_headers() + response = make_response(open("./gitlab_api_user.json").read()) + response.headers["content-type"] = "application/json" + return response diff --git a/e2e_tests/docker-rest/flask/gitlab_api_page_1.json 
b/e2e_tests/docker-rest/flask/gitlab_api_page_1.json new file mode 100644 index 0000000..1ea538f --- /dev/null +++ b/e2e_tests/docker-rest/flask/gitlab_api_page_1.json @@ -0,0 +1,236 @@ +[ + { + "id": 1, + "description": "", + "name": "myproject1", + "name_with_namespace": "{{ namespace }} / myproject1", + "path": "myproject1", + "path_with_namespace": "{{ namespace }}/myproject1", + "created_at": "2020-11-26T17:23:39.904Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject1.git", + "http_url_to_repo": "https://example.com/{{ namespace }}/myproject1.git", + "web_url": "https://example.com/{{ namespace }}/myproject1", + "readme_url": null, + "avatar_url": null, + "forks_count": 0, + "star_count": 0, + "last_activity_at": "2020-11-26T17:23:39.904Z", + "namespace": { + "id": 3, + "name": "{{ namespace }}", + "path": "{{ namespace }}", + "kind": "group", + "full_path": "{{ namespace }}", + "parent_id": null, + "avatar_url": "/uploads/-/system/group/avatar/5/x.png", + "web_url": "https://example.com/groups/{{ namespace }}" + }, + "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject1", + "_links": { + "self": "https://example.com/api/v4/projects/2", + "issues": "https://example.com/api/v4/projects/2/issues", + "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", + "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", + "labels": "https://example.com/api/v4/projects/2/labels", + "events": "https://example.com/api/v4/projects/2/events", + "members": "https://example.com/api/v4/projects/2/members", + "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" + }, + "packages_enabled": true, + "empty_repo": false, + "archived": false, + "visibility": "private", + "resolve_outdated_diff_discussions": false, + "container_expiration_policy": { + "cadence": "1d", + "enabled": false, + "keep_n": 10, + 
"older_than": "90d", + "name_regex": ".*", + "name_regex_keep": null, + "next_run_at": "2020-11-27T17:23:39.927Z" + }, + "issues_enabled": true, + "merge_requests_enabled": true, + "wiki_enabled": true, + "jobs_enabled": true, + "snippets_enabled": true, + "container_registry_enabled": true, + "service_desk_enabled": true, + "service_desk_address": "contact-for-myproject1-2-issue-@incoming.example.com", + "can_create_merge_request_in": true, + "issues_access_level": "enabled", + "repository_access_level": "enabled", + "merge_requests_access_level": "enabled", + "forking_access_level": "enabled", + "wiki_access_level": "enabled", + "builds_access_level": "enabled", + "snippets_access_level": "enabled", + "pages_access_level": "private", + "operations_access_level": "enabled", + "analytics_access_level": "enabled", + "container_registry_access_level": "enabled", + "security_and_compliance_access_level": "private", + "emails_disabled": null, + "shared_runners_enabled": true, + "lfs_enabled": true, + "creator_id": 1803951, + "import_url": null, + "import_type": null, + "import_status": "none", + "open_issues_count": 0, + "ci_default_git_depth": 50, + "ci_forward_deployment_enabled": true, + "ci_job_token_scope_enabled": false, + "ci_separated_caches": true, + "public_jobs": true, + "build_timeout": 3600, + "auto_cancel_pending_pipelines": "enabled", + "build_coverage_regex": null, + "ci_config_path": "", + "shared_with_groups": [], + "only_allow_merge_if_pipeline_succeeds": false, + "allow_merge_on_skipped_pipeline": null, + "restrict_user_defined_variables": false, + "request_access_enabled": true, + "only_allow_merge_if_all_discussions_are_resolved": false, + "remove_source_branch_after_merge": true, + "printing_merge_request_link_enabled": true, + "merge_method": "merge", + "squash_option": "default_off", + "enforce_auth_checks_on_uploads": true, + "suggestion_commit_message": null, + "merge_commit_template": null, + "squash_commit_template": null, + 
"auto_devops_enabled": false, + "auto_devops_deploy_strategy": "continuous", + "autoclose_referenced_issues": true, + "keep_latest_artifact": true, + "runner_token_expiration_interval": null, + "external_authorization_classification_label": "", + "requirements_enabled": false, + "requirements_access_level": "enabled", + "security_and_compliance_enabled": true, + "compliance_frameworks": [] + }, + { + "id": 2, + "description": "", + "name": "myproject2", + "name_with_namespace": "{{ namespace }} / myproject2", + "path": "myproject2", + "path_with_namespace": "{{ namespace }}/myproject2", + "created_at": "2020-11-26T17:23:39.904Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject2.git", + "http_url_to_repo": "https://example.com/{{ namespace }}/myproject2.git", + "web_url": "https://example.com/{{ namespace }}/myproject2", + "readme_url": null, + "avatar_url": null, + "forks_count": 0, + "star_count": 0, + "last_activity_at": "2020-11-26T17:23:39.904Z", + "namespace": { + "id": 3, + "name": "{{ namespace }}", + "path": "{{ namespace }}", + "kind": "group", + "full_path": "{{ namespace }}", + "parent_id": null, + "avatar_url": "/uploads/-/system/group/avatar/5/x.png", + "web_url": "https://example.com/groups/{{ namespace }}" + }, + "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject2", + "_links": { + "self": "https://example.com/api/v4/projects/2", + "issues": "https://example.com/api/v4/projects/2/issues", + "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", + "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", + "labels": "https://example.com/api/v4/projects/2/labels", + "events": "https://example.com/api/v4/projects/2/events", + "members": "https://example.com/api/v4/projects/2/members", + "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" + }, + "packages_enabled": true, + 
"empty_repo": false, + "archived": false, + "visibility": "public", + "resolve_outdated_diff_discussions": false, + "container_expiration_policy": { + "cadence": "1d", + "enabled": false, + "keep_n": 10, + "older_than": "90d", + "name_regex": ".*", + "name_regex_keep": null, + "next_run_at": "2020-11-27T17:23:39.927Z" + }, + "issues_enabled": true, + "merge_requests_enabled": true, + "wiki_enabled": true, + "jobs_enabled": true, + "snippets_enabled": true, + "container_registry_enabled": true, + "service_desk_enabled": true, + "service_desk_address": "contact-for-myproject2-2-issue-@incoming.example.com", + "can_create_merge_request_in": true, + "issues_access_level": "enabled", + "repository_access_level": "enabled", + "merge_requests_access_level": "enabled", + "forking_access_level": "enabled", + "wiki_access_level": "enabled", + "builds_access_level": "enabled", + "snippets_access_level": "enabled", + "pages_access_level": "private", + "operations_access_level": "enabled", + "analytics_access_level": "enabled", + "container_registry_access_level": "enabled", + "security_and_compliance_access_level": "private", + "emails_disabled": null, + "shared_runners_enabled": true, + "lfs_enabled": true, + "creator_id": 1803951, + "import_url": null, + "import_type": null, + "import_status": "none", + "open_issues_count": 0, + "ci_default_git_depth": 50, + "ci_forward_deployment_enabled": true, + "ci_job_token_scope_enabled": false, + "ci_separated_caches": true, + "public_jobs": true, + "build_timeout": 3600, + "auto_cancel_pending_pipelines": "enabled", + "build_coverage_regex": null, + "ci_config_path": "", + "shared_with_groups": [], + "only_allow_merge_if_pipeline_succeeds": false, + "allow_merge_on_skipped_pipeline": null, + "restrict_user_defined_variables": false, + "request_access_enabled": true, + "only_allow_merge_if_all_discussions_are_resolved": false, + "remove_source_branch_after_merge": true, + "printing_merge_request_link_enabled": true, + "merge_method": 
"merge", + "squash_option": "default_off", + "enforce_auth_checks_on_uploads": true, + "suggestion_commit_message": null, + "merge_commit_template": null, + "squash_commit_template": null, + "auto_devops_enabled": false, + "auto_devops_deploy_strategy": "continuous", + "autoclose_referenced_issues": true, + "keep_latest_artifact": true, + "runner_token_expiration_interval": null, + "external_authorization_classification_label": "", + "requirements_enabled": false, + "requirements_access_level": "enabled", + "security_and_compliance_enabled": true, + "compliance_frameworks": [] + } +] diff --git a/e2e_tests/docker-rest/flask/gitlab_api_page_2.json b/e2e_tests/docker-rest/flask/gitlab_api_page_2.json new file mode 100644 index 0000000..8964151 --- /dev/null +++ b/e2e_tests/docker-rest/flask/gitlab_api_page_2.json @@ -0,0 +1,119 @@ +[ + { + "id": 3, + "description": "", + "name": "myproject3", + "name_with_namespace": "{{ namespace }} / myproject3", + "path": "myproject3", + "path_with_namespace": "{{ namespace }}/myproject3", + "created_at": "2020-11-26T17:23:39.904Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject3.git", + "http_url_to_repo": "https://example.com/{{ namespace }}/myproject3.git", + "web_url": "https://example.com/{{ namespace }}/myproject3", + "readme_url": null, + "avatar_url": null, + "forks_count": 0, + "star_count": 0, + "last_activity_at": "2020-11-26T17:23:39.904Z", + "namespace": { + "id": 3, + "name": "{{ namespace }}", + "path": "{{ namespace }}", + "kind": "group", + "full_path": "{{ namespace }}", + "parent_id": null, + "avatar_url": "/uploads/-/system/group/avatar/5/x.png", + "web_url": "https://example.com/groups/{{ namespace }}" + }, + "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject3", + "_links": { + "self": "https://example.com/api/v4/projects/2", + "issues": "https://example.com/api/v4/projects/2/issues", + 
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests", + "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", + "labels": "https://example.com/api/v4/projects/2/labels", + "events": "https://example.com/api/v4/projects/2/events", + "members": "https://example.com/api/v4/projects/2/members", + "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" + }, + "packages_enabled": true, + "empty_repo": false, + "archived": false, + "visibility": "public", + "resolve_outdated_diff_discussions": false, + "container_expiration_policy": { + "cadence": "1d", + "enabled": false, + "keep_n": 10, + "older_than": "90d", + "name_regex": ".*", + "name_regex_keep": null, + "next_run_at": "2020-11-27T17:23:39.927Z" + }, + "issues_enabled": true, + "merge_requests_enabled": true, + "wiki_enabled": true, + "jobs_enabled": true, + "snippets_enabled": true, + "container_registry_enabled": true, + "service_desk_enabled": true, + "service_desk_address": "contact-for-myproject3-2-issue-@incoming.example.com", + "can_create_merge_request_in": true, + "issues_access_level": "enabled", + "repository_access_level": "enabled", + "merge_requests_access_level": "enabled", + "forking_access_level": "enabled", + "wiki_access_level": "enabled", + "builds_access_level": "enabled", + "snippets_access_level": "enabled", + "pages_access_level": "private", + "operations_access_level": "enabled", + "analytics_access_level": "enabled", + "container_registry_access_level": "enabled", + "security_and_compliance_access_level": "private", + "emails_disabled": null, + "shared_runners_enabled": true, + "lfs_enabled": true, + "creator_id": 1803951, + "import_url": null, + "import_type": null, + "import_status": "none", + "open_issues_count": 0, + "ci_default_git_depth": 50, + "ci_forward_deployment_enabled": true, + "ci_job_token_scope_enabled": false, + "ci_separated_caches": true, + "public_jobs": true, + "build_timeout": 3600, + 
"auto_cancel_pending_pipelines": "enabled", + "build_coverage_regex": null, + "ci_config_path": "", + "shared_with_groups": [], + "only_allow_merge_if_pipeline_succeeds": false, + "allow_merge_on_skipped_pipeline": null, + "restrict_user_defined_variables": false, + "request_access_enabled": true, + "only_allow_merge_if_all_discussions_are_resolved": false, + "remove_source_branch_after_merge": true, + "printing_merge_request_link_enabled": true, + "merge_method": "merge", + "squash_option": "default_off", + "enforce_auth_checks_on_uploads": true, + "suggestion_commit_message": null, + "merge_commit_template": null, + "squash_commit_template": null, + "auto_devops_enabled": false, + "auto_devops_deploy_strategy": "continuous", + "autoclose_referenced_issues": true, + "keep_latest_artifact": true, + "runner_token_expiration_interval": null, + "external_authorization_classification_label": "", + "requirements_enabled": false, + "requirements_access_level": "enabled", + "security_and_compliance_enabled": true, + "compliance_frameworks": [] + } +] diff --git a/e2e_tests/docker-rest/flask/gitlab_api_page_3.json b/e2e_tests/docker-rest/flask/gitlab_api_page_3.json new file mode 100644 index 0000000..33e36be --- /dev/null +++ b/e2e_tests/docker-rest/flask/gitlab_api_page_3.json @@ -0,0 +1,119 @@ +[ + { + "id": 4, + "description": "", + "name": "myproject4", + "name_with_namespace": "{{ namespace }} / myproject4", + "path": "myproject4", + "path_with_namespace": "{{ namespace }}/myproject4", + "created_at": "2020-11-26T17:23:39.904Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject4.git", + "http_url_to_repo": "https://example.com/{{ namespace }}/myproject4.git", + "web_url": "https://example.com/{{ namespace }}/myproject4", + "readme_url": null, + "avatar_url": null, + "forks_count": 0, + "star_count": 0, + "last_activity_at": "2020-11-26T17:23:39.904Z", + "namespace": { + "id": 3, + 
"name": "{{ namespace }}", + "path": "{{ namespace }}", + "kind": "group", + "full_path": "{{ namespace }}", + "parent_id": null, + "avatar_url": "/uploads/-/system/group/avatar/5/x.png", + "web_url": "https://example.com/groups/{{ namespace }}" + }, + "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject4", + "_links": { + "self": "https://example.com/api/v4/projects/2", + "issues": "https://example.com/api/v4/projects/2/issues", + "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", + "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", + "labels": "https://example.com/api/v4/projects/2/labels", + "events": "https://example.com/api/v4/projects/2/events", + "members": "https://example.com/api/v4/projects/2/members", + "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" + }, + "packages_enabled": true, + "empty_repo": false, + "archived": false, + "visibility": "public", + "resolve_outdated_diff_discussions": false, + "container_expiration_policy": { + "cadence": "1d", + "enabled": false, + "keep_n": 10, + "older_than": "90d", + "name_regex": ".*", + "name_regex_keep": null, + "next_run_at": "2020-11-27T17:23:39.927Z" + }, + "issues_enabled": true, + "merge_requests_enabled": true, + "wiki_enabled": true, + "jobs_enabled": true, + "snippets_enabled": true, + "container_registry_enabled": true, + "service_desk_enabled": true, + "service_desk_address": "contact-for-myproject4-2-issue-@incoming.example.com", + "can_create_merge_request_in": true, + "issues_access_level": "enabled", + "repository_access_level": "enabled", + "merge_requests_access_level": "enabled", + "forking_access_level": "enabled", + "wiki_access_level": "enabled", + "builds_access_level": "enabled", + "snippets_access_level": "enabled", + "pages_access_level": "private", + "operations_access_level": "enabled", + "analytics_access_level": "enabled", + "container_registry_access_level": "enabled", + 
"security_and_compliance_access_level": "private", + "emails_disabled": null, + "shared_runners_enabled": true, + "lfs_enabled": true, + "creator_id": 1803951, + "import_url": null, + "import_type": null, + "import_status": "none", + "open_issues_count": 0, + "ci_default_git_depth": 50, + "ci_forward_deployment_enabled": true, + "ci_job_token_scope_enabled": false, + "ci_separated_caches": true, + "public_jobs": true, + "build_timeout": 3600, + "auto_cancel_pending_pipelines": "enabled", + "build_coverage_regex": null, + "ci_config_path": "", + "shared_with_groups": [], + "only_allow_merge_if_pipeline_succeeds": false, + "allow_merge_on_skipped_pipeline": null, + "restrict_user_defined_variables": false, + "request_access_enabled": true, + "only_allow_merge_if_all_discussions_are_resolved": false, + "remove_source_branch_after_merge": true, + "printing_merge_request_link_enabled": true, + "merge_method": "merge", + "squash_option": "default_off", + "enforce_auth_checks_on_uploads": true, + "suggestion_commit_message": null, + "merge_commit_template": null, + "squash_commit_template": null, + "auto_devops_enabled": false, + "auto_devops_deploy_strategy": "continuous", + "autoclose_referenced_issues": true, + "keep_latest_artifact": true, + "runner_token_expiration_interval": null, + "external_authorization_classification_label": "", + "requirements_enabled": false, + "requirements_access_level": "enabled", + "security_and_compliance_enabled": true, + "compliance_frameworks": [] + } +] diff --git a/e2e_tests/docker-rest/flask/gitlab_api_page_4.json b/e2e_tests/docker-rest/flask/gitlab_api_page_4.json new file mode 100644 index 0000000..dc7e17c --- /dev/null +++ b/e2e_tests/docker-rest/flask/gitlab_api_page_4.json @@ -0,0 +1,119 @@ +[ + { + "id": 5, + "description": "", + "name": "myproject5", + "name_with_namespace": "{{ namespace }} / myproject5", + "path": "myproject5", + "path_with_namespace": "{{ namespace }}/myproject5", + "created_at": 
"2020-11-26T17:23:39.904Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject5.git", + "http_url_to_repo": "https://example.com/{{ namespace }}/myproject5.git", + "web_url": "https://example.com/{{ namespace }}/myproject5", + "readme_url": null, + "avatar_url": null, + "forks_count": 0, + "star_count": 0, + "last_activity_at": "2020-11-26T17:23:39.904Z", + "namespace": { + "id": 3, + "name": "{{ namespace }}", + "path": "{{ namespace }}", + "kind": "group", + "full_path": "{{ namespace }}", + "parent_id": null, + "avatar_url": "/uploads/-/system/group/avatar/5/x.png", + "web_url": "https://example.com/groups/{{ namespace }}" + }, + "container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject5", + "_links": { + "self": "https://example.com/api/v4/projects/2", + "issues": "https://example.com/api/v4/projects/2/issues", + "merge_requests": "https://example.com/api/v4/projects/2/merge_requests", + "repo_branches": "https://example.com/api/v4/projects/2/repository/branches", + "labels": "https://example.com/api/v4/projects/2/labels", + "events": "https://example.com/api/v4/projects/2/events", + "members": "https://example.com/api/v4/projects/2/members", + "cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents" + }, + "packages_enabled": true, + "empty_repo": false, + "archived": false, + "visibility": "public", + "resolve_outdated_diff_discussions": false, + "container_expiration_policy": { + "cadence": "1d", + "enabled": false, + "keep_n": 10, + "older_than": "90d", + "name_regex": ".*", + "name_regex_keep": null, + "next_run_at": "2020-11-27T17:23:39.927Z" + }, + "issues_enabled": true, + "merge_requests_enabled": true, + "wiki_enabled": true, + "jobs_enabled": true, + "snippets_enabled": true, + "container_registry_enabled": true, + "service_desk_enabled": true, + "service_desk_address": "contact-for-myproject5-2-issue-@incoming.example.com", + 
"can_create_merge_request_in": true, + "issues_access_level": "enabled", + "repository_access_level": "enabled", + "merge_requests_access_level": "enabled", + "forking_access_level": "enabled", + "wiki_access_level": "enabled", + "builds_access_level": "enabled", + "snippets_access_level": "enabled", + "pages_access_level": "private", + "operations_access_level": "enabled", + "analytics_access_level": "enabled", + "container_registry_access_level": "enabled", + "security_and_compliance_access_level": "private", + "emails_disabled": null, + "shared_runners_enabled": true, + "lfs_enabled": true, + "creator_id": 1803951, + "import_url": null, + "import_type": null, + "import_status": "none", + "open_issues_count": 0, + "ci_default_git_depth": 50, + "ci_forward_deployment_enabled": true, + "ci_job_token_scope_enabled": false, + "ci_separated_caches": true, + "public_jobs": true, + "build_timeout": 3600, + "auto_cancel_pending_pipelines": "enabled", + "build_coverage_regex": null, + "ci_config_path": "", + "shared_with_groups": [], + "only_allow_merge_if_pipeline_succeeds": false, + "allow_merge_on_skipped_pipeline": null, + "restrict_user_defined_variables": false, + "request_access_enabled": true, + "only_allow_merge_if_all_discussions_are_resolved": false, + "remove_source_branch_after_merge": true, + "printing_merge_request_link_enabled": true, + "merge_method": "merge", + "squash_option": "default_off", + "enforce_auth_checks_on_uploads": true, + "suggestion_commit_message": null, + "merge_commit_template": null, + "squash_commit_template": null, + "auto_devops_enabled": false, + "auto_devops_deploy_strategy": "continuous", + "autoclose_referenced_issues": true, + "keep_latest_artifact": true, + "runner_token_expiration_interval": null, + "external_authorization_classification_label": "", + "requirements_enabled": false, + "requirements_access_level": "enabled", + "security_and_compliance_enabled": true, + "compliance_frameworks": [] + } +] diff --git 
a/e2e_tests/docker-rest/flask/gitlab_api_user.json b/e2e_tests/docker-rest/flask/gitlab_api_user.json new file mode 100644 index 0000000..b09e810 --- /dev/null +++ b/e2e_tests/docker-rest/flask/gitlab_api_user.json @@ -0,0 +1,42 @@ +{ + "id": 1, + "username": "myuser1", + "name": "My User", + "state": "active", + "avatar_url": "https://example.com/avatar", + "web_url": "https://example.com/myuser1", + "created_at": "2016-12-10T10:09:11.585Z", + "bio": "", + "location": "", + "public_email": "", + "skype": "", + "linkedin": "", + "twitter": "", + "website_url": "", + "organization": "", + "job_title": "", + "pronouns": "", + "bot": false, + "work_information": null, + "followers": 0, + "following": 0, + "is_followed": false, + "local_time": "11:59 PM", + "last_sign_in_at": "2020-03-14T09:13:44.977Z", + "confirmed_at": "2022-05-19T23:48:47.033Z", + "last_activity_on": "2022-05-19", + "email": "myuser1@example.com", + "theme_id": null, + "color_scheme_id": 1, + "projects_limit": 100000, + "current_sign_in_at": "2022-05-19T23:45:49.661Z", + "identities": [], + "can_create_group": true, + "can_create_project": true, + "two_factor_enabled": false, + "external": false, + "private_profile": false, + "commit_email": "myuser1@example.com", + "shared_runners_minutes_limit": 2000, + "extra_shared_runners_minutes_limit": null +} diff --git a/e2e_tests/docker/Dockerfile b/e2e_tests/docker/Dockerfile new file mode 100644 index 0000000..b6e75ce --- /dev/null +++ b/e2e_tests/docker/Dockerfile @@ -0,0 +1,14 @@ +FROM docker.io/debian:11.3 + +RUN apt-get update \ + && apt-get install -y \ + python3-pytest \ + python3-toml \ + python3-git \ + python3-yaml \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /tests + +ENTRYPOINT ["/bin/sh", "-c", "--"] diff --git a/e2e_tests/helpers.py b/e2e_tests/helpers.py index a5b837f..c66289b 100644 --- a/e2e_tests/helpers.py +++ b/e2e_tests/helpers.py @@ -8,9 +8,7 @@ import hashlib import git -binary = os.path.join( - 
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "target/release/grm" -) +binary = os.environ["GRM_BINARY"] def grm(args, cwd=None, is_invalid=False): @@ -208,7 +206,7 @@ class RepoTree: """ ) - cmd = grm(["repos", "sync", "--config", self.config.name]) + cmd = grm(["repos", "sync", "config", "--config", self.config.name]) assert cmd.returncode == 0 return (self.root.name, self.config.name, ["test", "test_worktree"]) diff --git a/e2e_tests/requirements.txt b/e2e_tests/requirements.txt deleted file mode 100644 index 6f444a8..0000000 --- a/e2e_tests/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -attrs==21.4.0 -gitdb==4.0.9 -GitPython==3.1.27 -iniconfig==1.1.1 -packaging==21.3 -pluggy==1.0.0 -py==1.11.0 -pyparsing==3.0.8 -pytest==7.1.2 -PyYAML==6.0 -smmap==5.0.0 -toml==0.10.2 -tomli==2.0.1 -typing_extensions==4.2.0 diff --git a/e2e_tests/test_repos_find.py b/e2e_tests/test_repos_find.py index cf6e692..9f97c6d 100644 --- a/e2e_tests/test_repos_find.py +++ b/e2e_tests/test_repos_find.py @@ -11,7 +11,7 @@ from helpers import * def test_repos_find_nonexistent(): with NonExistentPath() as nonexistent_dir: - cmd = grm(["repos", "find", nonexistent_dir]) + cmd = grm(["repos", "find", "local", nonexistent_dir]) assert "does not exist" in cmd.stderr.lower() assert cmd.returncode != 0 assert not os.path.exists(nonexistent_dir) @@ -19,14 +19,14 @@ def test_repos_find_nonexistent(): def test_repos_find_file(): with tempfile.NamedTemporaryFile() as tmpfile: - cmd = grm(["repos", "find", tmpfile.name]) + cmd = grm(["repos", "find", "local", tmpfile.name]) assert "not a directory" in cmd.stderr.lower() assert cmd.returncode != 0 def test_repos_find_empty(): with tempfile.TemporaryDirectory() as tmpdir: - cmd = grm(["repos", "find", tmpdir]) + cmd = grm(["repos", "find", "local", tmpdir]) assert cmd.returncode == 0 assert len(cmd.stdout) == 0 assert len(cmd.stderr) != 0 @@ -35,7 +35,8 @@ def test_repos_find_empty(): def test_repos_find_invalid_format(): with 
tempfile.TemporaryDirectory() as tmpdir: cmd = grm( - ["repos", "find", tmpdir, "--format", "invalidformat"], is_invalid=True + ["repos", "find", "local", tmpdir, "--format", "invalidformat"], + is_invalid=True, ) assert cmd.returncode != 0 assert len(cmd.stdout) == 0 @@ -55,7 +56,7 @@ def test_repos_find_non_git_repos(): """ ) - cmd = grm(["repos", "find", tmpdir]) + cmd = grm(["repos", "find", "local", tmpdir]) assert cmd.returncode == 0 assert len(cmd.stdout) == 0 @@ -83,7 +84,7 @@ def test_repos_find(configtype, default): ( cd ./repo2 git init - git co -b main + git checkout -b main echo test > test git add test git commit -m "commit1" @@ -97,7 +98,7 @@ def test_repos_find(configtype, default): """ ) - args = ["repos", "find", tmpdir] + args = ["repos", "find", "local", tmpdir] if not default: args += ["--format", configtype] cmd = grm(args) @@ -152,7 +153,7 @@ def test_repos_find(configtype, default): def test_repos_find_in_root(configtype, default): with TempGitRepository() as repo_dir: - args = ["repos", "find", repo_dir] + args = ["repos", "find", "local", repo_dir] if not default: args += ["--format", configtype] cmd = grm(args) @@ -213,7 +214,7 @@ def test_repos_find_with_invalid_repo(configtype, default): ( cd ./repo2 git init - git co -b main + git checkout -b main echo test > test git add test git commit -m "commit1" @@ -227,7 +228,7 @@ def test_repos_find_with_invalid_repo(configtype, default): """ ) - args = ["repos", "find", tmpdir] + args = ["repos", "find", "local", tmpdir] if not default: args += ["--format", configtype] cmd = grm(args) diff --git a/e2e_tests/test_repos_find_remote.py b/e2e_tests/test_repos_find_remote.py new file mode 100644 index 0000000..40452a6 --- /dev/null +++ b/e2e_tests/test_repos_find_remote.py @@ -0,0 +1,950 @@ +#!/usr/bin/env python3 + +import re +import os + +import toml +import pytest +import yaml + +from helpers import * + + +ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"] +PROVIDERS = ["github", "gitlab"] + + 
+@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_invalid_provider(use_config): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + f.write( + """ + provider = "thisproviderdoesnotexist" + token_command = "true" + root = "/" + """ + ) + args = ["repos", "find", "config", "--config", config.name] + cmd = grm(args, is_invalid=True) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + "thisproviderdoesnotexist", + "--token-command", + "true", + "--root", + "/", + ] + cmd = grm(args, is_invalid=True) + assert cmd.returncode != 0 + assert len(cmd.stdout) == 0 + if not use_config: + assert re.match(".*isn't a valid value for.*provider", cmd.stderr) + + +@pytest.mark.parametrize("provider", PROVIDERS) +def test_repos_find_remote_invalid_format(provider): + cmd = grm( + [ + "repos", + "find", + "remote", + "--provider", + provider, + "--format", + "invalidformat", + "--token-command", + "true", + "--root", + "/myroot", + ], + is_invalid=True, + ) + assert cmd.returncode != 0 + assert len(cmd.stdout) == 0 + assert "isn't a valid value" in cmd.stderr + + +@pytest.mark.parametrize("provider", PROVIDERS) +def test_repos_find_remote_token_command_failed(provider): + cmd = grm( + [ + "repos", + "find", + "remote", + "--provider", + provider, + "--format", + "yaml", + "--token-command", + "false", + "--root", + "/myroot", + ], + is_invalid=True, + ) + assert cmd.returncode != 0 + assert len(cmd.stdout) == 0 + assert "token command failed" in cmd.stderr.lower() + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_wrong_token(provider, use_config): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + f.write( + f""" + provider = "{provider}" + token_command = "echo wrongtoken" + root = "/myroot" + [filters] + access = true + """ + ) + args = ["repos", 
"find", "config", "--config", config.name] + cmd = grm(args, is_invalid=True) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo wrongtoken", + "--root", + "/myroot", + "--access", + ] + cmd = grm(args, is_invalid=True) + + assert cmd.returncode != 0 + assert len(cmd.stdout) == 0 + assert "bad credentials" in cmd.stderr.lower() + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("default", [True, False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_no_filter(provider, configtype, default, use_config): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + f.write( + f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + """ + ) + args = ["repos", "find", "config", "--config", config.name] + if not default: + args += ["--format", configtype] + cmd = grm(args) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + ] + if not default: + args += ["--format", configtype] + cmd = grm(args) + + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if default or configtype == "toml": + output = toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], list) + assert len(output["trees"]) == 0 + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("configtype_default", [True, False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_user_empty( + provider, configtype, configtype_default, use_config +): + if use_config: + with 
tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + cfg = f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + + [filters] + users = ["someotheruser"] + """ + + f.write(cfg) + args = ["repos", "find", "config", "--config", config.name] + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + "--user", + "someotheruser", + ] + + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if configtype_default or configtype == "toml": + output = toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], list) + assert len(output["trees"]) == 0 + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("configtype_default", [True, False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("worktree_default", [True, False]) +@pytest.mark.parametrize("worktree", [True, False]) +@pytest.mark.parametrize("use_owner", [True, False]) +@pytest.mark.parametrize("force_ssh", [True, False]) +@pytest.mark.parametrize("use_alternate_endpoint", [True, False]) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_user( + provider, + configtype, + configtype_default, + worktree, + worktree_default, + use_owner, + force_ssh, + use_alternate_endpoint, + use_config, +): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + cfg = f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + """ + + if use_alternate_endpoint: + cfg += f'api_url 
= "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' + if not worktree_default: + cfg += f"worktree = {str(worktree).lower()}\n" + if force_ssh: + cfg += f"force_ssh = true\n" + if use_owner: + cfg += """ + [filters] + owner = true\n + """ + else: + cfg += """ + [filters] + users = ["myuser1"]\n + """ + + print(cfg) + f.write(cfg) + + args = ["repos", "find", "config", "--config", config.name] + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + ] + if use_owner: + args += ["--owner"] + else: + args += ["--user", "myuser1"] + if force_ssh: + args += ["--force-ssh"] + if not worktree_default: + args += ["--worktree", str(worktree).lower()] + if use_alternate_endpoint: + args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"] + + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + + if use_alternate_endpoint and provider == "github": + assert cmd.returncode != 0 + assert "overriding is not supported for github" in cmd.stderr.lower() + return + + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if configtype_default or configtype == "toml": + output = toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], list) + assert len(output["trees"]) == 1 + + assert set(output["trees"][0].keys()) == {"root", "repos"} + assert isinstance(output["trees"][0]["repos"], list) + assert len(output["trees"][0]["repos"]) == 5 + + for i in range(1, 6): + repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][ + 0 + ] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert 
len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh or i == 1: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("configtype_default", [False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("use_alternate_endpoint", [True, False]) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_group_empty( + provider, configtype, configtype_default, use_alternate_endpoint, use_config +): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + cfg = f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + """ + + if use_alternate_endpoint: + cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' + cfg += """ + [filters] + groups = ["someothergroup"]\n + """ + + f.write(cfg) + + args = ["repos", "find", "config", "--config", config.name] + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + "--group", + "someothergroup", + ] + if use_alternate_endpoint: + args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"] + + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + + if use_alternate_endpoint and provider == "github": + assert cmd.returncode != 0 + assert "overriding is not supported for github" in cmd.stderr.lower() + return + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if configtype_default or configtype == "toml": + output = 
toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], list) + assert len(output["trees"]) == 0 + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("configtype_default", [False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("worktree_default", [True, False]) +@pytest.mark.parametrize("worktree", [True, False]) +@pytest.mark.parametrize("force_ssh", [True, False]) +@pytest.mark.parametrize("use_alternate_endpoint", [True, False]) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_group( + provider, + configtype, + configtype_default, + worktree, + worktree_default, + force_ssh, + use_alternate_endpoint, + use_config, +): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + cfg = f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + """ + + if not worktree_default: + cfg += f"worktree = {str(worktree).lower()}\n" + if force_ssh: + cfg += f"force_ssh = true\n" + if use_alternate_endpoint: + cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' + cfg += """ + [filters] + groups = ["mygroup1"]\n + """ + + f.write(cfg) + + args = ["repos", "find", "config", "--config", config.name] + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + "--group", + "mygroup1", + ] + if not worktree_default: + args += ["--worktree", str(worktree).lower()] + if force_ssh: + args += ["--force-ssh"] + if use_alternate_endpoint: + args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"] + + if not configtype_default: + args += 
["--format", configtype] + cmd = grm(args) + if use_alternate_endpoint and provider == "github": + assert cmd.returncode != 0 + assert "overriding is not supported for github" in cmd.stderr.lower() + return + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if configtype_default or configtype == "toml": + output = toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], list) + assert len(output["trees"]) == 1 + + assert set(output["trees"][0].keys()) == {"root", "repos"} + assert isinstance(output["trees"][0]["repos"], list) + assert len(output["trees"][0]["repos"]) == 5 + + for i in range(1, 6): + repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][ + 0 + ] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + if force_ssh or i == 1: + assert repo["remotes"][0]["name"] == provider + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/mygroup1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert repo["remotes"][0]["name"] == provider + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/mygroup1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("configtype_default", [False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("worktree_default", [True, False]) +@pytest.mark.parametrize("worktree", [True, False]) +@pytest.mark.parametrize("use_owner", [True, False]) +@pytest.mark.parametrize("force_ssh", [True, False]) +@pytest.mark.parametrize("use_alternate_endpoint", [True, False]) +@pytest.mark.parametrize("use_config", [True, False]) +def 
test_repos_find_remote_user_and_group( + provider, + configtype, + configtype_default, + worktree, + worktree_default, + use_owner, + force_ssh, + use_alternate_endpoint, + use_config, +): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + cfg = f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + """ + + if not worktree_default: + cfg += f"worktree = {str(worktree).lower()}\n" + if force_ssh: + cfg += f"force_ssh = true\n" + if use_alternate_endpoint: + cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' + cfg += """ + [filters] + groups = ["mygroup1"]\n + """ + + if use_owner: + cfg += "owner = true\n" + else: + cfg += 'users = ["myuser1"]\n' + + f.write(cfg) + + args = ["repos", "find", "config", "--config", config.name] + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + "--group", + "mygroup1", + ] + if use_owner: + args += ["--owner"] + else: + args += ["--user", "myuser1"] + if not worktree_default: + args += ["--worktree", str(worktree).lower()] + if force_ssh: + args += ["--force-ssh"] + if use_alternate_endpoint: + args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"] + + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + if use_alternate_endpoint and provider == "github": + assert cmd.returncode != 0 + assert "overriding is not supported for github" in cmd.stderr.lower() + return + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if configtype_default or configtype == "toml": + output = toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], 
list) + assert len(output["trees"]) == 2 + + user_namespace = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0] + + assert set(user_namespace.keys()) == {"root", "repos"} + assert isinstance(user_namespace["repos"], list) + assert len(user_namespace["repos"]) == 5 + + for i in range(1, 6): + repo = [r for r in user_namespace["repos"] if r["name"] == f"myproject{i}"][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh or i == 1: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + + group_namespace = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][0] + + assert set(group_namespace.keys()) == {"root", "repos"} + assert isinstance(group_namespace["repos"], list) + assert len(group_namespace["repos"]) == 5 + + for i in range(1, 6): + repo = [r for r in group_namespace["repos"] if r["name"] == f"myproject{i}"][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh or i == 1: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/mygroup1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/mygroup1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + + +@pytest.mark.parametrize("provider", PROVIDERS) +@pytest.mark.parametrize("configtype_default", [False]) +@pytest.mark.parametrize("configtype", ["toml", "yaml"]) +@pytest.mark.parametrize("worktree_default", [True, 
False]) +@pytest.mark.parametrize("worktree", [True, False]) +@pytest.mark.parametrize("with_user_filter", [True, False]) +@pytest.mark.parametrize("with_group_filter", [True, False]) +@pytest.mark.parametrize("force_ssh", [True, False]) +@pytest.mark.parametrize("use_alternate_endpoint", [True, False]) +@pytest.mark.parametrize("use_config", [True, False]) +def test_repos_find_remote_owner( + provider, + configtype, + configtype_default, + worktree, + worktree_default, + with_user_filter, + with_group_filter, + force_ssh, + use_alternate_endpoint, + use_config, +): + if use_config: + with tempfile.NamedTemporaryFile() as config: + with open(config.name, "w") as f: + cfg = f""" + provider = "{provider}" + token_command = "echo authtoken" + root = "/myroot" + """ + + if not worktree_default: + cfg += f"worktree = {str(worktree).lower()}\n" + if force_ssh: + cfg += f"force_ssh = true\n" + if use_alternate_endpoint: + cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n' + cfg += """ + [filters] + access = true\n + """ + + if with_user_filter: + cfg += 'users = ["myuser1"]\n' + if with_group_filter: + cfg += 'groups = ["mygroup1"]\n' + + f.write(cfg) + + args = ["repos", "find", "config", "--config", config.name] + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + + else: + args = [ + "repos", + "find", + "remote", + "--provider", + provider, + "--token-command", + "echo authtoken", + "--root", + "/myroot", + "--access", + ] + if not worktree_default: + args += ["--worktree", str(worktree).lower()] + if with_user_filter: + args += ["--user", "myuser1"] + if with_group_filter: + args += ["--group", "mygroup1"] + if force_ssh: + args += ["--force-ssh"] + if use_alternate_endpoint: + args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"] + + if not configtype_default: + args += ["--format", configtype] + cmd = grm(args) + if use_alternate_endpoint and provider == "github": + assert cmd.returncode != 0 + assert 
"overriding is not supported for github" in cmd.stderr.lower() + return + assert cmd.returncode == 0 + assert len(cmd.stderr) == 0 + + if configtype_default or configtype == "toml": + output = toml.loads(cmd.stdout) + elif configtype == "yaml": + output = yaml.safe_load(cmd.stdout) + else: + raise NotImplementedError() + + assert isinstance(output, dict) + assert set(output.keys()) == {"trees"} + assert isinstance(output["trees"], list) + assert len(output["trees"]) == 4 + + user_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0] + + assert set(user_namespace_1.keys()) == {"root", "repos"} + assert isinstance(user_namespace_1["repos"], list) + + if with_user_filter: + assert len(user_namespace_1["repos"]) == 5 + + for i in range(1, 6): + repo = [ + r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}" + ][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh or i == 1: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + else: + assert len(user_namespace_1["repos"]) == 2 + + for i in range(1, 3): + repo = [ + r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}" + ][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh or i == 1: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == 
f"https://example.com/myuser1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + + user_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/myuser2"][0] + + assert set(user_namespace_2.keys()) == {"root", "repos"} + assert isinstance(user_namespace_2["repos"], list) + assert len(user_namespace_2["repos"]) == 1 + + repo = user_namespace_2["repos"][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh: + assert ( + repo["remotes"][0]["url"] == f"ssh://git@example.com/myuser2/myproject3.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] == f"https://example.com/myuser2/myproject3.git" + ) + assert repo["remotes"][0]["type"] == "https" + + group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][ + 0 + ] + + assert set(group_namespace_1.keys()) == {"root", "repos"} + assert isinstance(group_namespace_1["repos"], list) + + if with_group_filter: + assert len(group_namespace_1["repos"]) == 5 + + for i in range(1, 6): + repo = [ + r for r in group_namespace_1["repos"] if r["name"] == f"myproject{i}" + ][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh or i == 1: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/mygroup1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/mygroup1/myproject{i}.git" + ) + assert repo["remotes"][0]["type"] == "https" + else: + assert len(group_namespace_1["repos"]) == 1 + + repo = group_namespace_1["repos"][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert 
isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/mygroup1/myproject4.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] + == f"https://example.com/mygroup1/myproject4.git" + ) + assert repo["remotes"][0]["type"] == "https" + + group_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup2"][ + 0 + ] + + assert set(group_namespace_2.keys()) == {"root", "repos"} + assert isinstance(group_namespace_2["repos"], list) + assert len(group_namespace_2["repos"]) == 1 + + repo = group_namespace_2["repos"][0] + assert repo["worktree_setup"] is (not worktree_default and worktree) + assert isinstance(repo["remotes"], list) + assert len(repo["remotes"]) == 1 + assert repo["remotes"][0]["name"] == provider + if force_ssh: + assert ( + repo["remotes"][0]["url"] + == f"ssh://git@example.com/mygroup2/myproject5.git" + ) + assert repo["remotes"][0]["type"] == "ssh" + else: + assert ( + repo["remotes"][0]["url"] == f"https://example.com/mygroup2/myproject5.git" + ) + assert repo["remotes"][0]["type"] == "https" diff --git a/e2e_tests/test_repos_sync.py b/e2e_tests/test_repos_sync.py index 2586943..67cdf3c 100644 --- a/e2e_tests/test_repos_sync.py +++ b/e2e_tests/test_repos_sync.py @@ -154,7 +154,7 @@ def test_repos_sync_config_is_valid_symlink(configtype): subprocess.run(["cat", config.name]) - cmd = grm(["repos", "sync", "--config", config_symlink]) + cmd = grm(["repos", "sync", "config", "--config", config_symlink]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -174,7 +174,7 @@ def test_repos_sync_config_is_invalid_symlink(): config_symlink = os.path.join(config_dir, "cfglink") os.symlink(nonexistent_dir, config_symlink) - cmd = grm(["repos", "sync", "--config", config_symlink]) + cmd = grm(["repos", "sync", "config", "--config", 
config_symlink]) assert cmd.returncode != 0 assert len(cmd.stdout) == 0 @@ -185,7 +185,7 @@ def test_repos_sync_config_is_invalid_symlink(): def test_repos_sync_config_is_directory(): with tempfile.TemporaryDirectory() as config: - cmd = grm(["repos", "sync", "--config", config]) + cmd = grm(["repos", "sync", "config", "--config", config]) assert cmd.returncode != 0 assert len(cmd.stdout) == 0 @@ -197,12 +197,11 @@ def test_repos_sync_config_is_unreadable(): config_path = os.path.join(config_dir, "cfg") open(config_path, "w") os.chmod(config_path, 0o0000) - cmd = grm(["repos", "sync", "--config", config_path]) + cmd = grm(["repos", "sync", "config", "--config", config_path]) assert os.path.exists(config_path) assert cmd.returncode != 0 assert len(cmd.stdout) == 0 - assert "permission denied" in cmd.stderr.lower() @pytest.mark.parametrize("configtype", ["toml", "yaml"]) @@ -213,7 +212,7 @@ def test_repos_sync_unmanaged_repos(configtype): with open(config.name, "w") as f: f.write(templates["repo_simple"][configtype].format(root=root)) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(root, "test") @@ -232,7 +231,7 @@ def test_repos_sync_root_is_file(configtype): with open(config.name, "w") as f: f.write(templates["repo_simple"][configtype].format(root=target.name)) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode != 0 assert len(cmd.stdout) == 0 assert "not a directory" in cmd.stderr.lower() @@ -251,7 +250,7 @@ def test_repos_sync_normal_clone(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -283,7 +282,7 @@ def test_repos_sync_normal_init(configtype): with open(config.name, "w") as f: 
f.write(templates["repo_simple"][configtype].format(root=target)) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -309,7 +308,7 @@ def test_repos_sync_normal_add_remote(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -329,7 +328,7 @@ def test_repos_sync_normal_add_remote(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 with git.Repo(git_dir) as repo: assert set([str(r) for r in repo.remotes]) == { @@ -359,7 +358,7 @@ def test_repos_sync_normal_remove_remote(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -382,7 +381,7 @@ def test_repos_sync_normal_remove_remote(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 shell(f"cd {git_dir} && git remote -v") with git.Repo(git_dir) as repo: @@ -424,7 +423,7 @@ def test_repos_sync_normal_change_remote_url(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -444,7 +443,7 @@ def test_repos_sync_normal_change_remote_url(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 with git.Repo(git_dir) as repo: assert set([str(r) for r in repo.remotes]) == {"origin"} @@ -467,7 +466,7 @@ def 
test_repos_sync_normal_change_remote_name(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -487,7 +486,7 @@ def test_repos_sync_normal_change_remote_name(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 with git.Repo(git_dir) as repo: # See the note in `test_repos_sync_normal_remove_remote()` @@ -512,7 +511,7 @@ def test_repos_sync_worktree_clone(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 worktree_dir = f"{target}/test" @@ -538,7 +537,7 @@ def test_repos_sync_worktree_init(configtype): templates["worktree_repo_simple"][configtype].format(root=target) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 worktree_dir = f"{target}/test" @@ -573,7 +572,7 @@ def test_repos_sync_invalid_syntax(configtype): ) else: raise NotImplementedError() - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode != 0 @@ -590,11 +589,11 @@ def test_repos_sync_unchanged(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 before = checksum_directory(target) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) after = checksum_directory(target) assert cmd.returncode == 0 @@ -614,7 +613,7 @@ def test_repos_sync_normal_change_to_worktree(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", 
"config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -626,7 +625,7 @@ def test_repos_sync_normal_change_to_worktree(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode != 0 assert "already exists" in cmd.stderr assert "not using a worktree setup" in cmd.stderr @@ -645,7 +644,7 @@ def test_repos_sync_worktree_change_to_normal(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode == 0 git_dir = os.path.join(target, "test") @@ -657,7 +656,7 @@ def test_repos_sync_worktree_change_to_normal(configtype): ) ) - cmd = grm(["repos", "sync", "--config", config.name]) + cmd = grm(["repos", "sync", "config", "--config", config.name]) assert cmd.returncode != 0 assert "already exists" in cmd.stderr assert "using a worktree setup" in cmd.stderr diff --git a/e2e_tests/update_requirementstxt.sh b/e2e_tests/update_requirementstxt.sh deleted file mode 100755 index 2f3db3d..0000000 --- a/e2e_tests/update_requirementstxt.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o errexit - -# shellcheck disable=SC1091 -source ./venv/bin/activate - -pip --disable-pip-version-check install -r ./requirements.txt - -pip3 list --outdated --format=freeze | grep -v '^\-e' | cut -d = -f 1 | while read -r package ; do - [[ "$package" == "pip" ]] && continue - [[ "$package" == "setuptools" ]] && continue - pip install --upgrade "${package}" - version="$(pip show "${package}" | grep '^Version' | cut -d ' ' -f 2)" - message="e2e_tests/pip: Update ${package} to ${version}" - pip freeze | grep -v '^pkg_resources' > requirements.txt - git add ./requirements.txt - git commit --message "${message}" -done diff --git a/src/config.rs b/src/config.rs index e81c6a0..36873fe 100644 --- a/src/config.rs +++ b/src/config.rs @@ 
-1,19 +1,61 @@ use serde::{Deserialize, Serialize}; +use std::process; + +use crate::output::*; use super::repo::RepoConfig; +use std::path::Path; + +use crate::get_token_from_command; +use crate::provider; +use crate::provider::Filter; +use crate::provider::Provider; + +pub type RemoteProvider = crate::provider::RemoteProvider; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum Config { + ConfigTree(ConfigTree), + ConfigProvider(ConfigProvider), +} + #[derive(Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] -pub struct Config { +pub struct ConfigTree { pub trees: Trees, } +#[derive(Debug, Serialize, Deserialize)] +pub struct ConfigProviderFilter { + pub access: Option, + pub owner: Option, + pub users: Option>, + pub groups: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ConfigProvider { + pub provider: RemoteProvider, + pub token_command: String, + pub root: String, + pub filters: Option, + + pub force_ssh: Option, + + pub api_url: Option, + + pub worktree: Option, + pub init_worktree: Option, +} + #[derive(Debug, Serialize, Deserialize)] pub struct Trees(Vec); impl Trees { pub fn to_config(self) -> Config { - Config { trees: self } + Config::ConfigTree(ConfigTree { trees: self }) } pub fn from_vec(vec: Vec) -> Self { @@ -30,6 +72,81 @@ impl Trees { } impl Config { + pub fn trees(self) -> Result { + match self { + Config::ConfigTree(config) => Ok(config.trees), + Config::ConfigProvider(config) => { + let token = match get_token_from_command(&config.token_command) { + Ok(token) => token, + Err(error) => { + print_error(&format!("Getting token from command failed: {}", error)); + process::exit(1); + } + }; + + let filters = config.filters.unwrap_or(ConfigProviderFilter { + access: Some(false), + owner: Some(false), + users: Some(vec![]), + groups: Some(vec![]), + }); + + let filter = Filter::new( + filters.users.unwrap_or_default(), + filters.groups.unwrap_or_default(), + filters.owner.unwrap_or(false), + 
filters.access.unwrap_or(false), + ); + + let repos = match config.provider { + RemoteProvider::Github => { + match provider::Github::new(filter, token, config.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos( + config.worktree.unwrap_or(false), + config.force_ssh.unwrap_or(false), + )? + } + RemoteProvider::Gitlab => { + match provider::Gitlab::new(filter, token, config.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos( + config.worktree.unwrap_or(false), + config.force_ssh.unwrap_or(false), + )? + } + }; + + let mut trees = vec![]; + + for (namespace, namespace_repos) in repos { + let tree = Tree { + root: crate::path_as_string(&Path::new(&config.root).join(namespace)), + repos: Some(namespace_repos), + }; + trees.push(tree); + } + Ok(Trees(trees)) + } + } + } + + pub fn from_trees(trees: Vec) -> Self { + Config::ConfigTree(ConfigTree { + trees: Trees::from_vec(trees), + }) + } + pub fn as_toml(&self) -> Result { match toml::to_string(self) { Ok(toml) => Ok(toml), @@ -49,7 +166,10 @@ pub struct Tree { pub repos: Option>, } -pub fn read_config(path: &str) -> Result { +pub fn read_config<'a, T>(path: &str) -> Result +where + T: for<'de> serde::Deserialize<'de>, +{ let content = match std::fs::read_to_string(&path) { Ok(s) => s, Err(e) => { @@ -64,7 +184,7 @@ pub fn read_config(path: &str) -> Result { } }; - let config: Config = match toml::from_str(&content) { + let config: T = match toml::from_str(&content) { Ok(c) => c, Err(_) => match serde_yaml::from_str(&content) { Ok(c) => c, diff --git a/src/grm/cmd.rs b/src/grm/cmd.rs index 7b88379..27a1c82 100644 --- a/src/grm/cmd.rs +++ b/src/grm/cmd.rs @@ -31,20 +31,53 @@ pub struct Repos { #[derive(Parser)] pub enum ReposAction { - #[clap( - visible_alias = "run", - about = "Synchronize the repositories to the configured values" - )] - 
Sync(Sync), - #[clap(about = "Generate a repository configuration from an existing file tree")] - Find(Find), + #[clap(subcommand)] + Sync(SyncAction), + #[clap(subcommand)] + Find(FindAction), #[clap(about = "Show status of configured repositories")] Status(OptionalConfig), } #[derive(Parser)] -#[clap()] -pub struct Sync { +#[clap(about = "Sync local repositories with a configured list")] +pub enum SyncAction { + #[clap( + about = "Synchronize the repositories to the configured values" + )] + Config(Config), + #[clap(about = "Synchronize the repositories from a remote provider")] + Remote(SyncRemoteArgs), +} + +#[derive(Parser)] +#[clap(about = "Generate a repository configuration from existing repositories")] +pub enum FindAction { + #[clap(about = "Find local repositories")] + Local(FindLocalArgs), + #[clap(about = "Find repositories on remote provider")] + Remote(FindRemoteArgs), + #[clap(about = "Find repositories as defined in the configuration file")] + Config(FindConfigArgs), +} + +#[derive(Parser)] +pub struct FindLocalArgs { + #[clap(help = "The path to search through")] + pub path: String, + + #[clap( + arg_enum, + short, + long, + help = "Format to produce", + default_value_t = ConfigFormat::Toml, + )] + pub format: ConfigFormat, +} + +#[derive(Parser)] +pub struct FindConfigArgs { #[clap( short, long, @@ -52,6 +85,145 @@ pub struct Sync { help = "Path to the configuration file" )] pub config: String, + + #[clap( + arg_enum, + short, + long, + help = "Format to produce", + default_value_t = ConfigFormat::Toml, + )] + pub format: ConfigFormat, +} + +#[derive(Parser)] +#[clap()] +pub struct FindRemoteArgs { + #[clap(short, long, help = "Path to the configuration file")] + pub config: Option, + + #[clap(arg_enum, short, long, help = "Remote provider to use")] + pub provider: RemoteProvider, + + #[clap( + multiple_occurrences = true, + name = "user", + long, + help = "Users to get repositories from" + )] + pub users: Vec, + + #[clap( + multiple_occurrences 
= true, + name = "group", + long, + help = "Groups to get repositories from" + )] + pub groups: Vec, + + #[clap(long, help = "Get repositories that belong to the requesting user")] + pub owner: bool, + + #[clap(long, help = "Get repositories that the requesting user has access to")] + pub access: bool, + + #[clap(long, help = "Always use SSH, even for public repositories")] + pub force_ssh: bool, + + #[clap(long, help = "Command to get API token")] + pub token_command: String, + + #[clap(long, help = "Root of the repo tree to produce")] + pub root: String, + + #[clap( + arg_enum, + short, + long, + help = "Format to produce", + default_value_t = ConfigFormat::Toml, + )] + pub format: ConfigFormat, + + #[clap( + long, + help = "Use worktree setup for repositories", + possible_values = &["true", "false"], + default_value = "false", + default_missing_value = "true", + min_values = 0, + max_values = 1, + )] + pub worktree: String, + + #[clap(long, help = "Base URL for the API")] + pub api_url: Option, +} + +#[derive(Parser)] +#[clap()] +pub struct Config { + #[clap( + short, + long, + default_value = "./config.toml", + help = "Path to the configuration file" + )] + pub config: String, +} + +pub type RemoteProvider = grm::provider::RemoteProvider; + +#[derive(Parser)] +#[clap()] +pub struct SyncRemoteArgs { + #[clap(arg_enum, short, long, help = "Remote provider to use")] + pub provider: RemoteProvider, + + #[clap( + multiple_occurrences = true, + name = "user", + long, + help = "Users to get repositories from" + )] + pub users: Vec, + + #[clap( + multiple_occurrences = true, + name = "group", + long, + help = "Groups to get repositories from" + )] + pub groups: Vec, + + #[clap(long, help = "Get repositories that belong to the requesting user")] + pub owner: bool, + + #[clap(long, help = "Get repositories that the requesting user has access to")] + pub access: bool, + + #[clap(long, help = "Always use SSH, even for public repositories")] + pub force_ssh: bool, + + 
#[clap(long, help = "Command to get API token")] + pub token_command: String, + + #[clap(long, help = "Root of the repo tree to produce")] + pub root: String, + + #[clap( + long, + help = "Use worktree setup for repositories", + possible_values = &["true", "false"], + default_value = "false", + default_missing_value = "true", + min_values = 0, + max_values = 1, + )] + pub worktree: String, + + #[clap(long, help = "Base URL for the API")] + pub api_url: Option, } #[derive(Parser)] @@ -67,21 +239,6 @@ pub enum ConfigFormat { Toml, } -#[derive(Parser)] -pub struct Find { - #[clap(help = "The path to search through")] - pub path: String, - - #[clap( - arg_enum, - short, - long, - help = "Format to produce", - default_value_t = ConfigFormat::Toml, - )] - pub format: ConfigFormat, -} - #[derive(Parser)] pub struct Worktree { #[clap(subcommand, name = "action")] diff --git a/src/grm/main.rs b/src/grm/main.rs index 50547da..40a8652 100644 --- a/src/grm/main.rs +++ b/src/grm/main.rs @@ -5,6 +5,8 @@ mod cmd; use grm::config; use grm::output::*; +use grm::provider; +use grm::provider::Provider; use grm::repo; fn main() { @@ -12,26 +14,104 @@ fn main() { match opts.subcmd { cmd::SubCommand::Repos(repos) => match repos.action { - cmd::ReposAction::Sync(sync) => { - let config = match config::read_config(&sync.config) { - Ok(config) => config, - Err(error) => { - print_error(&error); - process::exit(1); - } - }; - match grm::sync_trees(config) { - Ok(success) => { - if !success { - process::exit(1) + cmd::ReposAction::Sync(sync) => match sync { + cmd::SyncAction::Config(args) => { + let config = match config::read_config(&args.config) { + Ok(config) => config, + Err(error) => { + print_error(&error); + process::exit(1); + } + }; + match grm::sync_trees(config) { + Ok(success) => { + if !success { + process::exit(1) + } + } + Err(error) => { + print_error(&format!("Error syncing trees: {}", error)); + process::exit(1); } } - Err(error) => { - print_error(&format!("Error syncing 
trees: {}", error)); - process::exit(1); + } + cmd::SyncAction::Remote(args) => { + let token = match grm::get_token_from_command(&args.token_command) { + Ok(token) => token, + Err(error) => { + print_error(&format!("Getting token from command failed: {}", error)); + process::exit(1); + } + }; + + let filter = grm::provider::Filter::new( + args.users, + args.groups, + args.owner, + args.access, + ); + + let worktree = args.worktree == "true"; + + let repos = match args.provider { + cmd::RemoteProvider::Github => { + match grm::provider::Github::new(filter, token, args.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos(worktree, args.force_ssh) + } + cmd::RemoteProvider::Gitlab => { + match grm::provider::Gitlab::new(filter, token, args.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos(worktree, args.force_ssh) + } + }; + + match repos { + Ok(repos) => { + let mut trees: Vec = vec![]; + + for (namespace, repolist) in repos { + let tree = config::Tree { + root: Path::new(&args.root) + .join(namespace) + .display() + .to_string(), + repos: Some(repolist), + }; + trees.push(tree); + } + + let config = config::Config::from_trees(trees); + + match grm::sync_trees(config) { + Ok(success) => { + if !success { + process::exit(1) + } + } + Err(error) => { + print_error(&format!("Error syncing trees: {}", error)); + process::exit(1); + } + } + } + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } } } - } + }, cmd::ReposAction::Status(args) => match &args.config { Some(config_path) => { let config = match config::read_config(config_path) { @@ -79,47 +159,166 @@ fn main() { } } }, - cmd::ReposAction::Find(find) => { - let path = Path::new(&find.path); - if !path.exists() { - print_error(&format!("Path \"{}\" does not exist", path.display())); - process::exit(1); 
- } - if !path.is_dir() { - print_error(&format!("Path \"{}\" is not a directory", path.display())); - process::exit(1); - } - - let path = match path.canonicalize() { - Ok(path) => path, - Err(error) => { - print_error(&format!( - "Failed to canonicalize path \"{}\". This is a bug. Error message: {}", - &path.display(), - error - )); + cmd::ReposAction::Find(find) => match find { + cmd::FindAction::Local(args) => { + let path = Path::new(&args.path); + if !path.exists() { + print_error(&format!("Path \"{}\" does not exist", path.display())); process::exit(1); } - }; - - let (found_repos, warnings) = match grm::find_in_tree(&path) { - Ok((repos, warnings)) => (repos, warnings), - Err(error) => { - print_error(&error); + if !path.is_dir() { + print_error(&format!("Path \"{}\" is not a directory", path.display())); process::exit(1); } - }; - let trees = grm::config::Trees::from_vec(vec![found_repos]); - if trees.as_vec_ref().iter().all(|t| match &t.repos { - None => false, - Some(r) => r.is_empty(), - }) { - print_warning("No repositories found"); - } else { - let config = trees.to_config(); + let path = match path.canonicalize() { + Ok(path) => path, + Err(error) => { + print_error(&format!( + "Failed to canonicalize path \"{}\". This is a bug. 
Error message: {}", + &path.display(), + error + )); + process::exit(1); + } + }; - match find.format { + let (found_repos, warnings) = match grm::find_in_tree(&path) { + Ok((repos, warnings)) => (repos, warnings), + Err(error) => { + print_error(&error); + process::exit(1); + } + }; + + let trees = grm::config::Trees::from_vec(vec![found_repos]); + if trees.as_vec_ref().iter().all(|t| match &t.repos { + None => false, + Some(r) => r.is_empty(), + }) { + print_warning("No repositories found"); + } else { + let config = trees.to_config(); + + match args.format { + cmd::ConfigFormat::Toml => { + let toml = match config.as_toml() { + Ok(toml) => toml, + Err(error) => { + print_error(&format!( + "Failed converting config to TOML: {}", + &error + )); + process::exit(1); + } + }; + print!("{}", toml); + } + cmd::ConfigFormat::Yaml => { + let yaml = match config.as_yaml() { + Ok(yaml) => yaml, + Err(error) => { + print_error(&format!( + "Failed converting config to YAML: {}", + &error + )); + process::exit(1); + } + }; + print!("{}", yaml); + } + } + } + for warning in warnings { + print_warning(&warning); + } + } + cmd::FindAction::Config(args) => { + let config: crate::config::ConfigProvider = + match config::read_config(&args.config) { + Ok(config) => config, + Err(error) => { + print_error(&error); + process::exit(1); + } + }; + + let token = match grm::get_token_from_command(&config.token_command) { + Ok(token) => token, + Err(error) => { + print_error(&format!("Getting token from command failed: {}", error)); + process::exit(1); + } + }; + + let filters = config.filters.unwrap_or(grm::config::ConfigProviderFilter { + access: Some(false), + owner: Some(false), + users: Some(vec![]), + groups: Some(vec![]), + }); + + let filter = provider::Filter::new( + filters.users.unwrap_or_default(), + filters.groups.unwrap_or_default(), + filters.owner.unwrap_or(false), + filters.access.unwrap_or(false), + ); + + let repos = match config.provider { + 
provider::RemoteProvider::Github => { + match match provider::Github::new(filter, token, config.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos( + config.worktree.unwrap_or(false), + config.force_ssh.unwrap_or(false), + ) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + } + provider::RemoteProvider::Gitlab => { + match match provider::Gitlab::new(filter, token, config.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos( + config.worktree.unwrap_or(false), + config.force_ssh.unwrap_or(false), + ) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + } + }; + + let mut trees = vec![]; + + for (namespace, namespace_repos) in repos { + let tree = config::Tree { + root: grm::path_as_string(&Path::new(&config.root).join(namespace)), + repos: Some(namespace_repos), + }; + trees.push(tree); + } + + let config = config::Config::from_trees(trees); + + match args.format { cmd::ConfigFormat::Toml => { let toml = match config.as_toml() { Ok(toml) => toml, @@ -148,10 +347,94 @@ fn main() { } } } - for warning in warnings { - print_warning(&warning); + cmd::FindAction::Remote(args) => { + let token = match grm::get_token_from_command(&args.token_command) { + Ok(token) => token, + Err(error) => { + print_error(&format!("Getting token from command failed: {}", error)); + process::exit(1); + } + }; + + let filter = grm::provider::Filter::new( + args.users, + args.groups, + args.owner, + args.access, + ); + + let worktree = args.worktree == "true"; + + let repos = match args.provider { + cmd::RemoteProvider::Github => { + match grm::provider::Github::new(filter, token, args.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: 
{}", error)); + process::exit(1); + } + } + .get_repos(worktree, args.force_ssh) + } + cmd::RemoteProvider::Gitlab => { + match grm::provider::Gitlab::new(filter, token, args.api_url) { + Ok(provider) => provider, + Err(error) => { + print_error(&format!("Error: {}", error)); + process::exit(1); + } + } + .get_repos(worktree, args.force_ssh) + } + }; + + let repos = repos.unwrap_or_else(|error| { + print_error(&format!("Error: {}", error)); + process::exit(1); + }); + + let mut trees: Vec = vec![]; + + for (namespace, repolist) in repos { + let tree = config::Tree { + root: Path::new(&args.root).join(namespace).display().to_string(), + repos: Some(repolist), + }; + trees.push(tree); + } + + let config = config::Config::from_trees(trees); + + match args.format { + cmd::ConfigFormat::Toml => { + let toml = match config.as_toml() { + Ok(toml) => toml, + Err(error) => { + print_error(&format!( + "Failed converting config to TOML: {}", + &error + )); + process::exit(1); + } + }; + print!("{}", toml); + } + cmd::ConfigFormat::Yaml => { + let yaml = match config.as_yaml() { + Ok(yaml) => yaml, + Err(error) => { + print_error(&format!( + "Failed converting config to YAML: {}", + &error + )); + process::exit(1); + } + }; + print!("{}", yaml); + } + } } - } + }, }, cmd::SubCommand::Worktree(args) => { let cwd = std::env::current_dir().unwrap_or_else(|error| { diff --git a/src/lib.rs b/src/lib.rs index d598aac..95b0b77 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,5 @@ #![feature(io_error_more)] +#![feature(const_option_ext)] use std::fs; use std::path::{Path, PathBuf}; @@ -6,13 +7,14 @@ use std::process; pub mod config; pub mod output; +pub mod provider; pub mod repo; pub mod table; use config::{Config, Tree}; use output::*; -use repo::{clone_repo, detect_remote_type, Remote, RepoConfig}; +use repo::{clone_repo, detect_remote_type, Remote, RemoteType, RepoConfig}; pub use repo::{RemoteTrackingStatus, Repo, RepoErrorKind, WorktreeRemoveFailureReason}; @@ -102,36 +104,57 
@@ fn expand_path(path: &Path) -> PathBuf { Path::new(&expanded_path).to_path_buf() } +pub fn get_token_from_command(command: &str) -> Result { + let output = std::process::Command::new("/usr/bin/env") + .arg("sh") + .arg("-c") + .arg(command) + .output() + .map_err(|error| format!("Failed to run token-command: {}", error))?; + + let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?; + let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?; + + if !output.status.success() { + if !stderr.is_empty() { + return Err(format!("Token command failed: {}", stderr)); + } else { + return Err(String::from("Token command failed.")); + } + } + + if !stderr.is_empty() { + return Err(format!("Token command produced stderr: {}", stderr)); + } + + if stdout.is_empty() { + return Err(String::from("Token command did not produce output")); + } + + let token = stdout + .split('\n') + .next() + .ok_or_else(|| String::from("Output did not contain any newline"))?; + + Ok(token.to_string()) +} + fn sync_repo(root_path: &Path, repo: &RepoConfig) -> Result<(), String> { let repo_path = root_path.join(&repo.name); let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup); - let mut repo_handle = None; - if repo_path.exists() { if repo.worktree_setup && !actual_git_directory.exists() { return Err(String::from( "Repo already exists, but is not using a worktree setup", )); - } - repo_handle = match Repo::open(&repo_path, repo.worktree_setup) { - Ok(repo) => Some(repo), - Err(error) => { - if !repo.worktree_setup && Repo::open(&repo_path, true).is_ok() { - return Err(String::from( - "Repo already exists, but is using a worktree setup", - )); - } else { - return Err(format!("Opening repository failed: {}", error)); - } - } }; } else if matches!(&repo.remotes, None) || repo.remotes.as_ref().unwrap().is_empty() { print_repo_action( &repo.name, "Repository does not have remotes configured, initializing new", ); - 
repo_handle = match Repo::init(&repo_path, repo.worktree_setup) { + match Repo::init(&repo_path, repo.worktree_setup) { Ok(r) => { print_repo_success(&repo.name, "Repository created"); Some(r) @@ -139,7 +162,7 @@ fn sync_repo(root_path: &Path, repo: &RepoConfig) -> Result<(), String> { Err(e) => { return Err(format!("Repository failed during init: {}", e)); } - } + }; } else { let first = repo.remotes.as_ref().unwrap().first().unwrap(); @@ -152,11 +175,32 @@ fn sync_repo(root_path: &Path, repo: &RepoConfig) -> Result<(), String> { } }; } - if let Some(remotes) = &repo.remotes { - let repo_handle = repo_handle.unwrap_or_else(|| { - Repo::open(&repo_path, repo.worktree_setup).unwrap_or_else(|_| process::exit(1)) - }); + let repo_handle = match Repo::open(&repo_path, repo.worktree_setup) { + Ok(repo) => repo, + Err(error) => { + if !repo.worktree_setup && Repo::open(&repo_path, true).is_ok() { + return Err(String::from( + "Repo already exists, but is using a worktree setup", + )); + } else { + return Err(format!("Opening repository failed: {}", error)); + } + } + }; + + if repo.worktree_setup { + match repo_handle.default_branch() { + Ok(branch) => { + add_worktree(&repo_path, &branch.name()?, None, None, false)?; + } + Err(_error) => print_repo_error( + &repo.name, + "Could not determine default branch, skipping worktree initializtion", + ), + } + } + if let Some(remotes) = &repo.remotes { let current_remotes: Vec = repo_handle .remotes() .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?; @@ -231,7 +275,7 @@ pub fn find_unmanaged_repos( pub fn sync_trees(config: Config) -> Result { let mut failures = false; - for tree in config.trees.as_vec() { + for tree in config.trees()?.as_vec() { let repos = tree.repos.unwrap_or_default(); let root_path = expand_path(Path::new(&tree.root)); diff --git a/src/provider/github.rs b/src/provider/github.rs new file mode 100644 index 0000000..536f3c5 --- /dev/null +++ b/src/provider/github.rs @@ -0,0 
+1,144 @@ +use serde::Deserialize; + +use super::ApiErrorResponse; +use super::Filter; +use super::JsonError; +use super::Project; +use super::Provider; +use super::SecretToken; + +const PROVIDER_NAME: &str = "github"; +const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json"; +const GITHUB_API_BASEURL: &str = + option_env!("GITHUB_API_BASEURL").unwrap_or("https://api.github.com"); + +#[derive(Deserialize)] +pub struct GithubProject { + pub name: String, + pub full_name: String, + pub clone_url: String, + pub ssh_url: String, + pub private: bool, +} + +#[derive(Deserialize)] +struct GithubUser { + #[serde(rename = "login")] + pub username: String, +} + +impl Project for GithubProject { + fn name(&self) -> String { + self.name.clone() + } + + fn namespace(&self) -> String { + self.full_name + .rsplit_once('/') + .expect("Github project name did not include a namespace") + .0 + .to_string() + } + + fn ssh_url(&self) -> String { + self.ssh_url.clone() + } + + fn http_url(&self) -> String { + self.clone_url.clone() + } + + fn private(&self) -> bool { + self.private + } +} + +#[derive(Deserialize)] +pub struct GithubApiErrorResponse { + pub message: String, +} + +impl JsonError for GithubApiErrorResponse { + fn to_string(self) -> String { + self.message + } +} + +pub struct Github { + filter: Filter, + secret_token: SecretToken, +} + +impl Provider for Github { + type Project = GithubProject; + type Error = GithubApiErrorResponse; + + fn new( + filter: Filter, + secret_token: SecretToken, + api_url_override: Option, + ) -> Result { + if api_url_override.is_some() { + return Err("API URL overriding is not supported for Github".to_string()); + } + Ok(Self { + filter, + secret_token, + }) + } + + fn name(&self) -> String { + String::from(PROVIDER_NAME) + } + + fn filter(&self) -> Filter { + self.filter.clone() + } + + fn secret_token(&self) -> SecretToken { + self.secret_token.clone() + } + + fn auth_header_key() -> String { + "token".to_string() + } + + fn 
get_user_projects( + &self, + user: &str, + ) -> Result, ApiErrorResponse> { + self.call_list( + &format!("{GITHUB_API_BASEURL}/users/{user}/repos"), + Some(ACCEPT_HEADER_JSON), + ) + } + + fn get_group_projects( + &self, + group: &str, + ) -> Result, ApiErrorResponse> { + self.call_list( + &format!("{GITHUB_API_BASEURL}/orgs/{group}/repos?type=all"), + Some(ACCEPT_HEADER_JSON), + ) + } + + fn get_accessible_projects( + &self, + ) -> Result, ApiErrorResponse> { + self.call_list( + &format!("{GITHUB_API_BASEURL}/user/repos"), + Some(ACCEPT_HEADER_JSON), + ) + } + + fn get_current_user(&self) -> Result> { + Ok(super::call::( + &format!("{GITHUB_API_BASEURL}/user"), + &Self::auth_header_key(), + &self.secret_token(), + Some(ACCEPT_HEADER_JSON), + )? + .username) + } +} diff --git a/src/provider/gitlab.rs b/src/provider/gitlab.rs new file mode 100644 index 0000000..170141b --- /dev/null +++ b/src/provider/gitlab.rs @@ -0,0 +1,165 @@ +use serde::Deserialize; + +use super::ApiErrorResponse; +use super::Filter; +use super::JsonError; +use super::Project; +use super::Provider; +use super::SecretToken; + +const PROVIDER_NAME: &str = "gitlab"; +const ACCEPT_HEADER_JSON: &str = "application/json"; +const GITLAB_API_BASEURL: &str = option_env!("GITLAB_API_BASEURL").unwrap_or("https://gitlab.com"); + +#[derive(Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum GitlabVisibility { + Private, + Internal, + Public, +} + +#[derive(Deserialize)] +pub struct GitlabProject { + #[serde(rename = "path")] + pub name: String, + pub path_with_namespace: String, + pub http_url_to_repo: String, + pub ssh_url_to_repo: String, + pub visibility: GitlabVisibility, +} + +#[derive(Deserialize)] +struct GitlabUser { + pub username: String, +} + +impl Project for GitlabProject { + fn name(&self) -> String { + self.name.clone() + } + + fn namespace(&self) -> String { + self.path_with_namespace + .rsplit_once('/') + .expect("Gitlab project name did not include a namespace") + .0 + .to_string() 
+ } + + fn ssh_url(&self) -> String { + self.ssh_url_to_repo.clone() + } + + fn http_url(&self) -> String { + self.http_url_to_repo.clone() + } + + fn private(&self) -> bool { + matches!(self.visibility, GitlabVisibility::Private) + } +} + +#[derive(Deserialize)] +pub struct GitlabApiErrorResponse { + #[serde(alias = "error_description")] + pub message: String, +} + +impl JsonError for GitlabApiErrorResponse { + fn to_string(self) -> String { + self.message + } +} + +pub struct Gitlab { + filter: Filter, + secret_token: SecretToken, + api_url_override: Option, +} + +impl Gitlab { + fn api_url(&self) -> String { + self.api_url_override + .as_ref() + .unwrap_or(&GITLAB_API_BASEURL.to_string()) + .trim_end_matches('/') + .to_string() + } +} + +impl Provider for Gitlab { + type Project = GitlabProject; + type Error = GitlabApiErrorResponse; + + fn new( + filter: Filter, + secret_token: SecretToken, + api_url_override: Option, + ) -> Result { + Ok(Self { + filter, + secret_token, + api_url_override, + }) + } + + fn name(&self) -> String { + String::from(PROVIDER_NAME) + } + + fn filter(&self) -> Filter { + self.filter.clone() + } + + fn secret_token(&self) -> SecretToken { + self.secret_token.clone() + } + + fn auth_header_key() -> String { + "bearer".to_string() + } + + fn get_user_projects( + &self, + user: &str, + ) -> Result, ApiErrorResponse> { + self.call_list( + &format!("{}/api/v4/users/{}/projects", self.api_url(), user), + Some(ACCEPT_HEADER_JSON), + ) + } + + fn get_group_projects( + &self, + group: &str, + ) -> Result, ApiErrorResponse> { + self.call_list( + &format!( + "{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false", + self.api_url(), + group + ), + Some(ACCEPT_HEADER_JSON), + ) + } + + fn get_accessible_projects( + &self, + ) -> Result, ApiErrorResponse> { + self.call_list( + &format!("{}/api/v4/projects", self.api_url(),), + Some(ACCEPT_HEADER_JSON), + ) + } + + fn get_current_user(&self) -> Result> { + Ok(super::call::( + 
&format!("{}/api/v4/user", self.api_url()), + &Self::auth_header_key(), + &self.secret_token(), + Some(ACCEPT_HEADER_JSON), + )? + .username) + } +} diff --git a/src/provider/mod.rs b/src/provider/mod.rs new file mode 100644 index 0000000..1811f8c --- /dev/null +++ b/src/provider/mod.rs @@ -0,0 +1,340 @@ +use serde::{Deserialize, Serialize}; + +// Required to use the `json()` method from the trait +use isahc::ReadResponseExt; + +pub mod github; +pub mod gitlab; + +pub use github::Github; +pub use gitlab::Gitlab; + +use crate::{Remote, RemoteType, RepoConfig}; + +use std::collections::HashMap; + +#[derive(Debug, Deserialize, Serialize, clap::ArgEnum, Clone)] +pub enum RemoteProvider { + #[serde(alias = "github", alias = "GitHub")] + Github, + #[serde(alias = "gitlab", alias = "GitLab")] + Gitlab, +} + +#[derive(Deserialize)] +#[serde(untagged)] +enum ProjectResponse { + Success(Vec), + Failure(U), +} + +pub trait Project { + fn into_repo_config( + self, + provider_name: &str, + worktree_setup: bool, + force_ssh: bool, + ) -> RepoConfig + where + Self: Sized, + { + RepoConfig { + name: self.name(), + worktree_setup, + remotes: Some(vec![Remote { + name: String::from(provider_name), + url: if force_ssh || self.private() { + self.ssh_url() + } else { + self.http_url() + }, + remote_type: if force_ssh || self.private() { + RemoteType::Ssh + } else { + RemoteType::Https + }, + }]), + } + } + + fn name(&self) -> String; + fn namespace(&self) -> String; + fn ssh_url(&self) -> String; + fn http_url(&self) -> String; + fn private(&self) -> bool; +} + +type SecretToken = String; + +#[derive(Clone)] +pub struct Filter { + users: Vec, + groups: Vec, + owner: bool, + access: bool, +} + +impl Filter { + pub fn new(users: Vec, groups: Vec, owner: bool, access: bool) -> Self { + Filter { + users, + groups, + owner, + access, + } + } +} + +pub enum ApiErrorResponse +where + T: JsonError, +{ + Json(T), + String(String), +} + +impl From for ApiErrorResponse +where + T: JsonError, +{ + 
fn from(s: String) -> ApiErrorResponse { + ApiErrorResponse::String(s) + } +} + +pub trait JsonError { + fn to_string(self) -> String; +} + +pub trait Provider { + type Project: serde::de::DeserializeOwned + Project; + type Error: serde::de::DeserializeOwned + JsonError; + + fn new( + filter: Filter, + secret_token: SecretToken, + api_url_override: Option, + ) -> Result + where + Self: Sized; + + fn name(&self) -> String; + fn filter(&self) -> Filter; + fn secret_token(&self) -> SecretToken; + fn auth_header_key() -> String; + + fn get_user_projects( + &self, + user: &str, + ) -> Result, ApiErrorResponse>; + + fn get_group_projects( + &self, + group: &str, + ) -> Result, ApiErrorResponse>; + + fn get_own_projects(&self) -> Result, ApiErrorResponse> { + self.get_user_projects(&self.get_current_user()?) + } + + fn get_accessible_projects(&self) -> Result, ApiErrorResponse>; + + fn get_current_user(&self) -> Result>; + + /// + /// Calls the API at specific uri and expects a successful response of Vec back, or an error + /// response U + /// + /// Handles paging with "link" HTTP headers properly and reads all pages to + /// the end. 
+ fn call_list( + &self, + uri: &str, + accept_header: Option<&str>, + ) -> Result, ApiErrorResponse> { + let mut results = vec![]; + + let client = isahc::HttpClient::new().map_err(|error| error.to_string())?; + + let request = isahc::Request::builder() + .uri(uri) + .method("GET") + .header("accept", accept_header.unwrap_or("application/json")) + .header( + "authorization", + format!("{} {}", Self::auth_header_key(), &self.secret_token()), + ) + .body(()) + .map_err(|error| error.to_string())?; + + let mut response = client + .send(request) + .map_err(|error| ApiErrorResponse::String(error.to_string()))?; + + if !response.status().is_success() { + let r: Self::Error = response + .json() + .map_err(|error| format!("Failed deserializing error response: {}", error))?; + return Err(ApiErrorResponse::Json(r)); + } + + let result: Vec = response + .json() + .map_err(|error| format!("Failed deserializing response: {}", error))?; + + results.extend(result); + + if let Some(link_header) = response.headers().get("link") { + let link_header = link_header.to_str().map_err(|error| error.to_string())?; + + let link_header = + parse_link_header::parse(link_header).map_err(|error| error.to_string())?; + + let next_page = link_header.get(&Some(String::from("next"))); + + if let Some(page) = next_page { + let following_repos = self.call_list(&page.raw_uri, accept_header)?; + results.extend(following_repos); + } + } + + Ok(results) + } + + fn get_repos( + &self, + worktree_setup: bool, + force_ssh: bool, + ) -> Result>, String> { + let mut repos = vec![]; + + if self.filter().owner { + repos.extend(self.get_own_projects().map_err(|error| match error { + ApiErrorResponse::Json(x) => x.to_string(), + ApiErrorResponse::String(s) => s, + })?); + } + + if self.filter().access { + let accessible_projects = + self.get_accessible_projects() + .map_err(|error| match error { + ApiErrorResponse::Json(x) => x.to_string(), + ApiErrorResponse::String(s) => s, + })?; + + for accessible_project in 
accessible_projects { + let mut already_present = false; + for repo in &repos { + if repo.name() == accessible_project.name() + && repo.namespace() == accessible_project.namespace() + { + already_present = true; + } + } + if !already_present { + repos.push(accessible_project); + } + } + } + + for user in &self.filter().users { + let user_projects = self.get_user_projects(user).map_err(|error| match error { + ApiErrorResponse::Json(x) => x.to_string(), + ApiErrorResponse::String(s) => s, + })?; + + for user_project in user_projects { + let mut already_present = false; + for repo in &repos { + if repo.name() == user_project.name() + && repo.namespace() == user_project.namespace() + { + already_present = true; + } + } + if !already_present { + repos.push(user_project); + } + } + } + + for group in &self.filter().groups { + let group_projects = self + .get_group_projects(group) + .map_err(|error| match error { + ApiErrorResponse::Json(x) => x.to_string(), + ApiErrorResponse::String(s) => s, + })?; + for group_project in group_projects { + let mut already_present = false; + for repo in &repos { + if repo.name() == group_project.name() + && repo.namespace() == group_project.namespace() + { + already_present = true; + } + } + + if !already_present { + repos.push(group_project); + } + } + } + + let mut ret: HashMap> = HashMap::new(); + + for repo in repos { + let namespace = repo.namespace().clone(); + + let repo = repo.into_repo_config(&self.name(), worktree_setup, force_ssh); + + ret.entry(namespace).or_insert(vec![]).push(repo); + } + + Ok(ret) + } +} + +fn call( + uri: &str, + auth_header_key: &str, + secret_token: &str, + accept_header: Option<&str>, +) -> Result> +where + T: serde::de::DeserializeOwned, + U: serde::de::DeserializeOwned + JsonError, +{ + let client = isahc::HttpClient::new().map_err(|error| error.to_string())?; + + let request = isahc::Request::builder() + .uri(uri) + .header("accept", accept_header.unwrap_or("application/json")) + .header( + 
"authorization", + format!("{} {}", &auth_header_key, &secret_token), + ) + .body(()) + .map_err(|error| ApiErrorResponse::String(error.to_string()))?; + + let mut response = client + .send(request) + .map_err(|error| ApiErrorResponse::String(error.to_string()))?; + + let success = response.status().is_success(); + + if !success { + let response: U = response + .json() + .map_err(|error| format!("Failed deserializing error response: {}", error))?; + + return Err(ApiErrorResponse::Json(response)); + } + + let response: T = response + .json() + .map_err(|error| format!("Failed deserializing response: {}", error))?; + + Ok(response) +} diff --git a/src/table.rs b/src/table.rs index fcea35f..b395e20 100644 --- a/src/table.rs +++ b/src/table.rs @@ -144,7 +144,7 @@ pub fn get_worktree_status_table( pub fn get_status_table(config: crate::Config) -> Result<(Vec, Vec), String> { let mut errors = Vec::new(); let mut tables = Vec::new(); - for tree in config.trees.as_vec() { + for tree in config.trees()?.as_vec() { let repos = tree.repos.unwrap_or_default(); let root_path = crate::expand_path(Path::new(&tree.root));