Compare commits
166 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e75aead3a8 | |||
| dca2b3c9b4 | |||
| a71711978e | |||
| 90d188e01e | |||
| 2e6166e807 | |||
| 8aaaa55d45 | |||
| df39bb3076 | |||
| bc3d4e1c49 | |||
| 32eb4676ee | |||
| 5d7480f7a4 | |||
| 2d34ba1bd7 | |||
| 5b78c3ba9e | |||
| 95cffc5f0e | |||
| 4841920c64 | |||
| c439595d92 | |||
| 62c1e430b2 | |||
| 1212917fae | |||
| f41b9b1684 | |||
| b17f4d68ef | |||
| b8c552fb62 | |||
| f2d2482476 | |||
| 6ef759a14e | |||
| 10e02c20a1 | |||
| 433dc090e0 | |||
| 35e7c34d11 | |||
| 50a0f4d766 | |||
| 1db3eadd4c | |||
| af45b13612 | |||
| c994c90247 | |||
| 3e8aad2221 | |||
| 127dd0535e | |||
| 664d44eddc | |||
| ad206297d8 | |||
| f2f1d5bcaf | |||
| 881a33dc96 | |||
| 38c66cad62 | |||
| 7ad51ccb47 | |||
| dd65f2cd81 | |||
| f01568a695 | |||
| be085e9b0f | |||
| 3557dd2686 | |||
| 908094f48b | |||
| c3c1c98913 | |||
| e940ab69fb | |||
| 1cf4e85014 | |||
| 14c95f2704 | |||
| 5f878793fd | |||
| fd6400ed68 | |||
| faf68e2052 | |||
| 7296795aec | |||
| 88252fffc8 | |||
| e67f5a7db4 | |||
| 87e0247b48 | |||
| d490d3ab84 | |||
| f7870797ac | |||
| 17ffc793e0 | |||
| d3738f0887 | |||
| 7da879d483 | |||
| c0bb71f84f | |||
| 230f380a6a | |||
| 852f445b1f | |||
| 584f68ba42 | |||
| 92092ed4af | |||
| fadf687a3e | |||
| 3a18870537 | |||
| cf80678ccc | |||
| 08ce4b6add | |||
| 39075a6269 | |||
| 906ead80a4 | |||
| 7038661296 | |||
| 543bf94a51 | |||
| 453f73c2a0 | |||
| 7e673200c8 | |||
| 44a716248e | |||
| d20006a325 | |||
| f8adec1413 | |||
| 868269359c | |||
| 61d4a4a0d8 | |||
| 4e4de95a07 | |||
| 9b64de7991 | |||
| e45de3b498 | |||
| 6e4c388195 | |||
| 6436a8194e | |||
| f10ae25b2a | |||
| fd6b3b7438 | |||
| d68ff012f2 | |||
| 9aad65edac | |||
| c370ef5815 | |||
| 8f5b743ea4 | |||
| c0e981dbd4 | |||
| 4303621b30 | |||
| 63e04a9dcf | |||
| 08ee946f2e | |||
| 81de5a2d70 | |||
| 3bf118f99a | |||
| dbf93c8f0e | |||
| 1ae0ceff60 | |||
| 07fa3ca291 | |||
| ab1892cbeb | |||
| ed1edf5075 | |||
| e6e9940757 | |||
| ff48b2a017 | |||
| ec45678ce3 | |||
| 9acf5b10d5 | |||
| eaf8e2bfa2 | |||
| 31b90af066 | |||
| f9d9dc587a | |||
| a94bd19362 | |||
| ef8a57c60e | |||
| 7a2fa7ae3f | |||
| 599973e10a | |||
| 0485facf33 | |||
| 2f6405ea10 | |||
| a4e993b7fc | |||
| 0fb9a22d47 | |||
| 76130c5b48 | |||
| 9f6c84d78c | |||
| fc91ee7a01 | |||
| c0168c3650 | |||
| 717b0d3a74 | |||
| ef381c7421 | |||
| fcbad5a3eb | |||
| cbc9792755 | |||
| ae9a928d45 | |||
| 3ff7b61518 | |||
| 3ac88260b5 | |||
| 54fc48b37d | |||
| 61a8d63374 | |||
| 92e4856dd9 | |||
| 02e9de0cbd | |||
| 552b3a6aad | |||
| fcc22791e5 | |||
| b183590096 | |||
| 27586b5ff0 | |||
| 70eac10eaa | |||
| 1e941e02ed | |||
| 1ffc522d51 | |||
| 66814876a8 | |||
| 3501b785c9 | |||
| 24badb9b64 | |||
| 4c94ead06f | |||
| 7514d1fd3e | |||
| d85c98c3ef | |||
| e3563fcaa6 | |||
| 1a1231b672 | |||
| a3ccea9dcb | |||
| 4a9f1bc278 | |||
| 0b181b9b79 | |||
| 6d747d8e89 | |||
| 72dd861677 | |||
| 59c6164c1f | |||
| 4722d5a8ff | |||
| 4eb88260c8 | |||
| a51e5f8918 | |||
| c66620a0fc | |||
| f0c8805cf3 | |||
| da601c2d5f | |||
| ed06c52c8f | |||
| 14b200ee3d | |||
| d5eddc4476 | |||
| d26a76d064 | |||
| d39df526de | |||
| 6e6050c71b | |||
| 98580d32ad | |||
| 115eb7c74a | |||
|
|
8b7b91d005 |
0
BRANCH_NAMESPACE_SEPARATOR
Normal file
0
BRANCH_NAMESPACE_SEPARATOR
Normal file
49
CONTRIBUTING.md
Normal file
49
CONTRIBUTING.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# Contributing
|
||||
|
||||
GRM is still in very early development. I started GRM mainly to scratch my own
|
||||
itches (and am heavily dogfooding it). If you have a new use case for GRM, go
|
||||
for it!
|
||||
|
||||
The branching strategy is a simplified
|
||||
[git-flow](https://nvie.com/posts/a-successful-git-branching-model/).
|
||||
|
||||
* `master` is the "production" branch. Each commit is a new release.
|
||||
* `develop` is the branch where new stuff is coming in.
|
||||
* feature branches branch off of `develop` and merge back into it.
|
||||
|
||||
So to contribute, just fork the repo and create a pull request against
|
||||
`develop`. If you plan bigger changes, please consider opening an issue first,
|
||||
so we can discuss it.
|
||||
|
||||
If you want, add yourself to the `CONTRIBUTORS` file in your pull request.
|
||||
|
||||
## Code formatting
|
||||
|
||||
For Rust, just use `cargo fmt`. For Python, use
|
||||
[black](https://github.com/psf/black). I'd rather not spend any effort in
|
||||
configuring the formatters (not possible for black anyway).
|
||||
|
||||
## Tooling
|
||||
|
||||
GRM uses [`just`](https://github.com/casey/just) as a command runner. See
|
||||
[here](https://github.com/casey/just#installation) for installation
|
||||
instructions (it's most likely just a simple `cargo install just`).
|
||||
|
||||
## Testing
|
||||
|
||||
There are two distinct test suites: One for unit test (`just test-unit`) and
|
||||
integration tests (`just test-integration`) that is part of the rust crate, and
|
||||
a separate e2e test suite in python (`just test-e2e`).
|
||||
|
||||
To run all tests, run `just test`.
|
||||
|
||||
When contributing, consider whether it makes sense to add tests that to prevent
|
||||
regressions in the future. When fixing bugs, it makes sense to add tests that
|
||||
expose the wrong behaviour beforehand.
|
||||
|
||||
## Documentation
|
||||
|
||||
The documentation lives in `docs` and uses
|
||||
[mdBook](https://github.com/rust-lang/mdBook). Please document new user-facing
|
||||
features here!
|
||||
|
||||
1
CONTRIBUTORS
Normal file
1
CONTRIBUTORS
Normal file
@@ -0,0 +1 @@
|
||||
nonnominandus
|
||||
755
Cargo.lock
generated
755
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
52
Cargo.toml
52
Cargo.toml
@@ -1,7 +1,8 @@
|
||||
[package]
|
||||
name = "git-repo-manager"
|
||||
version = "0.4.0"
|
||||
version = "0.7.1"
|
||||
edition = "2021"
|
||||
|
||||
authors = [
|
||||
"Hannes Körber <hannes@hkoerber.de>",
|
||||
]
|
||||
@@ -26,40 +27,69 @@ rust-version = "1.57"
|
||||
|
||||
license = "GPL-3.0-only"
|
||||
|
||||
[profile.e2e-tests]
|
||||
inherits = "release"
|
||||
|
||||
[lib]
|
||||
name = "grm"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "grm"
|
||||
path = "src/main.rs"
|
||||
path = "src/grm/main.rs"
|
||||
|
||||
[dependencies]
|
||||
|
||||
[dependencies.toml]
|
||||
version = "0.5.8"
|
||||
version = "=0.5.9"
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1.0.130"
|
||||
version = "=1.0.137"
|
||||
features = ["derive"]
|
||||
|
||||
[dependencies.git2]
|
||||
version = "0.13.24"
|
||||
version = "=0.14.4"
|
||||
|
||||
[dependencies.shellexpand]
|
||||
version = "2.1.0"
|
||||
version = "=2.1.0"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "3.0.0-beta.5"
|
||||
version = "=3.1.18"
|
||||
features = ["derive", "cargo"]
|
||||
|
||||
[dependencies.console]
|
||||
version = "0.15.0"
|
||||
version = "=0.15.0"
|
||||
|
||||
[dependencies.regex]
|
||||
version = "1.5.4"
|
||||
version = "=1.5.6"
|
||||
|
||||
[dependencies.comfy-table]
|
||||
version = "5.0.0"
|
||||
version = "=5.0.1"
|
||||
|
||||
[dependencies.serde_yaml]
|
||||
version = "=0.8.24"
|
||||
|
||||
[dependencies.serde_json]
|
||||
version = "=1.0.81"
|
||||
|
||||
[dependencies.isahc]
|
||||
version = "=1.7.2"
|
||||
default-features = false
|
||||
features = ["json", "http2", "text-decoding"]
|
||||
|
||||
[dependencies.parse_link_header]
|
||||
version = "=0.3.2"
|
||||
|
||||
[dependencies.url-escape]
|
||||
version = "=0.1.1"
|
||||
|
||||
[dev-dependencies.tempdir]
|
||||
version = "0.3.7"
|
||||
version = "=0.3.7"
|
||||
|
||||
[features]
|
||||
static-build = [
|
||||
"git2/vendored-openssl",
|
||||
"git2/vendored-libgit2",
|
||||
"isahc/static-curl",
|
||||
"isahc/static-ssl",
|
||||
]
|
||||
|
||||
49
Justfile
49
Justfile
@@ -1,17 +1,43 @@
|
||||
set positional-arguments
|
||||
|
||||
target := "x86_64-unknown-linux-musl"
|
||||
|
||||
check: test
|
||||
cargo check
|
||||
cargo fmt --check
|
||||
cargo clippy --no-deps -- -Dwarnings
|
||||
|
||||
fmt:
|
||||
cargo fmt
|
||||
git ls-files | grep '\.py$' | xargs black
|
||||
|
||||
lint:
|
||||
cargo clippy --no-deps
|
||||
|
||||
lint-fix:
|
||||
cargo clippy --no-deps --fix
|
||||
|
||||
release:
|
||||
cargo build --release
|
||||
cargo build --release --target {{target}}
|
||||
|
||||
test-binary:
|
||||
env \
|
||||
GITHUB_API_BASEURL=http://rest:5000/github \
|
||||
GITLAB_API_BASEURL=http://rest:5000/gitlab \
|
||||
cargo build --target {{target}} --profile e2e-tests --features=static-build
|
||||
|
||||
install:
|
||||
cargo install --path .
|
||||
|
||||
install-static:
|
||||
cargo install --target {{target}} --features=static-build --path .
|
||||
|
||||
build:
|
||||
cargo build
|
||||
|
||||
build-static:
|
||||
cargo build --target {{target}} --features=static-build
|
||||
|
||||
test: test-unit test-integration test-e2e
|
||||
|
||||
test-unit:
|
||||
@@ -20,19 +46,20 @@ test-unit:
|
||||
test-integration:
|
||||
cargo test --test "*"
|
||||
|
||||
e2e-venv:
|
||||
test-e2e +tests=".": test-binary
|
||||
cd ./e2e_tests \
|
||||
&& python3 -m venv venv \
|
||||
&& . ./venv/bin/activate \
|
||||
&& pip --disable-pip-version-check install -r ./requirements.txt >/dev/null
|
||||
&& docker-compose rm --stop -f \
|
||||
&& docker-compose build \
|
||||
&& docker-compose run \
|
||||
--rm \
|
||||
-v $PWD/../target/{{target}}/e2e-tests/grm:/grm \
|
||||
pytest \
|
||||
"GRM_BINARY=/grm ALTERNATE_DOMAIN=alternate-rest python3 -m pytest -p no:cacheprovider --color=yes "$@"" \
|
||||
&& docker-compose rm --stop -f
|
||||
|
||||
update-dependencies: update-cargo-dependencies
|
||||
|
||||
test-e2e: e2e-venv release
|
||||
cd ./e2e_tests \
|
||||
&& . ./venv/bin/activate \
|
||||
&& python -m pytest .
|
||||
|
||||
update-dependencies:
|
||||
update-cargo-dependencies:
|
||||
@cd ./depcheck \
|
||||
&& python3 -m venv ./venv \
|
||||
&& . ./venv/bin/activate \
|
||||
|
||||
674
LICENSE
Normal file
674
LICENSE
Normal file
@@ -0,0 +1,674 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
@@ -10,9 +10,17 @@ import tomlkit
|
||||
|
||||
INDEX_DIR = "crates.io-index"
|
||||
|
||||
AUTOUPDATE_DISABLED = []
|
||||
|
||||
if os.path.exists(INDEX_DIR):
|
||||
subprocess.run(
|
||||
["git", "pull", "--depth=1", "origin"],
|
||||
["git", "fetch", "--depth=1", "origin"],
|
||||
cwd=INDEX_DIR,
|
||||
check=True,
|
||||
capture_output=True,
|
||||
)
|
||||
subprocess.run(
|
||||
["git", "reset", "--hard", "origin/master"],
|
||||
cwd=INDEX_DIR,
|
||||
check=True,
|
||||
capture_output=True,
|
||||
@@ -29,9 +37,16 @@ with open("../Cargo.toml", "r") as cargo_config:
|
||||
|
||||
update_necessary = False
|
||||
|
||||
# This updates the crates.io index, see https://github.com/rust-lang/cargo/issues/3377
|
||||
subprocess.run(
|
||||
["cargo", "update", "--dry-run"],
|
||||
check=True,
|
||||
capture_output=False, # to get some git output
|
||||
)
|
||||
|
||||
for tier in ["dependencies", "dev-dependencies"]:
|
||||
for name, dependency in cargo[tier].items():
|
||||
version = dependency["version"]
|
||||
version = dependency["version"].lstrip("=")
|
||||
if len(name) >= 4:
|
||||
info_file = f"{INDEX_DIR}/{name[0:2]}/{name[2:4]}/{name}"
|
||||
elif len(name) == 3:
|
||||
@@ -46,13 +61,24 @@ for tier in ["dependencies", "dev-dependencies"]:
|
||||
latest_version = None
|
||||
for version_entry in open(info_file, "r").readlines():
|
||||
version = semver.VersionInfo.parse(json.loads(version_entry)["vers"])
|
||||
if current_version.prerelease == "" and version.prerelease != "":
|
||||
# skip prereleases, except when we are on a prerelease already
|
||||
continue
|
||||
if latest_version is None or version > latest_version:
|
||||
if (
|
||||
current_version.prerelease is None
|
||||
and version.prerelease is not None
|
||||
):
|
||||
# skip prereleases, except when we are on a prerelease already
|
||||
print(f"{name}: Skipping prerelease version {version}")
|
||||
continue
|
||||
latest_version = version
|
||||
|
||||
if latest_version != current_version:
|
||||
if name in AUTOUPDATE_DISABLED:
|
||||
print(
|
||||
f"{name} {current_version}: There is a new version available "
|
||||
f"({latest_version}, current {current_version}), but autoupdating "
|
||||
f"is explictly disabled for {name}"
|
||||
)
|
||||
continue
|
||||
update_necessary = True
|
||||
if latest_version < current_version:
|
||||
print(
|
||||
@@ -62,13 +88,83 @@ for tier in ["dependencies", "dev-dependencies"]:
|
||||
print(
|
||||
f"{name}: New version found: {latest_version} (current {current_version})"
|
||||
)
|
||||
cargo[tier][name]["version"] = str(latest_version)
|
||||
|
||||
|
||||
if update_necessary is True:
|
||||
cargo[tier][name]["version"] = f"={str(latest_version)}"
|
||||
with open("../Cargo.toml", "w") as cargo_config:
|
||||
cargo_config.write(tomlkit.dumps(cargo))
|
||||
sys.exit(1)
|
||||
else:
|
||||
|
||||
try:
|
||||
cmd = subprocess.run(
|
||||
[
|
||||
"cargo",
|
||||
"update",
|
||||
"-Z",
|
||||
"no-index-update",
|
||||
"--aggressive",
|
||||
"--package",
|
||||
name,
|
||||
],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.stdout)
|
||||
print(e.stderr)
|
||||
raise
|
||||
|
||||
message = f"dependencies: Update {name} to {latest_version}"
|
||||
subprocess.run(
|
||||
[
|
||||
"git",
|
||||
"commit",
|
||||
"--message",
|
||||
message,
|
||||
"../Cargo.toml",
|
||||
"../Cargo.lock",
|
||||
],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
)
|
||||
|
||||
|
||||
# Note that we have to restart this lookup every time, as later packages can depend
|
||||
# on former packages
|
||||
while True:
|
||||
with open("../Cargo.lock", "r") as f:
|
||||
cargo_lock = tomlkit.parse(f.read())
|
||||
for package in cargo_lock["package"]:
|
||||
spec = f"{package['name']}:{package['version']}"
|
||||
try:
|
||||
cmd = subprocess.run(
|
||||
[
|
||||
"cargo",
|
||||
"update",
|
||||
"-Z",
|
||||
"no-index-update",
|
||||
"--aggressive",
|
||||
"--package",
|
||||
spec,
|
||||
],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.stdout)
|
||||
print(e.stderr)
|
||||
raise
|
||||
if len(cmd.stderr) != 0:
|
||||
update_necessary = True
|
||||
message = "Cargo.lock: {}".format(cmd.stderr.split("\n")[0].strip())
|
||||
print(message)
|
||||
cmd = subprocess.run(
|
||||
["git", "commit", "--message", message, "../Cargo.lock"],
|
||||
check=True,
|
||||
capture_output=True,
|
||||
)
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
if update_necessary is False:
|
||||
print("Everything up to date")
|
||||
sys.exit(0)
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
# Summary
|
||||
|
||||
- [Overview](./overview.md)
|
||||
- [Getting started](./getting_started.md)
|
||||
- [Installation](./installation.md)
|
||||
- [Repository trees](./repos.md)
|
||||
- [Git Worktrees](./worktrees.md)
|
||||
- [Forge Integrations](./forge_integration.md)
|
||||
- [FAQ](./faq.md)
|
||||
- [Contributing](./contributing.md)
|
||||
|
||||
1
docs/src/contributing.md
Symbolic link
1
docs/src/contributing.md
Symbolic link
@@ -0,0 +1 @@
|
||||
../../CONTRIBUTING.md
|
||||
205
docs/src/forge_integration.md
Normal file
205
docs/src/forge_integration.md
Normal file
@@ -0,0 +1,205 @@
|
||||
# Forge Integrations
|
||||
|
||||
In addition to managing repositories locally, `grm` also integrates with source
|
||||
code hosting platforms. Right now, the following platforms are supported:
|
||||
|
||||
* [GitHub](https://github.com/)
|
||||
* [GitLab](https://gitlab.com/)
|
||||
|
||||
Imagine you are just starting out with `grm` and want to clone all your repositories
|
||||
from GitHub. This is as simple as:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --owner --token-command "pass show github_grm_access_token" --path ~/projects
|
||||
```
|
||||
|
||||
You will end up with your projects cloned into `~/projects/{your_github_username}/`
|
||||
|
||||
## Authentication
|
||||
|
||||
The only currently supported authentication option is using personal access
|
||||
token.
|
||||
|
||||
### GitHub
|
||||
|
||||
See the GitHub documentation for personal access tokens:
|
||||
[Link](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token).
|
||||
|
||||
The only required permission is the "repo" scope.
|
||||
|
||||
### GitLab
|
||||
|
||||
See the GitLab documentation for personal access tokens:
|
||||
[Link](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html).
|
||||
|
||||
The required scopes are a bit weird. Actually, the following should suffice:
|
||||
|
||||
* `read_user` to get user information (required to get the current authenticated
|
||||
user name for the `--owner` filter).
|
||||
* A scope that allows reading private repositories. (`read_repository` is just
|
||||
for *cloning* private repos). This unfortunately does not exist.
|
||||
|
||||
So currently, you'll need to select the `read_api` scope.
|
||||
|
||||
## Filters
|
||||
|
||||
By default, `grm` will sync **nothing**. This is quite boring, so you have to
|
||||
tell the command what repositories to include. They are all inclusive (i.e. act
|
||||
as a logical OR), so you can easily chain many filters to clone a bunch of
|
||||
repositories. It's quite simple:
|
||||
|
||||
* `--user <USER>` syncs all repositories of that remote user
|
||||
* `--group <GROUP>` syncs all repositories of that remote group/organization
|
||||
* `--owner` syncs all repositories of the user that is used for authentication.
|
||||
This is effectively a shortcut for `--user $YOUR_USER`
|
||||
* `--access` syncs all repositories that the current user has access to
|
||||
|
||||
Easiest to see in an example:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --user torvalds --owner --group zalando [...]
|
||||
```
|
||||
|
||||
This would sync all of Torvalds' repositories, all of my own repositories and
|
||||
all (public) repositories in the "zalando" group.
|
||||
|
||||
## Strategies
|
||||
|
||||
There are generally three ways how you can use `grm` with forges:
|
||||
|
||||
### Ad-hoc cloning
|
||||
|
||||
This is the easiest, there are no local files involved. You just run the
|
||||
command, `grm` clones the repos, that's it. If you run the command again, `grm`
|
||||
will figure out the differences between local and remote repositories and
|
||||
resolve them locally.
|
||||
|
||||
### Create a file
|
||||
|
||||
This is effectively `grm repos find local`, but using the forge instead of the
|
||||
local file system. You will end up with a normal repository file that you can
|
||||
commit to git. To update the list of repositories, just run the command again
|
||||
and commit the new file.
|
||||
|
||||
### Define options in a file
|
||||
|
||||
This is a hybrid approach: You define filtering options in a file that you can
|
||||
commit to source control. Effectively, you are persisting the options you gave
|
||||
to `grm` on the command line with the ad-hoc approach. Similarly, `grm` will
|
||||
figure out differences between local and remote and resolve them.
|
||||
|
||||
A file would look like this:
|
||||
|
||||
```toml
|
||||
provider = "github"
|
||||
token_command = "cat ~/.github_token"
|
||||
root = "~/projects"
|
||||
|
||||
[filters]
|
||||
owner = true
|
||||
groups = [
|
||||
"zalando"
|
||||
]
|
||||
```
|
||||
|
||||
The options in the file map to the command line options of the `grm repos sync
|
||||
remote` command.
|
||||
|
||||
You'd then run the `grm repos sync` command the same way as with a list of
|
||||
repositories in a config:
|
||||
|
||||
```bash
|
||||
$ grm repos sync --config example.config.toml
|
||||
```
|
||||
|
||||
You can even use that file to generate a repository list that you can feed into
|
||||
`grm repos sync`:
|
||||
|
||||
```bash
|
||||
$ grm repos find config --config example.config.toml > repos.toml
|
||||
$ grm repos sync config --config repos.toml
|
||||
```
|
||||
|
||||
## Using with selfhosted GitLab
|
||||
|
||||
By default, `grm` uses the default GitLab API endpoint
|
||||
([https://gitlab.com](https://gitlab.com)). You can override the
|
||||
endpoint by specifying the `--api-url` parameter. Like this:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider gitlab --api-url https://gitlab.example.com [...]
|
||||
```
|
||||
|
||||
## The cloning protocol
|
||||
|
||||
By default, `grm` will use HTTPS for public repositories and SSH otherwise. This
|
||||
can be overridden with the `--force-ssh` switch.
|
||||
|
||||
## About the token command
|
||||
|
||||
To ensure maximum flexibility, `grm` has a single way to get the token it uses
|
||||
to authenticate: Specify a command that returns the token via stdout. This easily
|
||||
integrates with password managers like [`pass`](https://www.passwordstore.org/).
|
||||
|
||||
Of course, you are also free to specify something like `echo mytoken` as the
|
||||
command, as long as you are ok with the security implications (like having the
|
||||
token in cleartext in your shell history). It may be better to have the token
|
||||
in a file instead and read it: `cat ~/.gitlab_token`.
|
||||
|
||||
Generally, use whatever you want. The command just has to return successfully and
|
||||
return the token as the first line of stdout.
|
||||
|
||||
## Examples
|
||||
|
||||
Maybe you just want to locally clone all repos from your github user?
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --owner --root ~/github_projects --token-command "pass show github_grm_access_token"
|
||||
```
|
||||
|
||||
This will clone all repositories into `~/github_projects/{your_github_username}`.
|
||||
|
||||
If instead you want to clone **all** repositories you have access to (e.g. via
|
||||
organizations or other users' private repos you have access to), just change the
|
||||
filter a little bit:
|
||||
|
||||
```bash
|
||||
$ grm repos sync remote --provider github --access --root ~/github_projects --token-command "pass show github_grm_access_token"
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
### GitHub
|
||||
|
||||
Unfortunately, GitHub does not have a nice API endpoint to get **private**
|
||||
repositories for a certain user ([`/users/{user}/repos/`](https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user) only returns public
|
||||
repositories).
|
||||
|
||||
Therefore, using `--user {user}` will only show public repositories for GitHub.
|
||||
Note that this does not apply to `--access`: If you have access to another user's
|
||||
private repository, it will be listed.
|
||||
|
||||
## Adding integrations
|
||||
|
||||
Adding a new integration involves writing some Rust code. Most of the logic is
|
||||
generic, so you will not have to reinvent the wheel. Generally, you will need to
|
||||
gather the following information:
|
||||
|
||||
* A list of repositories for a single user
|
||||
* A list of repositories for a group (or any similar concept if applicable)
|
||||
* A list of repositories for the user that the API token belongs to
|
||||
* The username of the currently authenticated user
|
||||
|
||||
Authentication currently only works via a bearer token passed via the
|
||||
`Authorization` HTTP header.
|
||||
|
||||
Each repo has to have the following properties:
|
||||
|
||||
* A name (which also acts as the identifier for diff between local and remote
|
||||
repositories)
|
||||
* An SSH url to push to
|
||||
* An HTTPS url to clone and fetch from
|
||||
* A flag that marks the repository as private
|
||||
|
||||
If you plan to implement another forge, please first open an issue so we can
|
||||
go through the required setup. I'm happy to help!
|
||||
@@ -1,22 +0,0 @@
|
||||
# Quickstart
|
||||
|
||||
## Installation
|
||||
|
||||
Building GRM currently requires the nightly Rust toolchain. The easiest way
|
||||
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
|
||||
|
||||
Make sure that the nightly toolchain is installed:
|
||||
|
||||
```
|
||||
$ rustup toolchain install nightly
|
||||
```
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch master
|
||||
```
|
||||
|
||||
If you're brave, you can also run the development build:
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
|
||||
```
|
||||
56
docs/src/installation.md
Normal file
56
docs/src/installation.md
Normal file
@@ -0,0 +1,56 @@
|
||||
# Installation
|
||||
|
||||
## Installation
|
||||
|
||||
Building GRM currently requires the nightly Rust toolchain. The easiest way
|
||||
is using [`rustup`](https://rustup.rs/). Make sure that rustup is properly installed.
|
||||
|
||||
Make sure that the nightly toolchain is installed:
|
||||
|
||||
```
|
||||
$ rustup toolchain install nightly
|
||||
```
|
||||
|
||||
Then, install the build dependencies:
|
||||
|
||||
| Distribution | Command |
|
||||
| ------------- | ------------------------------------------------------------------------------ |
|
||||
| Archlinux | `pacman -S --needed gcc openssl pkg-config` |
|
||||
| Ubuntu/Debian | `apt-get install --no-install-recommends pkg-config gcc libssl-dev zlib1g-dev` |
|
||||
|
||||
Then, it's a simple command to install the latest stable version:
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install git-repo-manager
|
||||
```
|
||||
|
||||
If you're brave, you can also run the development build:
|
||||
|
||||
```bash
|
||||
$ cargo +nightly install --git https://github.com/hakoerber/git-repo-manager.git --branch develop
|
||||
```
|
||||
|
||||
## Static build
|
||||
|
||||
Note that by default, you will get a dynamically linked executable.
|
||||
Alternatively, you can also build a statically linked binary. For this, you
|
||||
will need `musl` and a few other build dependencies installed:
|
||||
|
||||
| Distribution | Command |
|
||||
| ------------- | --------------------------------------------------------------------------- |
|
||||
| Archlinux | `pacman -S --needed gcc musl perl make` |
|
||||
| Ubuntu/Debian | `apt-get install --no-install-recommends gcc musl-tools libc-dev perl make` |
|
||||
|
||||
(`perl` and `make` are required for the OpenSSL build script)
|
||||
|
||||
Then, add the musl target via `rustup`:
|
||||
|
||||
```
|
||||
$ rustup +nightly target add x86_64-unknown-linux-musl
|
||||
```
|
||||
|
||||
Then, use a modified build command to get a statically linked binary:
|
||||
|
||||
```
|
||||
$ cargo +nightly install git-repo-manager --target x86_64-unknown-linux-musl --features=static-build
|
||||
```
|
||||
@@ -5,7 +5,8 @@ Manager](https://github.com/hakoerber/git-repo-manager/) (GRM for short), a
|
||||
tool that helps you manage git repositories.
|
||||
|
||||
GRM helps you manage git repositories in a declarative way. Configure your
|
||||
repositories in a TOML file, GRM does the rest. Take a look at [the example
|
||||
repositories in a TOML or YAML file, GRM does the rest. Take a look at [the
|
||||
example
|
||||
configuration](https://github.com/hakoerber/git-repo-manager/blob/master/example.config.toml)
|
||||
to get a feel for the way you configure your repositories. See the [repository
|
||||
tree chapter](./repos.md) for details.
|
||||
|
||||
@@ -17,7 +17,7 @@ Then, you're ready to run the first sync. This will clone all configured reposit
|
||||
and set up the remotes.
|
||||
|
||||
```bash
|
||||
$ grm repos sync --config example.config.toml
|
||||
$ grm repos sync config --config example.config.toml
|
||||
[⚙] Cloning into "/home/me/projects/git-repo-manager" from "https://code.hkoerber.de/hannes/git-repo-manager.git"
|
||||
[✔] git-repo-manager: Repository successfully cloned
|
||||
[⚙] git-repo-manager: Setting up new remote "github" to "https://github.com/hakoerber/git-repo-manager.git"
|
||||
@@ -30,7 +30,7 @@ $ grm repos sync --config example.config.toml
|
||||
If you run it again, it will report no changes:
|
||||
|
||||
```
|
||||
$ grm repos sync --config example.config.toml
|
||||
$ grm repos sync config -c example.config.toml
|
||||
[✔] git-repo-manager: OK
|
||||
[✔] dotfiles: OK
|
||||
```
|
||||
@@ -42,7 +42,7 @@ a configuration from scratch. Luckily, GRM has a way to generate a configuration
|
||||
from an existing file tree:
|
||||
|
||||
```bash
|
||||
$ grm repos find ~/your/project/root > config.toml
|
||||
$ grm repos find local ~/your/project/root > config.toml
|
||||
```
|
||||
|
||||
This will detect all repositories and remotes and write them to `config.toml`.
|
||||
@@ -74,3 +74,9 @@ $ grm repos status
|
||||
╰──────────┴──────────┴────────┴──────────┴───────┴─────────╯
|
||||
```
|
||||
|
||||
## YAML
|
||||
|
||||
By default, the repo configuration uses TOML. If you prefer YAML, just give it
|
||||
a YAML file instead (file ending does not matter, `grm` will figure out the format
|
||||
itself). For generating a configuration, pass `--format yaml` to `grm repo find`
|
||||
to generate YAML instead of TOML.
|
||||
|
||||
@@ -71,6 +71,10 @@ Now, when you run a `grm sync`, you'll notice that the directory of the reposito
|
||||
is empty! Well, not totally, there is a hidden directory called `.git-main-working-tree`.
|
||||
This is where the repository actually "lives" (it's a bare checkout).
|
||||
|
||||
Note that there are few specific things you can configure for a certain
|
||||
workspace. This is all done in an optional `grm.toml` file right in the root
|
||||
of the worktree. More on that later.
|
||||
|
||||
### Creating a new worktree
|
||||
|
||||
To actually work, you'll first have to create a new worktree checkout. All
|
||||
@@ -130,6 +134,36 @@ The behaviour of `--track` differs depending on the existence of the remote bran
|
||||
new remote tracking branch, using the default branch (either `main` or `master`)
|
||||
as the base
|
||||
|
||||
Often, you'll have a workflow that uses tracking branches by default. It would
|
||||
be quite tedious to add `--track` every single time. Luckily, the `grm.toml` file
|
||||
supports defaults for the tracking behaviour. See this for an example:
|
||||
|
||||
```toml
|
||||
[track]
|
||||
default = true
|
||||
default_remote = "origin"
|
||||
```
|
||||
|
||||
This will set up a tracking branch on `origin` that has the same name as the local
|
||||
branch.
|
||||
|
||||
Sometimes, you might want to have a certain prefix for all your tracking branches.
|
||||
Maybe to prevent collisions with other contributors. You can simply set
|
||||
`default_remote_prefix` in `grm.toml`:
|
||||
|
||||
```toml
|
||||
[track]
|
||||
default = true
|
||||
default_remote = "origin"
|
||||
default_remote_prefix = "myname"
|
||||
```
|
||||
|
||||
When using branch `my-feature-branch`, the remote tracking branch would be
|
||||
`origin/myname/my-feature-branch` in this case.
|
||||
|
||||
Note that `--track` overrides any configuration in `grm.toml`. If you want to
|
||||
disable tracking, use `--no-track`.
|
||||
|
||||
### Showing the status of your worktrees
|
||||
|
||||
There is a handy little command that will show you an overview of all worktrees
|
||||
@@ -195,6 +229,39 @@ $ grm wt clean
|
||||
Note that this will not delete the default branch of the repository. It can of
|
||||
course still be deleted with `grm wt delete` if necessary.
|
||||
|
||||
### Persistent branches
|
||||
|
||||
You most likely have a few branches that are "special", that you don't want to
|
||||
clean up and that are the usual target for feature branches to merge into. GRM
|
||||
calls them "persistent branches" and treats them a bit differently:
|
||||
|
||||
* Their worktrees will never be deleted by `grm wt clean`
|
||||
* If the branches in other worktrees are merged into them, they will be cleaned
|
||||
up, even though they may not be in line with their upstream. Same goes for
|
||||
`grm wt delete`, which will not require a `--force` flag. Note that of
|
||||
course, actual changes in the worktree will still block an automatic cleanup!
|
||||
* As soon as you enable persistent branches, non-persistent branches will only
|
||||
ever be cleaned up when merged into a persistent branch.
|
||||
|
||||
To elaborate: This is mostly relevant for a feature-branch workflow. Whenever a
|
||||
feature branch is merged, it can usually be thrown away. As merging is usually
|
||||
done on some remote code management platform (GitHub, GitLab, ...), this means
|
||||
that you usually keep a branch around until it is merged into one of the "main"
|
||||
branches (`master`, `main`, `develop`, ...)
|
||||
|
||||
Enable persistent branches by setting the following in the `grm.toml` in the
|
||||
worktree root:
|
||||
|
||||
```toml
|
||||
persistent_branches = [
|
||||
"master",
|
||||
"develop",
|
||||
]
|
||||
```
|
||||
|
||||
Note that setting persistent branches will disable any detection of "default"
|
||||
branches. The first entry will be considered your repositories' default branch.
|
||||
|
||||
### Converting an existing repository
|
||||
|
||||
It is possible to convert an existing directory to a worktree setup, using `grm
|
||||
@@ -202,7 +269,7 @@ wt convert`. This command has to be run in the root of the repository you want
|
||||
to convert:
|
||||
|
||||
```
|
||||
grm wt convert
|
||||
$ grm wt convert
|
||||
[✔] Conversion successful
|
||||
```
|
||||
|
||||
@@ -212,6 +279,81 @@ Commit them and try again!
|
||||
Afterwards, the directory is empty, as there are no worktrees checked out yet.
|
||||
Now you can use the usual commands to set up worktrees.
|
||||
|
||||
### Working with remotes
|
||||
|
||||
To fetch all remote references from all remotes in a worktree setup, you can
|
||||
use the following command:
|
||||
|
||||
```
|
||||
$ grm wt fetch
|
||||
[✔] Fetched from all remotes
|
||||
```
|
||||
|
||||
This is equivalent to running `git fetch --all` in any of the worktrees.
|
||||
|
||||
Often, you may want to pull all remote changes into your worktrees. For this,
|
||||
use the `git pull` equivalent:
|
||||
|
||||
```
|
||||
$ grm wt pull
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
This will refuse when there are local changes, or if the branch cannot be fast
|
||||
forwarded. If you want to rebase your local branches, use the `--rebase` switch:
|
||||
|
||||
```
|
||||
$ grm wt pull --rebase
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
As noted, this will fail if there are any local changes in your worktree. If you
|
||||
want to stash these changes automatically before the pull (and unstash them
|
||||
afterwards), use the `--stash` option.
|
||||
|
||||
This will rebase your changes onto the upstream branch. This is mainly helpful
|
||||
for persistent branches that change on the remote side.
|
||||
|
||||
There is a similar rebase feature that rebases onto the **default** branch instead:
|
||||
|
||||
```
|
||||
$ grm wt rebase
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
This is super helpful for feature branches. If you want to incorporate changes
|
||||
made on the remote branches, use `grm wt rebase` and all your branches will
|
||||
be up to date. If you want to also update to remote tracking branches in one go,
|
||||
use the `--pull` flag, and `--rebase` if you want to rebase instead of aborting
|
||||
on non-fast-forwards:
|
||||
|
||||
```
|
||||
$ grm wt rebase --pull --rebase
|
||||
[✔] master: Done
|
||||
[✔] my-cool-branch: Done
|
||||
```
|
||||
|
||||
"So, what's the difference between `pull --rebase` and `rebase --pull`? Why the
|
||||
hell is there a `--rebase` flag in the `rebase` command?"
|
||||
|
||||
Yes, it's kind of weird. Remember that `pull` only ever updates each worktree
|
||||
to their remote branch, if possible. `rebase` rebases onto the **default** branch
|
||||
instead. The switches to `rebase` are just convenience, so you do not have to
|
||||
run two commands.
|
||||
|
||||
* `rebase --pull` is the same as `pull` && `rebase`
|
||||
* `rebase --pull --rebase` is the same as `pull --rebase` && `rebase`
|
||||
|
||||
I understand that the UX is not the most intuitive. If you can think of an
|
||||
improvement, please let me know (e.g. via a GitHub issue)!
|
||||
|
||||
As with `pull`, `rebase` will also refuse to run when there are changes in your
|
||||
worktree. And you can also use the `--stash` option to stash/unstash changes
|
||||
automatically.
|
||||
|
||||
### Manual access
|
||||
|
||||
GRM isn't doing any magic, it's just git under the hood. If you need to have access
|
||||
|
||||
1
e2e_tests/.gitignore
vendored
1
e2e_tests/.gitignore
vendored
@@ -1,2 +1 @@
|
||||
/venv/
|
||||
/__pycache__/
|
||||
|
||||
8
e2e_tests/conftest.py
Normal file
8
e2e_tests/conftest.py
Normal file
@@ -0,0 +1,8 @@
|
||||
import os
|
||||
|
||||
|
||||
def pytest_configure(config):
    """Pin the git author/committer identity so test commits are deterministic."""
    identity = {
        "GIT_AUTHOR_NAME": "Example user",
        "GIT_AUTHOR_EMAIL": "user@example.com",
        "GIT_COMMITTER_NAME": "Example user",
        "GIT_COMMITTER_EMAIL": "user@example.com",
    }
    os.environ.update(identity)
|
||||
32
e2e_tests/docker-compose.yml
Normal file
32
e2e_tests/docker-compose.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
version: "3.7"
|
||||
|
||||
services:
|
||||
pytest:
|
||||
build: ./docker
|
||||
volumes:
|
||||
- type: bind
|
||||
source: ./
|
||||
target: /tests
|
||||
read_only: true
|
||||
- type: tmpfs
|
||||
target: /tmp
|
||||
environment:
|
||||
TMPDIR: /tmp
|
||||
depends_on:
|
||||
- rest
|
||||
command:
|
||||
- "true"
|
||||
networks:
|
||||
main:
|
||||
|
||||
rest:
|
||||
build: ./docker-rest/
|
||||
expose:
|
||||
- "5000"
|
||||
networks:
|
||||
main:
|
||||
aliases:
|
||||
- alternate-rest
|
||||
|
||||
networks:
|
||||
main:
|
||||
19
e2e_tests/docker-rest/Dockerfile
Normal file
19
e2e_tests/docker-rest/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM docker.io/debian:11.3
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ENV FLASK_APP=app.py
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
dumb-init \
|
||||
python3-flask \
|
||||
python3-jinja2 \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
EXPOSE 5000
|
||||
|
||||
COPY flask .
|
||||
|
||||
CMD ["/usr/bin/dumb-init", "--", "flask", "run", "--port", "5000", "--host", "0.0.0.0"]
|
||||
7
e2e_tests/docker-rest/flask/app.py
Normal file
7
e2e_tests/docker-rest/flask/app.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from flask import Flask
|
||||
|
||||
app = Flask(__name__)
|
||||
app.url_map.strict_slashes = False
|
||||
|
||||
import github
|
||||
import gitlab
|
||||
103
e2e_tests/docker-rest/flask/github.py
Normal file
103
e2e_tests/docker-rest/flask/github.py
Normal file
@@ -0,0 +1,103 @@
|
||||
import os.path
|
||||
|
||||
from app import app
|
||||
|
||||
from flask import Flask, request, abort, jsonify, make_response
|
||||
|
||||
import jinja2
|
||||
|
||||
|
||||
def check_headers():
    """Validate the GitHub-style accept and authorization request headers.

    Aborts the current request with an error response when either header
    is missing or carries an unexpected value.
    """
    accept = request.headers.get("accept")
    if accept != "application/vnd.github.v3+json":
        app.logger.error("Invalid accept header")
        abort(500)

    auth_header = request.headers.get("authorization")
    if auth_header == "token secret-token:myauthtoken":
        return

    app.logger.error("Invalid authorization header: %s", auth_header)
    # Mimic GitHub's 401 "Bad credentials" error payload.
    body = jsonify(
        {
            "message": "Bad credentials",
            "documentation_url": "https://docs.example.com/rest",
        }
    )
    abort(make_response(body, 401))
|
||||
|
||||
|
||||
def add_pagination(response, page, last_page):
    """Attach a GitHub-style `link` header with "next"/"last" page URLs."""
    host = request.headers["host"]
    base = f"{request.scheme}://{host}{request.path}"

    def page_url(target):
        # Rebuild the incoming query string with the page number swapped out.
        query = request.args.copy()
        query["page"] = target
        query_string = "&".join(f"{k}={v}" for k, v in query.items())
        return f"{base}?{query_string}"

    parts = []
    if page < last_page:
        parts.append(f'<{page_url(page + 1)}>; rel="next"')
    parts.append(f'<{page_url(last_page)}>; rel="last"')
    response.headers["link"] = ", ".join(parts)
|
||||
|
||||
|
||||
def read_project_files(namespaces=()):
    """Serve one page of a paginated, jinja2-templated project listing.

    The page number comes from the `page` query argument (defaulting to 1)
    and selects both the response template on disk and the namespace that
    is rendered into it. Returns an empty JSON list when no template file
    exists for the requested page.

    namespaces: sequence with one namespace per page, indexed by `page - 1`.
    """
    last_page = 4
    # NOTE(review): the original also bound this value to an unused
    # `username` variable (`page = username = int(...)`); dropped since it
    # was never read.
    page = int(request.args.get("page", "1"))
    response_file = f"./github_api_page_{page}.json.j2"
    if not os.path.exists(response_file):
        return jsonify([])

    # Close the template file deterministically instead of leaking the
    # handle until garbage collection.
    with open(response_file) as f:
        template = jinja2.Template(f.read())
    response = make_response(template.render(namespace=namespaces[page - 1]))
    add_pagination(response, page, last_page)
    response.headers["content-type"] = "application/json"
    return response
|
||||
|
||||
|
||||
def single_namespaced_projects(namespace):
    """Serve a project listing where every page belongs to one namespace."""
    return read_project_files([namespace for _ in range(4)])
|
||||
|
||||
|
||||
def mixed_projects(namespaces):
    """Serve a project listing with a (possibly) different namespace per page."""
    return read_project_files(namespaces)
|
||||
|
||||
|
||||
@app.route("/github/users/<string:user>/repos/")
def github_user_repos(user):
    """Mock of GitHub's per-user repository listing endpoint."""
    check_headers()
    if user != "myuser1":
        # Unknown users simply have no repositories.
        return jsonify([])
    return single_namespaced_projects("myuser1")
|
||||
|
||||
|
||||
@app.route("/github/orgs/<string:group>/repos/")
def github_group_repos(group):
    """Mock of GitHub's per-organization repository listing endpoint."""
    check_headers()
    # The client is expected to always request repositories of every type.
    if request.args.get("type") != "all":
        abort(500, "wrong arguments")
    if group == "mygroup1":
        return single_namespaced_projects("mygroup1")
    return jsonify([])
|
||||
|
||||
|
||||
@app.route("/github/user/repos/")
def github_own_repos():
    """Mock of GitHub's "repositories of the authenticated user" endpoint."""
    check_headers()
    # One namespace per page of the paginated response.
    pages = ["myuser1", "myuser2", "mygroup1", "mygroup2"]
    return mixed_projects(pages)
|
||||
|
||||
|
||||
@app.route("/github/user/")
def github_user():
    """Mock of GitHub's "authenticated user" endpoint.

    Serves a canned JSON document describing the current user.
    """
    check_headers()
    # Use a context manager so the file handle is closed deterministically;
    # the original `open(...).read()` leaked the handle until GC.
    with open("./github_api_user.json") as f:
        response = make_response(f.read())
    response.headers["content-type"] = "application/json"
    return response
|
||||
228
e2e_tests/docker-rest/flask/github_api_page_1.json.j2
Normal file
228
e2e_tests/docker-rest/flask/github_api_page_1.json.j2
Normal file
@@ -0,0 +1,228 @@
|
||||
[
|
||||
{
|
||||
"id": 1,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject1",
|
||||
"full_name": "{{ namespace }}/myproject1",
|
||||
"private": true,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject1",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject1",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject1/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject1/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject1/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject1/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject1/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject1/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject1/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject1/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject1/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject1/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject1/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject1/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject1/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject1/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject1/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject1/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject1/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject1/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject1/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject1/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject1/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject1/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject1.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject1.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject1.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject1",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject2",
|
||||
"full_name": "{{ namespace }}/myproject2",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject2",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject2",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject2/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject2/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject2/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject2/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject2/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject2/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject2/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject2/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject2/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject2/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject2/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject2/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject2/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject2/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject2/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject2/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject2/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject2/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject2/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject2/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject2/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject2/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject2.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject2.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject2.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject2",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
115
e2e_tests/docker-rest/flask/github_api_page_2.json.j2
Normal file
115
e2e_tests/docker-rest/flask/github_api_page_2.json.j2
Normal file
@@ -0,0 +1,115 @@
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject3",
|
||||
"full_name": "{{ namespace }}/myproject3",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject3",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject3",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject3/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject3/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject3/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject3/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject3/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject3/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject3/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject3/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject3/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject3/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject3/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject3/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject3/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject3/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject3/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject3/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject3/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject3/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject3/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject3/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject3/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject3/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject3.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject3.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject3.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject3",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
115
e2e_tests/docker-rest/flask/github_api_page_3.json.j2
Normal file
115
e2e_tests/docker-rest/flask/github_api_page_3.json.j2
Normal file
@@ -0,0 +1,115 @@
|
||||
[
|
||||
{
|
||||
        "id": 4,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject4",
|
||||
"full_name": "{{ namespace }}/myproject4",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject4",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject4",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject4/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject4/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject4/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject4/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject4/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject4/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject4/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject4/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject4/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject4/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject4/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject4/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject4/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject4/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject4/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject4/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject4/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject4/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject4/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject4/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject4/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject4/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject4.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject4.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject4.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject4",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
115
e2e_tests/docker-rest/flask/github_api_page_4.json.j2
Normal file
115
e2e_tests/docker-rest/flask/github_api_page_4.json.j2
Normal file
@@ -0,0 +1,115 @@
|
||||
[
|
||||
{
|
||||
        "id": 5,
|
||||
"node_id": "MDEwOlJlcG9zaXRvcnk0OTIzNDY2Ng==",
|
||||
"name": "myproject5",
|
||||
"full_name": "{{ namespace }}/myproject5",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "someuser",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjM3NDg2OTY=",
|
||||
"avatar_url": "https://example.com/u/3748696?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/{{ namespace }}",
|
||||
"html_url": "https://example.com/{{ namespace }}",
|
||||
"followers_url": "https://api.example.com/users/{{ namespace }}/followers",
|
||||
"following_url": "https://api.example.com/users/{{ namespace }}/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/{{ namespace }}/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/{{ namespace }}/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/{{ namespace }}/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/{{ namespace }}/orgs",
|
||||
"repos_url": "https://api.example.com/users/{{ namespace }}/repos",
|
||||
"events_url": "https://api.example.com/users/{{ namespace }}/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/{{ namespace }}/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://example.com/{{ namespace }}/myproject5",
|
||||
"description": "Shell script for automatically building ACI containers from scratch using acbuild.",
|
||||
"fork": false,
|
||||
"url": "https://api.example.com/repos/{{ namespace }}/myproject5",
|
||||
"forks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/forks",
|
||||
"keys_url": "https://api.example.com/repos/{{ namespace }}/myproject5/keys{/key_id}",
|
||||
"collaborators_url": "https://api.example.com/repos/{{ namespace }}/myproject5/collaborators{/collaborator}",
|
||||
"teams_url": "https://api.example.com/repos/{{ namespace }}/myproject5/teams",
|
||||
"hooks_url": "https://api.example.com/repos/{{ namespace }}/myproject5/hooks",
|
||||
"issue_events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/events{/number}",
|
||||
"events_url": "https://api.example.com/repos/{{ namespace }}/myproject5/events",
|
||||
"assignees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/assignees{/user}",
|
||||
"branches_url": "https://api.example.com/repos/{{ namespace }}/myproject5/branches{/branch}",
|
||||
"tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/tags",
|
||||
"blobs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/blobs{/sha}",
|
||||
"git_tags_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/tags{/sha}",
|
||||
"git_refs_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/refs{/sha}",
|
||||
"trees_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/trees{/sha}",
|
||||
"statuses_url": "https://api.example.com/repos/{{ namespace }}/myproject5/statuses/{sha}",
|
||||
"languages_url": "https://api.example.com/repos/{{ namespace }}/myproject5/languages",
|
||||
"stargazers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/stargazers",
|
||||
"contributors_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contributors",
|
||||
"subscribers_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscribers",
|
||||
"subscription_url": "https://api.example.com/repos/{{ namespace }}/myproject5/subscription",
|
||||
"commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/commits{/sha}",
|
||||
"git_commits_url": "https://api.example.com/repos/{{ namespace }}/myproject5/git/commits{/sha}",
|
||||
"comments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/comments{/number}",
|
||||
"issue_comment_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues/comments{/number}",
|
||||
"contents_url": "https://api.example.com/repos/{{ namespace }}/myproject5/contents/{+path}",
|
||||
"compare_url": "https://api.example.com/repos/{{ namespace }}/myproject5/compare/{base}...{head}",
|
||||
"merges_url": "https://api.example.com/repos/{{ namespace }}/myproject5/merges",
|
||||
"archive_url": "https://api.example.com/repos/{{ namespace }}/myproject5/{archive_format}{/ref}",
|
||||
"downloads_url": "https://api.example.com/repos/{{ namespace }}/myproject5/downloads",
|
||||
"issues_url": "https://api.example.com/repos/{{ namespace }}/myproject5/issues{/number}",
|
||||
"pulls_url": "https://api.example.com/repos/{{ namespace }}/myproject5/pulls{/number}",
|
||||
"milestones_url": "https://api.example.com/repos/{{ namespace }}/myproject5/milestones{/number}",
|
||||
"notifications_url": "https://api.example.com/repos/{{ namespace }}/myproject5/notifications{?since,all,participating}",
|
||||
"labels_url": "https://api.example.com/repos/{{ namespace }}/myproject5/labels{/name}",
|
||||
"releases_url": "https://api.example.com/repos/{{ namespace }}/myproject5/releases{/id}",
|
||||
"deployments_url": "https://api.example.com/repos/{{ namespace }}/myproject5/deployments",
|
||||
"created_at": "2016-01-07T22:27:54Z",
|
||||
"updated_at": "2021-11-20T16:15:37Z",
|
||||
"pushed_at": "2021-11-20T16:15:34Z",
|
||||
"git_url": "git://example.com/{{ namespace }}/myproject5.git",
|
||||
"ssh_url": "ssh://git@example.com/{{ namespace }}/myproject5.git",
|
||||
"clone_url": "https://example.com/{{ namespace }}/myproject5.git",
|
||||
"svn_url": "https://example.com/{{ namespace }}/myproject5",
|
||||
"homepage": null,
|
||||
"size": 12,
|
||||
"stargazers_count": 0,
|
||||
"watchers_count": 0,
|
||||
"language": "Shell",
|
||||
"has_issues": true,
|
||||
"has_projects": true,
|
||||
"has_downloads": true,
|
||||
"has_wiki": true,
|
||||
"has_pages": false,
|
||||
"forks_count": 0,
|
||||
"mirror_url": null,
|
||||
"archived": false,
|
||||
"disabled": false,
|
||||
"open_issues_count": 0,
|
||||
"license": {
|
||||
"key": "apache-2.0",
|
||||
"name": "Apache License 2.0",
|
||||
"spdx_id": "Apache-2.0",
|
||||
"url": "https://api.example.com/licenses/apache-2.0",
|
||||
"node_id": "MDc6TGljZW5zZTI="
|
||||
},
|
||||
"allow_forking": true,
|
||||
"is_template": false,
|
||||
"topics": [
|
||||
|
||||
],
|
||||
"visibility": "public",
|
||||
"forks": 0,
|
||||
"open_issues": 0,
|
||||
"watchers": 0,
|
||||
"default_branch": "master",
|
||||
"permissions": {
|
||||
"admin": true,
|
||||
"maintain": true,
|
||||
"push": true,
|
||||
"triage": true,
|
||||
"pull": true
|
||||
}
|
||||
}
|
||||
]
|
||||
46
e2e_tests/docker-rest/flask/github_api_user.json
Normal file
46
e2e_tests/docker-rest/flask/github_api_user.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"login": "myuser1",
|
||||
"id": 1,
|
||||
"node_id": "MDQ6VXNlcjE=",
|
||||
"avatar_url": "https://example.com/images/error/octocat_happy.gif",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.example.com/users/octocat",
|
||||
"html_url": "https://example.com/octocat",
|
||||
"followers_url": "https://api.example.com/users/octocat/followers",
|
||||
"following_url": "https://api.example.com/users/octocat/following{/other_user}",
|
||||
"gists_url": "https://api.example.com/users/octocat/gists{/gist_id}",
|
||||
"starred_url": "https://api.example.com/users/octocat/starred{/owner}{/repo}",
|
||||
"subscriptions_url": "https://api.example.com/users/octocat/subscriptions",
|
||||
"organizations_url": "https://api.example.com/users/octocat/orgs",
|
||||
"repos_url": "https://api.example.com/users/octocat/repos",
|
||||
"events_url": "https://api.example.com/users/octocat/events{/privacy}",
|
||||
"received_events_url": "https://api.example.com/users/octocat/received_events",
|
||||
"type": "User",
|
||||
"site_admin": false,
|
||||
"name": "monalisa octocat",
|
||||
"company": "GitHub",
|
||||
"blog": "https://example.com/blog",
|
||||
"location": "San Francisco",
|
||||
"email": "octocat@example.com",
|
||||
"hireable": false,
|
||||
"bio": "There once was...",
|
||||
"twitter_username": "monatheoctocat",
|
||||
"public_repos": 2,
|
||||
"public_gists": 1,
|
||||
"followers": 20,
|
||||
"following": 0,
|
||||
"created_at": "2008-01-14T04:33:35Z",
|
||||
"updated_at": "2008-01-14T04:33:35Z",
|
||||
"private_gists": 81,
|
||||
"total_private_repos": 100,
|
||||
"owned_private_repos": 100,
|
||||
"disk_usage": 10000,
|
||||
"collaborators": 8,
|
||||
"two_factor_authentication": true,
|
||||
"plan": {
|
||||
"name": "Medium",
|
||||
"space": 400,
|
||||
"private_repos": 20,
|
||||
"collaborators": 0
|
||||
}
|
||||
}
|
||||
106
e2e_tests/docker-rest/flask/gitlab.py
Normal file
106
e2e_tests/docker-rest/flask/gitlab.py
Normal file
@@ -0,0 +1,106 @@
|
||||
import os.path

from app import app

from flask import Flask, request, abort, jsonify, make_response

import jinja2


def check_headers():
    """Validate the accept and authorization headers of the incoming request.

    Aborts with 500 on a wrong accept header, and with a JSON
    "Bad credentials" 401 payload on a wrong authorization header.
    """
    if request.headers.get("accept") != "application/json":
        app.logger.error("Invalid accept header")
        abort(500)
    auth_header = request.headers.get("authorization")
    if auth_header != "bearer secret-token:myauthtoken":
        app.logger.error("Invalid authorization header: %s", auth_header)
        abort(
            make_response(
                jsonify(
                    {
                        "message": "Bad credentials",
                        "documentation_url": "https://docs.example.com/rest",
                    }
                ),
                401,
            )
        )


def add_pagination(response, page, last_page):
    """Attach an RFC-5988 style ``link`` header with next/last page URLs.

    Mirrors the pagination headers real GitLab/GitHub APIs emit so
    clients under test can follow ``rel="next"`` until ``rel="last"``.
    """
    host = request.headers["host"]
    link_header = ""

    def args(page):
        # Rebuild the current query string with the page number replaced.
        args = request.args.copy()
        args["page"] = page
        return "&".join([f"{k}={v}" for k, v in args.items()])

    if page < last_page:
        link_header += (
            f'<{request.scheme}://{host}{request.path}?{args(page+1)}>; rel="next", '
        )
    link_header += (
        f'<{request.scheme}://{host}{request.path}?{args(last_page)}>; rel="last"'
    )
    response.headers["link"] = link_header


def read_project_files(namespaces=()):
    """Render the paginated project fixture for the requested ``page``.

    ``namespaces`` holds one namespace per page; page N is rendered with
    ``namespaces[N - 1]``. Returns an empty JSON list when the fixture
    file for the requested page does not exist.
    """
    last_page = 4
    # NOTE: the original had `page = username = int(...)`; `username` was a
    # dead copy-paste artifact and has been dropped.
    page = int(request.args.get("page", "1"))
    response_file = f"./gitlab_api_page_{page}.json"
    if not os.path.exists(response_file):
        return jsonify([])

    # Context manager so the fixture file handle is closed promptly.
    with open(response_file) as fixture:
        template = jinja2.Template(fixture.read())
    response = make_response(template.render(namespace=namespaces[page - 1]))
    add_pagination(response, page, last_page)
    response.headers["content-type"] = "application/json"
    return response


def single_namespaced_projects(namespace):
    # All four fixture pages are rendered under the same namespace.
    return read_project_files([namespace] * 4)


def mixed_projects(namespaces):
    # Each fixture page is rendered under the corresponding namespace entry.
    return read_project_files(namespaces)


@app.route("/gitlab/api/v4/users/<string:user>/projects")
def gitlab_user_repos(user):
    """Mock of GitLab's per-user project listing endpoint."""
    check_headers()
    if user == "myuser1":
        return single_namespaced_projects("myuser1")
    return jsonify([])


@app.route("/gitlab/api/v4/groups/<string:group>/projects")
def gitlab_group_repos(group):
    """Mock of GitLab's per-group project listing endpoint.

    Requires ``include_subgroups=true`` and ``archived=false`` query
    arguments, matching what the client under test is expected to send.
    """
    check_headers()
    if not (
        request.args.get("include_subgroups") == "true"
        and request.args.get("archived") == "false"
    ):
        abort(500, "wrong arguments")
    if group == "mygroup1":
        return single_namespaced_projects("mygroup1")
    return jsonify([])


@app.route("/gitlab/api/v4/projects/")
def gitlab_own_repos():
    """Mock of GitLab's "all projects visible to the user" endpoint."""
    check_headers()
    return mixed_projects(["myuser1", "myuser2", "mygroup1", "mygroup2"])


@app.route("/gitlab/api/v4/user/")
def gitlab_user():
    """Mock of GitLab's current-user endpoint, served from a static fixture."""
    check_headers()
    with open("./gitlab_api_user.json") as fixture:
        response = make_response(fixture.read())
    response.headers["content-type"] = "application/json"
    return response
|
||||
236
e2e_tests/docker-rest/flask/gitlab_api_page_1.json
Normal file
236
e2e_tests/docker-rest/flask/gitlab_api_page_1.json
Normal file
@@ -0,0 +1,236 @@
|
||||
[
|
||||
{
|
||||
"id": 1,
|
||||
"description": "",
|
||||
"name": "myproject1",
|
||||
"name_with_namespace": "{{ namespace }} / myproject1",
|
||||
"path": "myproject1",
|
||||
"path_with_namespace": "{{ namespace }}/myproject1",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject1.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject1.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject1",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject1",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "private",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject1-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"description": "",
|
||||
"name": "myproject2",
|
||||
"name_with_namespace": "{{ namespace }} / myproject2",
|
||||
"path": "myproject2",
|
||||
"path_with_namespace": "{{ namespace }}/myproject2",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject2.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject2.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject2",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject2",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject2-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
119
e2e_tests/docker-rest/flask/gitlab_api_page_2.json
Normal file
119
e2e_tests/docker-rest/flask/gitlab_api_page_2.json
Normal file
@@ -0,0 +1,119 @@
|
||||
[
|
||||
{
|
||||
"id": 3,
|
||||
"description": "",
|
||||
"name": "myproject3",
|
||||
"name_with_namespace": "{{ namespace }} / myproject3",
|
||||
"path": "myproject3",
|
||||
"path_with_namespace": "{{ namespace }}/myproject3",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject3.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject3.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject3",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject3",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject3-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
119
e2e_tests/docker-rest/flask/gitlab_api_page_3.json
Normal file
119
e2e_tests/docker-rest/flask/gitlab_api_page_3.json
Normal file
@@ -0,0 +1,119 @@
|
||||
[
|
||||
{
|
||||
"id": 4,
|
||||
"description": "",
|
||||
"name": "myproject4",
|
||||
"name_with_namespace": "{{ namespace }} / myproject4",
|
||||
"path": "myproject4",
|
||||
"path_with_namespace": "{{ namespace }}/myproject4",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject4.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject4.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject4",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject4",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject4-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
119
e2e_tests/docker-rest/flask/gitlab_api_page_4.json
Normal file
119
e2e_tests/docker-rest/flask/gitlab_api_page_4.json
Normal file
@@ -0,0 +1,119 @@
|
||||
[
|
||||
{
|
||||
"id": 5,
|
||||
"description": "",
|
||||
"name": "myproject5",
|
||||
"name_with_namespace": "{{ namespace }} / myproject5",
|
||||
"path": "myproject5",
|
||||
"path_with_namespace": "{{ namespace }}/myproject5",
|
||||
"created_at": "2020-11-26T17:23:39.904Z",
|
||||
"default_branch": "master",
|
||||
"tag_list": [],
|
||||
"topics": [],
|
||||
"ssh_url_to_repo": "ssh://git@example.com/{{ namespace }}/myproject5.git",
|
||||
"http_url_to_repo": "https://example.com/{{ namespace }}/myproject5.git",
|
||||
"web_url": "https://example.com/{{ namespace }}/myproject5",
|
||||
"readme_url": null,
|
||||
"avatar_url": null,
|
||||
"forks_count": 0,
|
||||
"star_count": 0,
|
||||
"last_activity_at": "2020-11-26T17:23:39.904Z",
|
||||
"namespace": {
|
||||
"id": 3,
|
||||
"name": "{{ namespace }}",
|
||||
"path": "{{ namespace }}",
|
||||
"kind": "group",
|
||||
"full_path": "{{ namespace }}",
|
||||
"parent_id": null,
|
||||
"avatar_url": "/uploads/-/system/group/avatar/5/x.png",
|
||||
"web_url": "https://example.com/groups/{{ namespace }}"
|
||||
},
|
||||
"container_registry_image_prefix": "registry.example.com/{{ namespace }}/myproject5",
|
||||
"_links": {
|
||||
"self": "https://example.com/api/v4/projects/2",
|
||||
"issues": "https://example.com/api/v4/projects/2/issues",
|
||||
"merge_requests": "https://example.com/api/v4/projects/2/merge_requests",
|
||||
"repo_branches": "https://example.com/api/v4/projects/2/repository/branches",
|
||||
"labels": "https://example.com/api/v4/projects/2/labels",
|
||||
"events": "https://example.com/api/v4/projects/2/events",
|
||||
"members": "https://example.com/api/v4/projects/2/members",
|
||||
"cluster_agents": "https://example.com/api/v4/projects/2/cluster_agents"
|
||||
},
|
||||
"packages_enabled": true,
|
||||
"empty_repo": false,
|
||||
"archived": false,
|
||||
"visibility": "public",
|
||||
"resolve_outdated_diff_discussions": false,
|
||||
"container_expiration_policy": {
|
||||
"cadence": "1d",
|
||||
"enabled": false,
|
||||
"keep_n": 10,
|
||||
"older_than": "90d",
|
||||
"name_regex": ".*",
|
||||
"name_regex_keep": null,
|
||||
"next_run_at": "2020-11-27T17:23:39.927Z"
|
||||
},
|
||||
"issues_enabled": true,
|
||||
"merge_requests_enabled": true,
|
||||
"wiki_enabled": true,
|
||||
"jobs_enabled": true,
|
||||
"snippets_enabled": true,
|
||||
"container_registry_enabled": true,
|
||||
"service_desk_enabled": true,
|
||||
"service_desk_address": "contact-for-myproject5-2-issue-@incoming.example.com",
|
||||
"can_create_merge_request_in": true,
|
||||
"issues_access_level": "enabled",
|
||||
"repository_access_level": "enabled",
|
||||
"merge_requests_access_level": "enabled",
|
||||
"forking_access_level": "enabled",
|
||||
"wiki_access_level": "enabled",
|
||||
"builds_access_level": "enabled",
|
||||
"snippets_access_level": "enabled",
|
||||
"pages_access_level": "private",
|
||||
"operations_access_level": "enabled",
|
||||
"analytics_access_level": "enabled",
|
||||
"container_registry_access_level": "enabled",
|
||||
"security_and_compliance_access_level": "private",
|
||||
"emails_disabled": null,
|
||||
"shared_runners_enabled": true,
|
||||
"lfs_enabled": true,
|
||||
"creator_id": 1803951,
|
||||
"import_url": null,
|
||||
"import_type": null,
|
||||
"import_status": "none",
|
||||
"open_issues_count": 0,
|
||||
"ci_default_git_depth": 50,
|
||||
"ci_forward_deployment_enabled": true,
|
||||
"ci_job_token_scope_enabled": false,
|
||||
"ci_separated_caches": true,
|
||||
"public_jobs": true,
|
||||
"build_timeout": 3600,
|
||||
"auto_cancel_pending_pipelines": "enabled",
|
||||
"build_coverage_regex": null,
|
||||
"ci_config_path": "",
|
||||
"shared_with_groups": [],
|
||||
"only_allow_merge_if_pipeline_succeeds": false,
|
||||
"allow_merge_on_skipped_pipeline": null,
|
||||
"restrict_user_defined_variables": false,
|
||||
"request_access_enabled": true,
|
||||
"only_allow_merge_if_all_discussions_are_resolved": false,
|
||||
"remove_source_branch_after_merge": true,
|
||||
"printing_merge_request_link_enabled": true,
|
||||
"merge_method": "merge",
|
||||
"squash_option": "default_off",
|
||||
"enforce_auth_checks_on_uploads": true,
|
||||
"suggestion_commit_message": null,
|
||||
"merge_commit_template": null,
|
||||
"squash_commit_template": null,
|
||||
"auto_devops_enabled": false,
|
||||
"auto_devops_deploy_strategy": "continuous",
|
||||
"autoclose_referenced_issues": true,
|
||||
"keep_latest_artifact": true,
|
||||
"runner_token_expiration_interval": null,
|
||||
"external_authorization_classification_label": "",
|
||||
"requirements_enabled": false,
|
||||
"requirements_access_level": "enabled",
|
||||
"security_and_compliance_enabled": true,
|
||||
"compliance_frameworks": []
|
||||
}
|
||||
]
|
||||
42
e2e_tests/docker-rest/flask/gitlab_api_user.json
Normal file
42
e2e_tests/docker-rest/flask/gitlab_api_user.json
Normal file
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"id": 1,
|
||||
"username": "myuser1",
|
||||
"name": "My User",
|
||||
"state": "active",
|
||||
"avatar_url": "https://example.com/avatar",
|
||||
"web_url": "https://example.com/myuser1",
|
||||
"created_at": "2016-12-10T10:09:11.585Z",
|
||||
"bio": "",
|
||||
"location": "",
|
||||
"public_email": "",
|
||||
"skype": "",
|
||||
"linkedin": "",
|
||||
"twitter": "",
|
||||
"website_url": "",
|
||||
"organization": "",
|
||||
"job_title": "",
|
||||
"pronouns": "",
|
||||
"bot": false,
|
||||
"work_information": null,
|
||||
"followers": 0,
|
||||
"following": 0,
|
||||
"is_followed": false,
|
||||
"local_time": "11:59 PM",
|
||||
"last_sign_in_at": "2020-03-14T09:13:44.977Z",
|
||||
"confirmed_at": "2022-05-19T23:48:47.033Z",
|
||||
"last_activity_on": "2022-05-19",
|
||||
"email": "myuser1@example.com",
|
||||
"theme_id": null,
|
||||
"color_scheme_id": 1,
|
||||
"projects_limit": 100000,
|
||||
"current_sign_in_at": "2022-05-19T23:45:49.661Z",
|
||||
"identities": [],
|
||||
"can_create_group": true,
|
||||
"can_create_project": true,
|
||||
"two_factor_enabled": false,
|
||||
"external": false,
|
||||
"private_profile": false,
|
||||
"commit_email": "myuser1@example.com",
|
||||
"shared_runners_minutes_limit": 2000,
|
||||
"extra_shared_runners_minutes_limit": null
|
||||
}
|
||||
14
e2e_tests/docker/Dockerfile
Normal file
14
e2e_tests/docker/Dockerfile
Normal file
@@ -0,0 +1,14 @@
|
||||
FROM docker.io/debian:11.3
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
python3-pytest \
|
||||
python3-toml \
|
||||
python3-git \
|
||||
python3-yaml \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /tests
|
||||
|
||||
ENTRYPOINT ["/bin/sh", "-c", "--"]
|
||||
@@ -8,9 +8,7 @@ import hashlib
|
||||
|
||||
import git
|
||||
|
||||
binary = os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "target/release/grm"
|
||||
)
|
||||
binary = os.environ["GRM_BINARY"]
|
||||
|
||||
|
||||
def grm(args, cwd=None, is_invalid=False):
|
||||
@@ -20,6 +18,8 @@ def grm(args, cwd=None, is_invalid=False):
|
||||
print(f"grmcmd: {args}")
|
||||
print(f"stdout:\n{cmd.stdout}")
|
||||
print(f"stderr:\n{cmd.stderr}")
|
||||
assert "secret-token:" not in cmd.stdout
|
||||
assert "secret-token:" not in cmd.stderr
|
||||
assert "panicked" not in cmd.stderr
|
||||
return cmd
|
||||
|
||||
@@ -53,8 +53,35 @@ def checksum_directory(path):
|
||||
raise f"{path} not found"
|
||||
|
||||
def get_stat_hash(path):
|
||||
stat = bytes(str(os.stat(path).__hash__()), "ascii")
|
||||
return stat
|
||||
checksum = hashlib.md5()
|
||||
|
||||
# A note about bytes(). You may think that it converts something to
|
||||
# bytes (akin to str()). But it actually creates a list of zero bytes
|
||||
# with the length specified by the parameter.
|
||||
#
|
||||
# This is kinda couterintuitive to me:
|
||||
#
|
||||
# str(5) => '5'
|
||||
# bytes(5) => b'\x00\x00\x00\x00\x00'
|
||||
def int_to_bytes(i):
|
||||
return i.to_bytes((i.bit_length() + 7) // 8, byteorder="big")
|
||||
|
||||
# lstat() instead of stat() so symlinks are not followed. So symlinks
|
||||
# are treated as-is and will also be checked for changes.
|
||||
stat = os.lstat(path)
|
||||
|
||||
# Note that the list of attributes does not include any timings except
|
||||
# mtime.
|
||||
for s in [
|
||||
stat.st_mode, # type & permission bits
|
||||
stat.st_ino, # inode
|
||||
stat.st_uid,
|
||||
stat.st_gid,
|
||||
# it's a float in seconds, so this gives us ~1us precision
|
||||
int(stat.st_mtime * 1e6),
|
||||
]:
|
||||
checksum.update(int_to_bytes(s))
|
||||
return checksum.digest()
|
||||
|
||||
for root, dirs, files in os.walk(path):
|
||||
for file in files:
|
||||
@@ -95,9 +122,9 @@ class TempGitRepository:
|
||||
f"""
|
||||
cd {self.tmpdir.name}
|
||||
git init
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
echo test > root-commit
|
||||
git add root-commit
|
||||
git commit -m "root-commit"
|
||||
git remote add origin file://{self.remote_1_dir.name}
|
||||
git remote add otherremote file://{self.remote_2_dir.name}
|
||||
"""
|
||||
@@ -134,20 +161,24 @@ class TempGitRepositoryWorktree:
|
||||
f"""
|
||||
cd {self.tmpdir.name}
|
||||
git init
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
echo test > test2
|
||||
git add test2
|
||||
git commit -m "commit2"
|
||||
echo test > root-commit-in-worktree-1
|
||||
git add root-commit-in-worktree-1
|
||||
git commit -m "root-commit-in-worktree-1"
|
||||
echo test > root-commit-in-worktree-2
|
||||
git add root-commit-in-worktree-2
|
||||
git commit -m "root-commit-in-worktree-2"
|
||||
git remote add origin file://{self.remote_1_dir.name}
|
||||
git remote add otherremote file://{self.remote_2_dir.name}
|
||||
git push origin HEAD:master
|
||||
git ls-files | xargs rm -rf
|
||||
mv .git .git-main-working-tree
|
||||
git --git-dir .git-main-working-tree config core.bare true
|
||||
"""
|
||||
)
|
||||
return self.tmpdir.name
|
||||
commit = git.Repo(
|
||||
f"{self.tmpdir.name}/.git-main-working-tree"
|
||||
).head.commit.hexsha
|
||||
return (self.tmpdir.name, commit)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
del self.tmpdir
|
||||
@@ -155,6 +186,37 @@ class TempGitRepositoryWorktree:
|
||||
del self.remote_2_dir
|
||||
|
||||
|
||||
class RepoTree:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.root = tempfile.TemporaryDirectory()
|
||||
self.config = tempfile.NamedTemporaryFile()
|
||||
with open(self.config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{self.root.name}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test_worktree"
|
||||
worktree_setup = true
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", self.config.name])
|
||||
assert cmd.returncode == 0
|
||||
return (self.root.name, self.config.name, ["test", "test_worktree"])
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
del self.root
|
||||
del self.config
|
||||
|
||||
|
||||
class EmptyDir:
|
||||
def __init__(self):
|
||||
pass
|
||||
@@ -197,12 +259,12 @@ class TempGitFileRemote:
|
||||
f"""
|
||||
cd {self.tmpdir.name}
|
||||
git init
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
echo test > test2
|
||||
git add test2
|
||||
git commit -m "commit2"
|
||||
echo test > root-commit-in-remote-1
|
||||
git add root-commit-in-remote-1
|
||||
git commit -m "root-commit-in-remote-1"
|
||||
echo test > root-commit-in-remote-2
|
||||
git add root-commit-in-remote-2
|
||||
git commit -m "root-commit-in-remote-2"
|
||||
git ls-files | xargs rm -rf
|
||||
mv .git/* .
|
||||
git config core.bare true
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
attrs==21.2.0
|
||||
gitdb==4.0.9
|
||||
GitPython==3.1.24
|
||||
iniconfig==1.1.1
|
||||
packaging==21.3
|
||||
pluggy==1.0.0
|
||||
py==1.11.0
|
||||
pyparsing==3.0.6
|
||||
pytest==6.2.5
|
||||
smmap==5.0.0
|
||||
toml==0.10.2
|
||||
typing-extensions==4.0.0
|
||||
@@ -3,13 +3,15 @@
|
||||
import tempfile
|
||||
|
||||
import toml
|
||||
import pytest
|
||||
import yaml
|
||||
|
||||
from helpers import *
|
||||
|
||||
|
||||
def test_repos_find_nonexistent():
|
||||
with NonExistentPath() as nonexistent_dir:
|
||||
cmd = grm(["repos", "find", nonexistent_dir])
|
||||
cmd = grm(["repos", "find", "local", nonexistent_dir])
|
||||
assert "does not exist" in cmd.stderr.lower()
|
||||
assert cmd.returncode != 0
|
||||
assert not os.path.exists(nonexistent_dir)
|
||||
@@ -17,19 +19,30 @@ def test_repos_find_nonexistent():
|
||||
|
||||
def test_repos_find_file():
|
||||
with tempfile.NamedTemporaryFile() as tmpfile:
|
||||
cmd = grm(["repos", "find", tmpfile.name])
|
||||
cmd = grm(["repos", "find", "local", tmpfile.name])
|
||||
assert "not a directory" in cmd.stderr.lower()
|
||||
assert cmd.returncode != 0
|
||||
|
||||
|
||||
def test_repos_find_empty():
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
cmd = grm(["repos", "find", tmpdir])
|
||||
cmd = grm(["repos", "find", "local", tmpdir])
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_repos_find_invalid_format():
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
cmd = grm(
|
||||
["repos", "find", "local", tmpdir, "--format", "invalidformat"],
|
||||
is_invalid=True,
|
||||
)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "isn't a valid value" in cmd.stderr
|
||||
|
||||
|
||||
def test_repos_find_non_git_repos():
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
shell(
|
||||
@@ -43,14 +56,16 @@ def test_repos_find_non_git_repos():
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "find", tmpdir])
|
||||
cmd = grm(["repos", "find", "local", tmpdir])
|
||||
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_repos_find():
|
||||
@pytest.mark.parametrize("default", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_find(configtype, default):
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
shell(
|
||||
f"""
|
||||
@@ -69,7 +84,7 @@ def test_repos_find():
|
||||
(
|
||||
cd ./repo2
|
||||
git init
|
||||
git co -b main
|
||||
git checkout -b main
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
@@ -83,11 +98,19 @@ def test_repos_find():
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "find", tmpdir])
|
||||
args = ["repos", "find", "local", tmpdir]
|
||||
if not default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
@@ -125,14 +148,24 @@ def test_repos_find():
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
|
||||
|
||||
def test_repos_find_in_root():
|
||||
@pytest.mark.parametrize("default", [True, False])
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_find_in_root(configtype, default):
|
||||
with TempGitRepository() as repo_dir:
|
||||
|
||||
cmd = grm(["repos", "find", repo_dir])
|
||||
args = ["repos", "find", "local", repo_dir]
|
||||
if not default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stderr) == 0
|
||||
|
||||
if default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
@@ -158,3 +191,88 @@ def test_repos_find_in_root():
|
||||
someremote = [r for r in repo1["remotes"] if r["name"] == "otherremote"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert someremote["type"] == "file"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("default", [True, False])
|
||||
def test_repos_find_with_invalid_repo(configtype, default):
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
shell(
|
||||
f"""
|
||||
cd {tmpdir}
|
||||
mkdir repo1
|
||||
(
|
||||
cd ./repo1
|
||||
git init
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
git remote add origin https://example.com/repo2.git
|
||||
git remote add someremote ssh://example.com/repo2.git
|
||||
)
|
||||
mkdir repo2
|
||||
(
|
||||
cd ./repo2
|
||||
git init
|
||||
git checkout -b main
|
||||
echo test > test
|
||||
git add test
|
||||
git commit -m "commit1"
|
||||
git remote add origin https://example.com/repo2.git
|
||||
)
|
||||
mkdir broken_repo
|
||||
(
|
||||
cd broken_repo
|
||||
echo "broken" > .git
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
args = ["repos", "find", "local", tmpdir]
|
||||
if not default:
|
||||
args += ["--format", configtype]
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
assert "broken" in cmd.stderr
|
||||
|
||||
if default or configtype == "toml":
|
||||
output = toml.loads(cmd.stdout)
|
||||
elif configtype == "yaml":
|
||||
output = yaml.safe_load(cmd.stdout)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
assert isinstance(output, dict)
|
||||
assert set(output.keys()) == {"trees"}
|
||||
assert isinstance(output["trees"], list)
|
||||
assert len(output["trees"]) == 1
|
||||
for tree in output["trees"]:
|
||||
assert set(tree.keys()) == {"root", "repos"}
|
||||
assert tree["root"] == tmpdir
|
||||
assert isinstance(tree["repos"], list)
|
||||
assert len(tree["repos"]) == 2
|
||||
|
||||
repo1 = [r for r in tree["repos"] if r["name"] == "repo1"][0]
|
||||
assert repo1["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo1["remotes"]) == 2
|
||||
|
||||
origin = [r for r in repo1["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "https"
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
|
||||
someremote = [r for r in repo1["remotes"] if r["name"] == "someremote"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert someremote["type"] == "ssh"
|
||||
assert someremote["url"] == "ssh://example.com/repo2.git"
|
||||
|
||||
repo2 = [r for r in tree["repos"] if r["name"] == "repo2"][0]
|
||||
assert repo2["worktree_setup"] is False
|
||||
assert isinstance(repo1["remotes"], list)
|
||||
assert len(repo2["remotes"]) == 1
|
||||
|
||||
origin = [r for r in repo2["remotes"] if r["name"] == "origin"][0]
|
||||
assert set(origin.keys()) == {"name", "type", "url"}
|
||||
assert origin["type"] == "https"
|
||||
assert origin["url"] == "https://example.com/repo2.git"
|
||||
|
||||
950
e2e_tests/test_repos_find_remote.py
Normal file
950
e2e_tests/test_repos_find_remote.py
Normal file
@@ -0,0 +1,950 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
import os
|
||||
|
||||
import toml
|
||||
import pytest
|
||||
import yaml
|
||||
|
||||
from helpers import *
|
||||
|
||||
|
||||
ALTERNATE_DOMAIN = os.environ["ALTERNATE_DOMAIN"]
|
||||
PROVIDERS = ["github", "gitlab"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("use_config", [True, False])
|
||||
def test_repos_find_remote_invalid_provider(use_config):
|
||||
if use_config:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
provider = "thisproviderdoesnotexist"
|
||||
token_command = "true"
|
||||
root = "/"
|
||||
"""
|
||||
)
|
||||
args = ["repos", "find", "config", "--config", config.name]
|
||||
cmd = grm(args, is_invalid=True)
|
||||
else:
|
||||
args = [
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
"thisproviderdoesnotexist",
|
||||
"--token-command",
|
||||
"true",
|
||||
"--root",
|
||||
"/",
|
||||
]
|
||||
cmd = grm(args, is_invalid=True)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
if not use_config:
|
||||
assert re.match(".*isn't a valid value for.*provider", cmd.stderr)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
def test_repos_find_remote_invalid_format(provider):
|
||||
cmd = grm(
|
||||
[
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--format",
|
||||
"invalidformat",
|
||||
"--token-command",
|
||||
"true",
|
||||
"--root",
|
||||
"/myroot",
|
||||
],
|
||||
is_invalid=True,
|
||||
)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "isn't a valid value" in cmd.stderr
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
|
||||
def test_repos_find_remote_token_command_failed(provider):
|
||||
cmd = grm(
|
||||
[
|
||||
"repos",
|
||||
"find",
|
||||
"remote",
|
||||
"--provider",
|
||||
provider,
|
||||
"--format",
|
||||
"yaml",
|
||||
"--token-command",
|
||||
"false",
|
||||
"--root",
|
||||
"/myroot",
|
||||
],
|
||||
is_invalid=True,
|
||||
)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "token command failed" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_wrong_token(provider, use_config):
    """An invalid auth token must surface as a "bad credentials" failure.

    Runs once against a config file and once with plain CLI flags; both
    paths must exit non-zero with an empty stdout.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            # Closing the write handle flushes the config before grm reads it.
            with open(config.name, "w") as f:
                f.write(
                    f"""
                    provider = "{provider}"
                    token_command = "echo wrongtoken"
                    root = "/myroot"

                    [filters]
                    access = true
                    """
                )
            args = ["repos", "find", "config", "--config", config.name]
            cmd = grm(args, is_invalid=True)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo wrongtoken",
            "--root",
            "/myroot",
            "--access",
        ]
        cmd = grm(args, is_invalid=True)

    assert cmd.returncode != 0
    assert len(cmd.stdout) == 0
    assert "bad credentials" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_no_filter(provider, configtype, default, use_config):
    """Without any filter the search must succeed and yield zero trees.

    ``default`` controls whether ``--format`` is passed explicitly or left
    at its default; the output is parsed as TOML (the default) or YAML
    accordingly.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                f.write(
                    f"""
                    provider = "{provider}"
                    token_command = "echo secret-token:myauthtoken"
                    root = "/myroot"
                    """
                )
            args = ["repos", "find", "config", "--config", config.name]
            if not default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
        ]
        if not default:
            args += ["--format", configtype]
        cmd = grm(args)

    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user_empty(
    provider, configtype, configtype_default, use_config
):
    """Filtering by a user that owns no repositories must yield zero trees.

    Uses the fixture user "someotheruser", which the test provider mock
    apparently has no projects for (verified by the empty-trees assertion
    below).
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
                provider = "{provider}"
                token_command = "echo secret-token:myauthtoken"
                root = "/myroot"

                [filters]
                users = ["someotheruser"]
                """

                f.write(cfg)
            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--user",
            "someotheruser",
        ]

        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [True, False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("use_owner", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    use_owner,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Filtering by user "myuser1" (or by --owner) must return that user's
    five fixture projects with the expected remote URLs.

    Covers config-file vs. CLI input, output format, worktree setup,
    forced SSH remotes, and an alternate API endpoint (which GitHub does
    not support and must reject).
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
                provider = "{provider}"
                token_command = "echo secret-token:myauthtoken"
                root = "/myroot"
                """

                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    # Plain string: no placeholders, so no f-prefix needed.
                    cfg += "force_ssh = true\n"
                if use_owner:
                    cfg += """
                    [filters]
                    owner = true\n
                    """
                else:
                    cfg += """
                    [filters]
                    users = ["myuser1"]\n
                    """

                # Echo the generated config for easier failure diagnosis.
                print(cfg)
                f.write(cfg)

            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
        ]
        if use_owner:
            args += ["--owner"]
        else:
            args += ["--user", "myuser1"]
        if force_ssh:
            args += ["--force-ssh"]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]

        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)

    # Overriding the API URL is unsupported for GitHub and must fail early.
    if use_alternate_endpoint and provider == "github":
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return

    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 1

    assert set(output["trees"][0].keys()) == {"root", "repos"}
    assert isinstance(output["trees"][0]["repos"], list)
    assert len(output["trees"][0]["repos"]) == 5

    for i in range(1, 6):
        repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
            0
        ]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        # The fixture serves myproject1 over SSH only; the rest are SSH
        # only when --force-ssh is in effect.
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_group_empty(
    provider, configtype, configtype_default, use_alternate_endpoint, use_config
):
    """Filtering by a group with no repositories must yield zero trees.

    Also checks that overriding the API URL is rejected for the GitHub
    provider.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
                provider = "{provider}"
                token_command = "echo secret-token:myauthtoken"
                root = "/myroot"
                """

                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
                [filters]
                groups = ["someothergroup"]\n
                """

                f.write(cfg)

            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--group",
            "someothergroup",
        ]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]

        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)

    # Overriding the API URL is unsupported for GitHub and must fail early.
    if use_alternate_endpoint and provider == "github":
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_group(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Filtering by group "mygroup1" must return its five fixture projects
    with the expected remote URLs.

    Covers config-file vs. CLI input, output format, worktree setup,
    forced SSH remotes, and the alternate-endpoint rejection for GitHub.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
                provider = "{provider}"
                token_command = "echo secret-token:myauthtoken"
                root = "/myroot"
                """

                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    # Plain string: no placeholders, so no f-prefix needed.
                    cfg += "force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
                [filters]
                groups = ["mygroup1"]\n
                """

                f.write(cfg)

            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--group",
            "mygroup1",
        ]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]

        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    # Overriding the API URL is unsupported for GitHub and must fail early.
    if use_alternate_endpoint and provider == "github":
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 1

    assert set(output["trees"][0].keys()) == {"root", "repos"}
    assert isinstance(output["trees"][0]["repos"], list)
    assert len(output["trees"][0]["repos"]) == 5

    for i in range(1, 6):
        repo = [r for r in output["trees"][0]["repos"] if r["name"] == f"myproject{i}"][
            0
        ]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        # The fixture serves myproject1 over SSH only; the rest are SSH
        # only when --force-ssh is in effect.
        if force_ssh or i == 1:
            assert repo["remotes"][0]["name"] == provider
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert repo["remotes"][0]["name"] == provider
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("use_owner", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_user_and_group(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    use_owner,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """Combining a user filter (or --owner) with a group filter must return
    two trees: one namespace for the user and one for the group, each with
    five fixture projects.
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
                provider = "{provider}"
                token_command = "echo secret-token:myauthtoken"
                root = "/myroot"
                """

                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    # Plain string: no placeholders, so no f-prefix needed.
                    cfg += "force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
                [filters]
                groups = ["mygroup1"]\n
                """

                # Appended after the [filters] header, so these keys land in
                # the filters table.
                if use_owner:
                    cfg += "owner = true\n"
                else:
                    cfg += 'users = ["myuser1"]\n'

                f.write(cfg)

            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)
    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--group",
            "mygroup1",
        ]
        if use_owner:
            args += ["--owner"]
        else:
            args += ["--user", "myuser1"]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]

        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    # Overriding the API URL is unsupported for GitHub and must fail early.
    if use_alternate_endpoint and provider == "github":
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 2

    user_namespace = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]

    assert set(user_namespace.keys()) == {"root", "repos"}
    assert isinstance(user_namespace["repos"], list)
    assert len(user_namespace["repos"]) == 5

    for i in range(1, 6):
        repo = [r for r in user_namespace["repos"] if r["name"] == f"myproject{i}"][0]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        # myproject1 is SSH-only in the fixture; others are SSH only under
        # --force-ssh.
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/myuser1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"

    group_namespace = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][0]

    assert set(group_namespace.keys()) == {"root", "repos"}
    assert isinstance(group_namespace["repos"], list)
    assert len(group_namespace["repos"]) == 5

    for i in range(1, 6):
        repo = [r for r in group_namespace["repos"] if r["name"] == f"myproject{i}"][0]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        if force_ssh or i == 1:
            assert (
                repo["remotes"][0]["url"]
                == f"ssh://git@example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == f"https://example.com/mygroup1/myproject{i}.git"
            )
            assert repo["remotes"][0]["type"] == "https"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", PROVIDERS)
@pytest.mark.parametrize("configtype_default", [False])
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
@pytest.mark.parametrize("worktree_default", [True, False])
@pytest.mark.parametrize("worktree", [True, False])
@pytest.mark.parametrize("with_user_filter", [True, False])
@pytest.mark.parametrize("with_group_filter", [True, False])
@pytest.mark.parametrize("force_ssh", [True, False])
@pytest.mark.parametrize("use_alternate_endpoint", [True, False])
@pytest.mark.parametrize("use_config", [True, False])
def test_repos_find_remote_owner(
    provider,
    configtype,
    configtype_default,
    worktree,
    worktree_default,
    with_user_filter,
    with_group_filter,
    force_ssh,
    use_alternate_endpoint,
    use_config,
):
    """The access filter (``--access`` / ``access = true``) must return four
    namespaces; extra user/group filters widen the matching repo sets.

    Expected fixture layout (verified by the assertions below): myuser1
    (2 accessible repos, or all 5 with the user filter), myuser2 (1 repo),
    mygroup1 (1 repo, or all 5 with the group filter), mygroup2 (1 repo).
    """
    if use_config:
        with tempfile.NamedTemporaryFile() as config:
            with open(config.name, "w") as f:
                cfg = f"""
                provider = "{provider}"
                token_command = "echo secret-token:myauthtoken"
                root = "/myroot"
                """

                if not worktree_default:
                    cfg += f"worktree = {str(worktree).lower()}\n"
                if force_ssh:
                    # Plain string: no placeholders, so no f-prefix needed.
                    cfg += "force_ssh = true\n"
                if use_alternate_endpoint:
                    cfg += f'api_url = "http://{ALTERNATE_DOMAIN}:5000/{provider}"\n'
                cfg += """
                [filters]
                access = true\n
                """

                # Appended after the [filters] header, so these keys land in
                # the filters table.
                if with_user_filter:
                    cfg += 'users = ["myuser1"]\n'
                if with_group_filter:
                    cfg += 'groups = ["mygroup1"]\n'

                f.write(cfg)

            args = ["repos", "find", "config", "--config", config.name]
            if not configtype_default:
                args += ["--format", configtype]
            cmd = grm(args)

    else:
        args = [
            "repos",
            "find",
            "remote",
            "--provider",
            provider,
            "--token-command",
            "echo secret-token:myauthtoken",
            "--root",
            "/myroot",
            "--access",
        ]
        if not worktree_default:
            args += ["--worktree", str(worktree).lower()]
        if with_user_filter:
            args += ["--user", "myuser1"]
        if with_group_filter:
            args += ["--group", "mygroup1"]
        if force_ssh:
            args += ["--force-ssh"]
        if use_alternate_endpoint:
            args += ["--api-url", f"http://{ALTERNATE_DOMAIN}:5000/{provider}"]

        if not configtype_default:
            args += ["--format", configtype]
        cmd = grm(args)
    # Overriding the API URL is unsupported for GitHub and must fail early.
    if use_alternate_endpoint and provider == "github":
        assert cmd.returncode != 0
        assert "overriding is not supported for github" in cmd.stderr.lower()
        return
    assert cmd.returncode == 0
    assert len(cmd.stderr) == 0

    # TOML is the default output format when --format was not given.
    if configtype_default or configtype == "toml":
        output = toml.loads(cmd.stdout)
    elif configtype == "yaml":
        output = yaml.safe_load(cmd.stdout)
    else:
        raise NotImplementedError()

    assert isinstance(output, dict)
    assert set(output.keys()) == {"trees"}
    assert isinstance(output["trees"], list)
    assert len(output["trees"]) == 4

    user_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/myuser1"][0]

    assert set(user_namespace_1.keys()) == {"root", "repos"}
    assert isinstance(user_namespace_1["repos"], list)

    if with_user_filter:
        # The explicit user filter pulls in all five of myuser1's projects.
        assert len(user_namespace_1["repos"]) == 5

        for i in range(1, 6):
            repo = [
                r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert repo["worktree_setup"] is (not worktree_default and worktree)
            assert isinstance(repo["remotes"], list)
            assert len(repo["remotes"]) == 1
            assert repo["remotes"][0]["name"] == provider
            if force_ssh or i == 1:
                assert (
                    repo["remotes"][0]["url"]
                    == f"ssh://git@example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "ssh"
            else:
                assert (
                    repo["remotes"][0]["url"]
                    == f"https://example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "https"
    else:
        # Access filter alone only reaches myproject1 and myproject2.
        assert len(user_namespace_1["repos"]) == 2

        for i in range(1, 3):
            repo = [
                r for r in user_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert repo["worktree_setup"] is (not worktree_default and worktree)
            assert isinstance(repo["remotes"], list)
            assert len(repo["remotes"]) == 1
            assert repo["remotes"][0]["name"] == provider
            if force_ssh or i == 1:
                assert (
                    repo["remotes"][0]["url"]
                    == f"ssh://git@example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "ssh"
            else:
                assert (
                    repo["remotes"][0]["url"]
                    == f"https://example.com/myuser1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "https"

    user_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/myuser2"][0]

    assert set(user_namespace_2.keys()) == {"root", "repos"}
    assert isinstance(user_namespace_2["repos"], list)
    assert len(user_namespace_2["repos"]) == 1

    repo = user_namespace_2["repos"][0]
    assert repo["worktree_setup"] is (not worktree_default and worktree)
    assert isinstance(repo["remotes"], list)
    assert len(repo["remotes"]) == 1
    assert repo["remotes"][0]["name"] == provider
    if force_ssh:
        assert (
            repo["remotes"][0]["url"] == "ssh://git@example.com/myuser2/myproject3.git"
        )
        assert repo["remotes"][0]["type"] == "ssh"
    else:
        assert (
            repo["remotes"][0]["url"] == "https://example.com/myuser2/myproject3.git"
        )
        assert repo["remotes"][0]["type"] == "https"

    group_namespace_1 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup1"][
        0
    ]

    assert set(group_namespace_1.keys()) == {"root", "repos"}
    assert isinstance(group_namespace_1["repos"], list)

    if with_group_filter:
        # The explicit group filter pulls in all five of mygroup1's projects.
        assert len(group_namespace_1["repos"]) == 5

        for i in range(1, 6):
            repo = [
                r for r in group_namespace_1["repos"] if r["name"] == f"myproject{i}"
            ][0]
            assert repo["worktree_setup"] is (not worktree_default and worktree)
            assert isinstance(repo["remotes"], list)
            assert len(repo["remotes"]) == 1
            assert repo["remotes"][0]["name"] == provider
            if force_ssh or i == 1:
                assert (
                    repo["remotes"][0]["url"]
                    == f"ssh://git@example.com/mygroup1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "ssh"
            else:
                assert (
                    repo["remotes"][0]["url"]
                    == f"https://example.com/mygroup1/myproject{i}.git"
                )
                assert repo["remotes"][0]["type"] == "https"
    else:
        # Access filter alone only reaches myproject4 in mygroup1.
        assert len(group_namespace_1["repos"]) == 1

        repo = group_namespace_1["repos"][0]
        assert repo["worktree_setup"] is (not worktree_default and worktree)
        assert isinstance(repo["remotes"], list)
        assert len(repo["remotes"]) == 1
        assert repo["remotes"][0]["name"] == provider
        if force_ssh:
            assert (
                repo["remotes"][0]["url"]
                == "ssh://git@example.com/mygroup1/myproject4.git"
            )
            assert repo["remotes"][0]["type"] == "ssh"
        else:
            assert (
                repo["remotes"][0]["url"]
                == "https://example.com/mygroup1/myproject4.git"
            )
            assert repo["remotes"][0]["type"] == "https"

    group_namespace_2 = [t for t in output["trees"] if t["root"] == "/myroot/mygroup2"][
        0
    ]

    assert set(group_namespace_2.keys()) == {"root", "repos"}
    assert isinstance(group_namespace_2["repos"], list)
    assert len(group_namespace_2["repos"]) == 1

    repo = group_namespace_2["repos"][0]
    assert repo["worktree_setup"] is (not worktree_default and worktree)
    assert isinstance(repo["remotes"], list)
    assert len(repo["remotes"]) == 1
    assert repo["remotes"][0]["name"] == provider
    if force_ssh:
        assert (
            repo["remotes"][0]["url"]
            == "ssh://git@example.com/mygroup2/myproject5.git"
        )
        assert repo["remotes"][0]["type"] == "ssh"
    else:
        assert (
            repo["remotes"][0]["url"] == "https://example.com/mygroup2/myproject5.git"
        )
        assert repo["remotes"][0]["type"] == "https"
|
||||
13
e2e_tests/test_repos_status.py
Normal file
13
e2e_tests/test_repos_status.py
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import tempfile
|
||||
|
||||
from helpers import *
|
||||
|
||||
|
||||
def test_repos_sync_worktree_clone():
    # NOTE(review): despite the name, this test exercises `grm repos status`,
    # not sync/worktree cloning — consider renaming to test_repos_status.
    # Every repo managed by the RepoTree fixture must appear in the status
    # output.
    with RepoTree() as (root, config, repos):
        cmd = grm(["repos", "status", "--config", config])
        assert cmd.returncode == 0
        for repo in repos:
            assert repo in cmd.stdout
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import tempfile
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import pytest
|
||||
import toml
|
||||
@@ -9,8 +10,204 @@ import git
|
||||
|
||||
from helpers import *
|
||||
|
||||
templates = {
|
||||
"repo_simple": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
def test_repos_sync_config_is_valid_symlink():
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
""",
|
||||
"yaml": """
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: "test"
|
||||
""",
|
||||
},
|
||||
"repo_with_remote": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "{remotename}"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: test
|
||||
remotes:
|
||||
- name: "{remotename}"
|
||||
url: "file://{remote}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"repo_with_two_remotes": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: "test"
|
||||
remotes:
|
||||
- name: "origin"
|
||||
url: "file://{remote1}"
|
||||
type: "file"
|
||||
- name: "origin2"
|
||||
url: "file://{remote2}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"worktree_repo_simple": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: test
|
||||
worktree_setup: true
|
||||
"""
|
||||
),
|
||||
},
|
||||
"worktree_repo_with_remote": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: test
|
||||
worktree_setup: true
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"repo_in_subdirectory": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "outer/inner"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: outer/inner
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
"nested_trees": {
|
||||
"toml": """
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "outer"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees]]
|
||||
root = "{root}/subdir"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "inner"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
""",
|
||||
"yaml": textwrap.dedent(
|
||||
"""
|
||||
trees:
|
||||
- root: "{root}"
|
||||
repos:
|
||||
- name: outer
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote1}"
|
||||
type: "file"
|
||||
- root: "{root}/subdir"
|
||||
repos:
|
||||
- name: inner
|
||||
remotes:
|
||||
- name: origin
|
||||
url: "file://{remote2}"
|
||||
type: "file"
|
||||
"""
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_config_is_valid_symlink(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote, head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
@@ -20,21 +217,14 @@ def test_repos_sync_config_is_valid_symlink():
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config_symlink])
|
||||
subprocess.run(["cat", config.name])
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", config_symlink])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -54,7 +244,7 @@ def test_repos_sync_config_is_invalid_symlink():
|
||||
config_symlink = os.path.join(config_dir, "cfglink")
|
||||
os.symlink(nonexistent_dir, config_symlink)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config_symlink])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config_symlink])
|
||||
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
@@ -65,7 +255,7 @@ def test_repos_sync_config_is_invalid_symlink():
|
||||
|
||||
def test_repos_sync_config_is_directory():
|
||||
with tempfile.TemporaryDirectory() as config:
|
||||
cmd = grm(["repos", "sync", "--config", config])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config])
|
||||
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
@@ -77,30 +267,22 @@ def test_repos_sync_config_is_unreadable():
|
||||
config_path = os.path.join(config_dir, "cfg")
|
||||
open(config_path, "w")
|
||||
os.chmod(config_path, 0o0000)
|
||||
cmd = grm(["repos", "sync", "--config", config_path])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config_path])
|
||||
|
||||
assert os.path.exists(config_path)
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "permission denied" in cmd.stderr.lower()
|
||||
|
||||
|
||||
def test_repos_sync_unmanaged_repos():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_unmanaged_repos(configtype):
|
||||
with tempfile.TemporaryDirectory() as root:
|
||||
with TempGitRepository(dir=root) as unmanaged_repo:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{root}"
|
||||
f.write(templates["repo_simple"][configtype].format(root=root))
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(root, "test")
|
||||
@@ -112,53 +294,33 @@ def test_repos_sync_unmanaged_repos():
|
||||
assert any([re.match(regex, l) for l in cmd.stderr.lower().split("\n")])
|
||||
|
||||
|
||||
def test_repos_sync_root_is_file():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_root_is_file(configtype):
|
||||
with tempfile.NamedTemporaryFile() as target:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target.name}"
|
||||
f.write(templates["repo_simple"][configtype].format(root=target.name))
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert "not a directory" in cmd.stderr.lower()
|
||||
|
||||
|
||||
def test_repos_sync_normal_clone():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_clone(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_two_remotes"][configtype].format(
|
||||
root=target, remote1=remote1, remote2=remote2
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -183,21 +345,92 @@ def test_repos_sync_normal_clone():
|
||||
assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
def test_repos_sync_normal_init():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_repo_in_subdirectory(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote, remote_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
"""
|
||||
templates["repo_in_subdirectory"][configtype].format(
|
||||
root=target, remote=remote
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "outer", "inner")
|
||||
assert os.path.exists(git_dir)
|
||||
with git.Repo(git_dir) as repo:
|
||||
assert not repo.bare
|
||||
assert not repo.is_dirty()
|
||||
assert set([str(r) for r in repo.remotes]) == {"origin"}
|
||||
assert str(repo.active_branch) == "master"
|
||||
assert str(repo.head.commit) == remote_head_commit_sha
|
||||
|
||||
assert len(repo.remotes) == 1
|
||||
urls = list(repo.remote("origin").urls)
|
||||
assert len(urls) == 1
|
||||
assert urls[0] == f"file://{remote}"
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert not "found unmanaged repository" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_nested_clone(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
templates["nested_trees"][configtype].format(
|
||||
root=target, remote1=remote1, remote2=remote2
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
def validate(git_dir, sha, remote):
|
||||
assert os.path.exists(git_dir)
|
||||
with git.Repo(git_dir) as repo:
|
||||
assert not repo.bare
|
||||
assert not repo.is_dirty()
|
||||
assert set([str(r) for r in repo.remotes]) == {"origin"}
|
||||
assert str(repo.active_branch) == "master"
|
||||
assert str(repo.head.commit) == sha
|
||||
|
||||
assert len(repo.remotes) == 1
|
||||
urls = list(repo.remote("origin").urls)
|
||||
assert len(urls) == 1
|
||||
assert urls[0] == f"file://{remote}"
|
||||
|
||||
validate(
|
||||
os.path.join(target, "outer"), remote1_head_commit_sha, remote1
|
||||
)
|
||||
validate(
|
||||
os.path.join(target, "subdir", "inner"),
|
||||
remote2_head_commit_sha,
|
||||
remote2,
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
print(cmd.stdout)
|
||||
print(cmd.stderr)
|
||||
assert not "found unmanaged repository" in cmd.stderr.lower()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_init(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(templates["repo_simple"][configtype].format(root=target))
|
||||
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -210,28 +443,20 @@ def test_repos_sync_normal_init():
|
||||
assert not repo.head.is_valid()
|
||||
|
||||
|
||||
def test_repos_sync_normal_add_remote():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_add_remote(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -246,26 +471,12 @@ def test_repos_sync_normal_add_remote():
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_two_remotes"][configtype].format(
|
||||
root=target, remote1=remote1, remote2=remote2
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
with git.Repo(git_dir) as repo:
|
||||
assert set([str(r) for r in repo.remotes]) == {
|
||||
@@ -282,33 +493,20 @@ def test_repos_sync_normal_add_remote():
|
||||
assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
def test_repos_sync_normal_remove_remote():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_remove_remote(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_two_remotes"][configtype].format(
|
||||
root=target, remote1=remote1, remote2=remote2
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -326,21 +524,12 @@ def test_repos_sync_normal_remove_remote():
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote2, remotename="origin2"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
shell(f"cd {git_dir} && git remote -v")
|
||||
with git.Repo(git_dir) as repo:
|
||||
@@ -369,28 +558,20 @@ def test_repos_sync_normal_remove_remote():
|
||||
assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
def test_repos_sync_normal_change_remote_url():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_change_remote_url(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -405,21 +586,12 @@ def test_repos_sync_normal_change_remote_url():
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote2, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
with git.Repo(git_dir) as repo:
|
||||
assert set([str(r) for r in repo.remotes]) == {"origin"}
|
||||
@@ -429,28 +601,20 @@ def test_repos_sync_normal_change_remote_url():
|
||||
assert urls[0] == f"file://{remote2}"
|
||||
|
||||
|
||||
def test_repos_sync_normal_change_remote_name():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_change_remote_name(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
@@ -465,21 +629,12 @@ def test_repos_sync_normal_change_remote_name():
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin2"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
with git.Repo(git_dir) as repo:
|
||||
# See the note in `test_repos_sync_normal_remove_remote()`
|
||||
@@ -492,34 +647,41 @@ def test_repos_sync_normal_change_remote_name():
|
||||
assert urls[0] == f"file://{remote1}"
|
||||
|
||||
|
||||
def test_repos_sync_worktree_clone():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
@pytest.mark.parametrize("init_worktree", [True, False, "default"])
|
||||
def test_repos_sync_worktree_clone(configtype, init_worktree):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote, head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["worktree_repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
args = ["repos", "sync", "config", "--config", config.name]
|
||||
if init_worktree is True:
|
||||
args.append("--init-worktree=true")
|
||||
if init_worktree is False:
|
||||
args.append("--init-worktree=false")
|
||||
|
||||
for i in [1, 2]:
|
||||
cmd = grm(args)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
worktree_dir = f"{target}/test"
|
||||
assert os.path.exists(worktree_dir)
|
||||
|
||||
assert set(os.listdir(worktree_dir)) == {".git-main-working-tree"}
|
||||
if init_worktree is True or init_worktree == "default":
|
||||
assert set(os.listdir(worktree_dir)) == {
|
||||
".git-main-working-tree",
|
||||
"master",
|
||||
}
|
||||
else:
|
||||
assert set(os.listdir(worktree_dir)) == {
|
||||
".git-main-working-tree"
|
||||
}
|
||||
|
||||
with git.Repo(
|
||||
os.path.join(worktree_dir, ".git-main-working-tree")
|
||||
@@ -530,22 +692,16 @@ def test_repos_sync_worktree_clone():
|
||||
assert str(repo.head.commit) == head_commit_sha
|
||||
|
||||
|
||||
def test_repos_sync_worktree_init():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_worktree_init(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
"""
|
||||
templates["worktree_repo_simple"][configtype].format(root=target)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
worktree_dir = f"{target}/test"
|
||||
@@ -559,149 +715,112 @@ def test_repos_sync_worktree_init():
|
||||
assert not repo.head.is_valid()
|
||||
|
||||
|
||||
def test_repos_sync_invalid_toml():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_invalid_syntax(configtype):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
if configtype == "toml":
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = invalid as there are no quotes ;)
|
||||
"""
|
||||
)
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
elif configtype == "yaml":
|
||||
f.write(
|
||||
f"""
|
||||
trees:
|
||||
wrong:
|
||||
indentation:
|
||||
"""
|
||||
)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode != 0
|
||||
|
||||
|
||||
def test_repos_sync_unchanged():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_unchanged(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin2"
|
||||
url = "file://{remote2}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_two_remotes"][configtype].format(
|
||||
root=target, remote1=remote1, remote2=remote2
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
before = checksum_directory(target)
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
after = checksum_directory(target)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert before == after
|
||||
|
||||
|
||||
def test_repos_sync_normal_change_to_worktree():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_normal_change_to_worktree(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["worktree_repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode != 0
|
||||
assert "already exists" in cmd.stderr
|
||||
assert "not using a worktree setup" in cmd.stderr
|
||||
|
||||
|
||||
def test_repos_sync_worktree_change_to_normal():
|
||||
@pytest.mark.parametrize("configtype", ["toml", "yaml"])
|
||||
def test_repos_sync_worktree_change_to_normal(configtype):
|
||||
with tempfile.TemporaryDirectory() as target:
|
||||
with TempGitFileRemote() as (remote1, remote1_head_commit_sha):
|
||||
with TempGitFileRemote() as (remote2, remote2_head_commit_sha):
|
||||
with tempfile.NamedTemporaryFile() as config:
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
worktree_setup = true
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["worktree_repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode == 0
|
||||
|
||||
git_dir = os.path.join(target, "test")
|
||||
|
||||
with open(config.name, "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[[trees]]
|
||||
root = "{target}"
|
||||
|
||||
[[trees.repos]]
|
||||
name = "test"
|
||||
|
||||
[[trees.repos.remotes]]
|
||||
name = "origin"
|
||||
url = "file://{remote1}"
|
||||
type = "file"
|
||||
"""
|
||||
templates["repo_with_remote"][configtype].format(
|
||||
root=target, remote=remote1, remotename="origin"
|
||||
)
|
||||
)
|
||||
|
||||
cmd = grm(["repos", "sync", "--config", config.name])
|
||||
cmd = grm(["repos", "sync", "config", "--config", config.name])
|
||||
assert cmd.returncode != 0
|
||||
assert "already exists" in cmd.stderr
|
||||
assert "using a worktree setup" in cmd.stderr
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pytest
|
||||
|
||||
from helpers import *
|
||||
|
||||
|
||||
def test_worktree_clean():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" in os.listdir(base_dir)
|
||||
@@ -15,9 +17,7 @@ def test_worktree_clean():
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_no_tracking_branch():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -31,9 +31,7 @@ def test_worktree_clean_refusal_no_tracking_branch():
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_uncommited_changes_new_file():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -49,9 +47,7 @@ def test_worktree_clean_refusal_uncommited_changes_new_file():
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_uncommited_changes_changed_file():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -67,9 +63,7 @@ def test_worktree_clean_refusal_uncommited_changes_changed_file():
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_uncommited_changes_cleand_file():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -87,9 +81,7 @@ def test_worktree_clean_refusal_uncommited_changes_cleand_file():
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_commited_changes():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -107,9 +99,7 @@ def test_worktree_clean_refusal_commited_changes():
|
||||
|
||||
|
||||
def test_worktree_clean_refusal_tracking_branch_mismatch():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -127,7 +117,7 @@ def test_worktree_clean_refusal_tracking_branch_mismatch():
|
||||
|
||||
|
||||
def test_worktree_clean_fail_from_subdir():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -151,3 +141,56 @@ def test_worktree_clean_non_git():
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("configure_default_branch", [True, False])
|
||||
@pytest.mark.parametrize("branch_list_empty", [True, False])
|
||||
def test_worktree_clean_configured_default_branch(
|
||||
configure_default_branch, branch_list_empty
|
||||
):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
if configure_default_branch:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
if branch_list_empty:
|
||||
f.write(
|
||||
f"""
|
||||
persistent_branches = []
|
||||
"""
|
||||
)
|
||||
else:
|
||||
f.write(
|
||||
f"""
|
||||
persistent_branches = [
|
||||
"mybranch"
|
||||
]
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
(
|
||||
cd ./test
|
||||
touch change
|
||||
git add change
|
||||
git commit -m commit
|
||||
)
|
||||
|
||||
git --git-dir ./.git-main-working-tree worktree add mybranch
|
||||
(
|
||||
cd ./mybranch
|
||||
git merge --no-ff test
|
||||
)
|
||||
git --git-dir ./.git-main-working-tree worktree remove mybranch
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "clean"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
if configure_default_branch and not branch_list_empty:
|
||||
assert "test" not in os.listdir(base_dir)
|
||||
else:
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
143
e2e_tests/test_worktree_config_presistent_branch.py
Normal file
143
e2e_tests/test_worktree_config_presistent_branch.py
Normal file
@@ -0,0 +1,143 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os.path
|
||||
|
||||
from helpers import *
|
||||
|
||||
|
||||
def test_worktree_never_clean_persistent_branches():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
persistent_branches = [
|
||||
"mybranch",
|
||||
]
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "mybranch", "--track", "origin/master"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
before = checksum_directory(f"{base_dir}/mybranch")
|
||||
|
||||
cmd = grm(["wt", "clean"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert "mybranch" in os.listdir(base_dir)
|
||||
repo = git.Repo(os.path.join(base_dir, "mybranch"))
|
||||
assert str(repo.active_branch) == "mybranch"
|
||||
|
||||
after = checksum_directory(f"{base_dir}/mybranch")
|
||||
assert before == after
|
||||
|
||||
|
||||
def test_worktree_clean_branch_merged_into_persistent():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
persistent_branches = [
|
||||
"master",
|
||||
]
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/test
|
||||
touch change1
|
||||
git add change1
|
||||
git commit -m "commit1"
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "master"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/master
|
||||
git merge --no-ff test
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "clean"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert "test" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_no_clean_unmerged_branch():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
persistent_branches = [
|
||||
"master",
|
||||
]
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/test
|
||||
touch change1
|
||||
git add change1
|
||||
git commit -m "commit1"
|
||||
git push origin test
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "master"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
cmd = grm(["wt", "clean"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
|
||||
def test_worktree_delete_branch_merged_into_persistent():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
persistent_branches = [
|
||||
"master",
|
||||
]
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/test
|
||||
touch change1
|
||||
git add change1
|
||||
git commit -m "commit1"
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "add", "master"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/master
|
||||
git merge --no-ff test
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "delete", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert "test" not in os.listdir(base_dir)
|
||||
@@ -23,7 +23,7 @@ def test_convert():
|
||||
|
||||
|
||||
def test_convert_already_worktree():
|
||||
with TempGitRepositoryWorktree() as git_dir:
|
||||
with TempGitRepositoryWorktree() as (git_dir, _commit):
|
||||
before = checksum_directory(git_dir)
|
||||
|
||||
cmd = grm(["wt", "convert"], cwd=git_dir)
|
||||
|
||||
176
e2e_tests/test_worktree_fetch.py
Normal file
176
e2e_tests/test_worktree_fetch.py
Normal file
@@ -0,0 +1,176 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from helpers import *
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
import git
|
||||
|
||||
|
||||
def test_worktree_fetch():
|
||||
with TempGitRepositoryWorktree() as (base_dir, root_commit):
|
||||
with TempGitFileRemote() as (remote_path, _remote_sha):
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
git --git-dir .git-main-working-tree remote add upstream file://{remote_path}
|
||||
git --git-dir .git-main-working-tree push --force upstream master:master
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "fetch"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
repo = git.Repo(f"{base_dir}/.git-main-working-tree")
|
||||
assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha
|
||||
assert repo.commit("master").hexsha == repo.commit("upstream/master").hexsha
|
||||
|
||||
with EmptyDir() as tmp:
|
||||
shell(
|
||||
f"""
|
||||
cd {tmp}
|
||||
git clone {remote_path} tmp
|
||||
cd tmp
|
||||
echo change > mychange-remote
|
||||
git add mychange-remote
|
||||
git commit -m "change-remote"
|
||||
git push origin HEAD:master
|
||||
"""
|
||||
)
|
||||
remote_commit = git.Repo(f"{tmp}/tmp").commit("master").hexsha
|
||||
|
||||
assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha
|
||||
assert repo.commit("master").hexsha == repo.commit("upstream/master").hexsha
|
||||
|
||||
cmd = grm(["wt", "fetch"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
assert repo.commit("master").hexsha == repo.commit("origin/master").hexsha
|
||||
assert repo.commit("master").hexsha == root_commit
|
||||
assert repo.commit("upstream/master").hexsha == remote_commit
|
||||
|
||||
|
||||
@pytest.mark.parametrize("rebase", [True, False])
|
||||
@pytest.mark.parametrize("ffable", [True, False])
|
||||
@pytest.mark.parametrize("has_changes", [True, False])
|
||||
@pytest.mark.parametrize("stash", [True, False])
|
||||
def test_worktree_pull(rebase, ffable, has_changes, stash):
|
||||
with TempGitRepositoryWorktree() as (base_dir, root_commit):
|
||||
with TempGitFileRemote() as (remote_path, _remote_sha):
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
git --git-dir .git-main-working-tree remote add upstream file://{remote_path}
|
||||
git --git-dir .git-main-working-tree push --force upstream master:master
|
||||
"""
|
||||
)
|
||||
|
||||
repo = git.Repo(f"{base_dir}/.git-main-working-tree")
|
||||
assert repo.commit("origin/master").hexsha == repo.commit("master").hexsha
|
||||
assert repo.commit("upstream/master").hexsha == repo.commit("master").hexsha
|
||||
|
||||
with EmptyDir() as tmp:
|
||||
shell(
|
||||
f"""
|
||||
cd {tmp}
|
||||
git clone {remote_path} tmp
|
||||
cd tmp
|
||||
git checkout origin/master
|
||||
echo change > mychange-remote
|
||||
git add mychange-remote
|
||||
git commit -m "change-remote"
|
||||
git push origin HEAD:master
|
||||
"""
|
||||
)
|
||||
remote_commit = git.Repo(f"{tmp}/tmp").commit("HEAD").hexsha
|
||||
|
||||
grm(["wt", "add", "master", "--track", "upstream/master"], cwd=base_dir)
|
||||
|
||||
repo = git.Repo(f"{base_dir}/master")
|
||||
if not ffable:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/master
|
||||
echo change > mychange
|
||||
git add mychange
|
||||
git commit -m "local-commit-in-master"
|
||||
"""
|
||||
)
|
||||
|
||||
if has_changes:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/master
|
||||
echo change >> root-commit-in-worktree-1
|
||||
echo uncommitedchange > uncommitedchange
|
||||
"""
|
||||
)
|
||||
|
||||
args = ["wt", "pull"]
|
||||
if rebase:
|
||||
args += ["--rebase"]
|
||||
if stash:
|
||||
args += ["--stash"]
|
||||
cmd = grm(args, cwd=base_dir)
|
||||
if has_changes and not stash:
|
||||
assert cmd.returncode != 0
|
||||
assert re.match(r".*master.*contains changes.*", cmd.stderr)
|
||||
else:
|
||||
assert repo.commit("upstream/master").hexsha == remote_commit
|
||||
assert repo.commit("origin/master").hexsha == root_commit
|
||||
assert (
|
||||
repo.commit("master").hexsha
|
||||
!= repo.commit("origin/master").hexsha
|
||||
)
|
||||
if has_changes:
|
||||
assert ["uncommitedchange"] == repo.untracked_files
|
||||
assert repo.is_dirty()
|
||||
else:
|
||||
assert not repo.is_dirty()
|
||||
|
||||
if not rebase:
|
||||
if ffable:
|
||||
assert cmd.returncode == 0
|
||||
assert (
|
||||
repo.commit("master").hexsha
|
||||
!= repo.commit("origin/master").hexsha
|
||||
)
|
||||
assert (
|
||||
repo.commit("master").hexsha
|
||||
== repo.commit("upstream/master").hexsha
|
||||
)
|
||||
assert (
|
||||
repo.commit("upstream/master").hexsha == remote_commit
|
||||
)
|
||||
else:
|
||||
assert cmd.returncode != 0
|
||||
assert "cannot be fast forwarded" in cmd.stderr
|
||||
assert (
|
||||
repo.commit("master").hexsha
|
||||
!= repo.commit("origin/master").hexsha
|
||||
)
|
||||
assert repo.commit("master").hexsha != remote_commit
|
||||
assert (
|
||||
repo.commit("upstream/master").hexsha == remote_commit
|
||||
)
|
||||
else:
|
||||
assert cmd.returncode == 0
|
||||
if ffable:
|
||||
assert (
|
||||
repo.commit("master").hexsha
|
||||
!= repo.commit("origin/master").hexsha
|
||||
)
|
||||
assert (
|
||||
repo.commit("master").hexsha
|
||||
== repo.commit("upstream/master").hexsha
|
||||
)
|
||||
assert (
|
||||
repo.commit("upstream/master").hexsha == remote_commit
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
repo.commit("master").message.strip()
|
||||
== "local-commit-in-master"
|
||||
)
|
||||
assert repo.commit("master~1").hexsha == remote_commit
|
||||
270
e2e_tests/test_worktree_rebase.py
Normal file
270
e2e_tests/test_worktree_rebase.py
Normal file
@@ -0,0 +1,270 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from helpers import *
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
import git
|
||||
|
||||
|
||||
@pytest.mark.parametrize("pull", [True, False])
|
||||
@pytest.mark.parametrize("rebase", [True, False])
|
||||
@pytest.mark.parametrize("ffable", [True, False])
|
||||
@pytest.mark.parametrize("has_changes", [True, False])
|
||||
@pytest.mark.parametrize("stash", [True, False])
|
||||
def test_worktree_rebase(pull, rebase, ffable, has_changes, stash):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _root_commit):
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write('persistent_branches = ["mybasebranch"]')
|
||||
|
||||
repo = git.Repo(f"{base_dir}/.git-main-working-tree")
|
||||
|
||||
grm(
|
||||
["wt", "add", "mybasebranch", "--track", "origin/mybasebranch"],
|
||||
cwd=base_dir,
|
||||
)
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/mybasebranch
|
||||
echo change > mychange-root
|
||||
git add mychange-root
|
||||
git commit -m "commit-root"
|
||||
echo change > mychange-base-local
|
||||
git add mychange-base-local
|
||||
git commit -m "commit-in-base-local"
|
||||
git push origin mybasebranch
|
||||
"""
|
||||
)
|
||||
|
||||
grm(
|
||||
["wt", "add", "myfeatbranch", "--track", "origin/myfeatbranch"],
|
||||
cwd=base_dir,
|
||||
)
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/myfeatbranch
|
||||
git reset --hard mybasebranch^ # root
|
||||
echo change > mychange-feat-local
|
||||
git add mychange-feat-local
|
||||
git commit -m "commit-in-feat-local"
|
||||
git push origin HEAD:myfeatbranch
|
||||
"""
|
||||
)
|
||||
|
||||
grm(["wt", "add", "tmp"], cwd=base_dir)
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/tmp
|
||||
git reset --hard mybasebranch
|
||||
echo change > mychange-base-remote
|
||||
git add mychange-base-remote
|
||||
git commit -m "commit-in-base-remote"
|
||||
git push origin HEAD:mybasebranch
|
||||
|
||||
git reset --hard myfeatbranch
|
||||
echo change > mychange-feat-remote
|
||||
git add mychange-feat-remote
|
||||
git commit -m "commit-in-feat-remote"
|
||||
git push origin HEAD:myfeatbranch
|
||||
"""
|
||||
)
|
||||
|
||||
if not ffable:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/mybasebranch
|
||||
echo change > mychange-base-no-ff
|
||||
git add mychange-base-no-ff
|
||||
git commit -m "commit-in-base-local-no-ff"
|
||||
|
||||
cd {base_dir}/myfeatbranch
|
||||
echo change > mychange-feat-no-ff
|
||||
git add mychange-feat-no-ff
|
||||
git commit -m "commit-in-feat-local-no-ff"
|
||||
"""
|
||||
)
|
||||
|
||||
if has_changes:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}/myfeatbranch
|
||||
echo uncommitedchange > uncommitedchange
|
||||
"""
|
||||
)
|
||||
|
||||
grm(["wt", "delete", "--force", "tmp"], cwd=base_dir)
|
||||
|
||||
repo = git.Repo(f"{base_dir}/.git-main-working-tree")
|
||||
if ffable:
|
||||
assert repo.commit("mybasebranch~1").message.strip() == "commit-root"
|
||||
assert (
|
||||
repo.refs.mybasebranch.commit.message.strip() == "commit-in-base-local"
|
||||
)
|
||||
assert (
|
||||
repo.remote("origin").refs.mybasebranch.commit.message.strip()
|
||||
== "commit-in-base-remote"
|
||||
)
|
||||
assert (
|
||||
repo.refs.myfeatbranch.commit.message.strip() == "commit-in-feat-local"
|
||||
)
|
||||
assert (
|
||||
repo.remote("origin").refs.myfeatbranch.commit.message.strip()
|
||||
== "commit-in-feat-remote"
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
repo.commit("mybasebranch").message.strip()
|
||||
== "commit-in-base-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("mybasebranch~1").message.strip() == "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("mybasebranch~2").message.strip() == "commit-root"
|
||||
assert (
|
||||
repo.commit("myfeatbranch").message.strip()
|
||||
== "commit-in-feat-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("myfeatbranch~1").message.strip() == "commit-in-feat-local"
|
||||
)
|
||||
assert repo.commit("myfeatbranch~2").message.strip() == "commit-root"
|
||||
assert (
|
||||
repo.remote("origin").refs.mybasebranch.commit.message.strip()
|
||||
== "commit-in-base-remote"
|
||||
)
|
||||
assert (
|
||||
repo.remote("origin").refs.myfeatbranch.commit.message.strip()
|
||||
== "commit-in-feat-remote"
|
||||
)
|
||||
|
||||
args = ["wt", "rebase"]
|
||||
if pull:
|
||||
args += ["--pull"]
|
||||
if rebase:
|
||||
args += ["--rebase"]
|
||||
if stash:
|
||||
args += ["--stash"]
|
||||
cmd = grm(args, cwd=base_dir)
|
||||
|
||||
if rebase and not pull:
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stderr) != 0
|
||||
elif has_changes and not stash:
|
||||
assert cmd.returncode != 0
|
||||
assert re.match(r".*myfeatbranch.*contains changes.*", cmd.stderr)
|
||||
else:
|
||||
repo = git.Repo(f"{base_dir}/myfeatbranch")
|
||||
if has_changes:
|
||||
assert ["uncommitedchange"] == repo.untracked_files
|
||||
if pull:
|
||||
if rebase:
|
||||
assert cmd.returncode == 0
|
||||
if ffable:
|
||||
assert (
|
||||
repo.commit("HEAD").message.strip()
|
||||
== "commit-in-feat-remote"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~1").message.strip()
|
||||
== "commit-in-feat-local"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~2").message.strip()
|
||||
== "commit-in-base-remote"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~3").message.strip()
|
||||
== "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("HEAD~4").message.strip() == "commit-root"
|
||||
else:
|
||||
assert (
|
||||
repo.commit("HEAD").message.strip()
|
||||
== "commit-in-feat-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~1").message.strip()
|
||||
== "commit-in-feat-remote"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~2").message.strip()
|
||||
== "commit-in-feat-local"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~3").message.strip()
|
||||
== "commit-in-base-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~4").message.strip()
|
||||
== "commit-in-base-remote"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~5").message.strip()
|
||||
== "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("HEAD~6").message.strip() == "commit-root"
|
||||
else:
|
||||
if ffable:
|
||||
assert cmd.returncode == 0
|
||||
assert (
|
||||
repo.commit("HEAD").message.strip()
|
||||
== "commit-in-feat-remote"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~1").message.strip()
|
||||
== "commit-in-feat-local"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~2").message.strip()
|
||||
== "commit-in-base-remote"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~3").message.strip()
|
||||
== "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("HEAD~4").message.strip() == "commit-root"
|
||||
else:
|
||||
assert cmd.returncode != 0
|
||||
assert (
|
||||
repo.commit("HEAD").message.strip()
|
||||
== "commit-in-feat-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~1").message.strip()
|
||||
== "commit-in-feat-local"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~2").message.strip()
|
||||
== "commit-in-base-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~3").message.strip()
|
||||
== "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("HEAD~4").message.strip() == "commit-root"
|
||||
else:
|
||||
assert cmd.returncode == 0
|
||||
if ffable:
|
||||
assert repo.commit("HEAD").message.strip() == "commit-in-feat-local"
|
||||
assert (
|
||||
repo.commit("HEAD~1").message.strip() == "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("HEAD~2").message.strip() == "commit-root"
|
||||
else:
|
||||
assert (
|
||||
repo.commit("HEAD").message.strip()
|
||||
== "commit-in-feat-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~1").message.strip() == "commit-in-feat-local"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~2").message.strip()
|
||||
== "commit-in-base-local-no-ff"
|
||||
)
|
||||
assert (
|
||||
repo.commit("HEAD~3").message.strip() == "commit-in-base-local"
|
||||
)
|
||||
assert repo.commit("HEAD~4").message.strip() == "commit-root"
|
||||
@@ -1,10 +1,18 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
|
||||
from helpers import *
|
||||
|
||||
import pytest
|
||||
|
||||
def test_worktree_status():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
|
||||
@pytest.mark.parametrize("has_config", [True, False])
|
||||
def test_worktree_status(has_config):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
if has_config:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write("")
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -16,7 +24,7 @@ def test_worktree_status():
|
||||
|
||||
|
||||
def test_worktree_status_fail_from_subdir():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -40,3 +48,30 @@ def test_worktree_status_non_git():
|
||||
assert cmd.returncode != 0
|
||||
assert len(cmd.stdout) == 0
|
||||
assert len(cmd.stderr) != 0
|
||||
|
||||
|
||||
def test_worktree_status_warn_with_non_worktree_dir():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
mkdir not_a_worktree
|
||||
"""
|
||||
)
|
||||
|
||||
cmd = grm(["wt", "status"], cwd=base_dir)
|
||||
|
||||
assert cmd.returncode == 0
|
||||
assert len(cmd.stdout) != 0
|
||||
assert len(cmd.stderr) != 0
|
||||
assert (
|
||||
re.match(
|
||||
".*error.*not_a_worktree.*not a valid worktree directory",
|
||||
cmd.stderr,
|
||||
re.IGNORECASE,
|
||||
)
|
||||
is not None
|
||||
)
|
||||
|
||||
@@ -3,35 +3,157 @@
|
||||
from helpers import *
|
||||
|
||||
import git
|
||||
import pytest
|
||||
|
||||
import os.path
|
||||
|
||||
|
||||
def test_worktree_add_simple():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
|
||||
@pytest.mark.parametrize("has_config", [True, False])
|
||||
@pytest.mark.parametrize("has_default", [True, False])
|
||||
@pytest.mark.parametrize("has_prefix", [True, False])
|
||||
def test_worktree_add_simple(
|
||||
remote_branch_already_exists, has_config, has_default, has_prefix
|
||||
):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
if has_config:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[track]
|
||||
default = {str(has_default).lower()}
|
||||
default_remote = "origin"
|
||||
"""
|
||||
)
|
||||
if has_prefix:
|
||||
f.write(
|
||||
"""
|
||||
default_remote_prefix = "myprefix"
|
||||
"""
|
||||
)
|
||||
|
||||
if remote_branch_already_exists:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
git --git-dir ./.git-main-working-tree worktree add tmp
|
||||
(
|
||||
cd tmp
|
||||
touch change
|
||||
git add change
|
||||
git commit -m commit
|
||||
git push origin HEAD:test
|
||||
#git reset --hard 'HEAD@{1}'
|
||||
git branch -va
|
||||
)
|
||||
git --git-dir ./.git-main-working-tree worktree remove tmp
|
||||
"""
|
||||
)
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
files = os.listdir(base_dir)
|
||||
if has_config is True:
|
||||
assert len(files) == 3
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
|
||||
else:
|
||||
assert len(files) == 2
|
||||
assert set(files) == {".git-main-working-tree", "test"}
|
||||
|
||||
repo = git.Repo(os.path.join(base_dir, "test"))
|
||||
assert not repo.bare
|
||||
assert not repo.is_dirty()
|
||||
assert str(repo.active_branch) == "test"
|
||||
if has_config and has_default:
|
||||
if has_prefix and not remote_branch_already_exists:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch()) == "origin/myprefix/test"
|
||||
)
|
||||
else:
|
||||
assert str(repo.active_branch.tracking_branch()) == "origin/test"
|
||||
else:
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
|
||||
|
||||
def test_worktree_add_with_tracking():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
def test_worktree_add_into_subdirectory():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "dir/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
files = os.listdir(base_dir)
|
||||
assert len(files) == 2
|
||||
assert set(files) == {".git-main-working-tree", "dir"}
|
||||
|
||||
files = os.listdir(os.path.join(base_dir, "dir"))
|
||||
assert set(files) == {"test"}
|
||||
|
||||
repo = git.Repo(os.path.join(base_dir, "dir", "test"))
|
||||
assert not repo.bare
|
||||
assert not repo.is_dirty()
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
|
||||
|
||||
def test_worktree_add_into_invalid_subdirectory():
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "/dir/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 1
|
||||
assert "dir" not in os.listdir(base_dir)
|
||||
assert "dir" not in os.listdir("/")
|
||||
|
||||
cmd = grm(["wt", "add", "dir/"], cwd=base_dir)
|
||||
assert cmd.returncode == 1
|
||||
assert "dir" not in os.listdir(base_dir)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
|
||||
@pytest.mark.parametrize("has_config", [True, False])
|
||||
@pytest.mark.parametrize("has_default", [True, False])
|
||||
@pytest.mark.parametrize("has_prefix", [True, False])
|
||||
def test_worktree_add_with_tracking(
|
||||
remote_branch_already_exists, has_config, has_default, has_prefix
|
||||
):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
if has_config:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[track]
|
||||
default = {str(has_default).lower()}
|
||||
default_remote = "origin"
|
||||
"""
|
||||
)
|
||||
if has_prefix:
|
||||
f.write(
|
||||
"""
|
||||
default_remote_prefix = "myprefix"
|
||||
"""
|
||||
)
|
||||
|
||||
if remote_branch_already_exists:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
git --git-dir ./.git-main-working-tree worktree add tmp
|
||||
(
|
||||
cd tmp
|
||||
touch change
|
||||
git add change
|
||||
git commit -m commit
|
||||
git push origin HEAD:test
|
||||
#git reset --hard 'HEAD@{1}'
|
||||
git branch -va
|
||||
)
|
||||
git --git-dir ./.git-main-working-tree worktree remove tmp
|
||||
"""
|
||||
)
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
print(cmd.stderr)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
files = os.listdir(base_dir)
|
||||
if has_config is True:
|
||||
assert len(files) == 3
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
|
||||
else:
|
||||
assert len(files) == 2
|
||||
assert set(files) == {".git-main-working-tree", "test"}
|
||||
|
||||
@@ -42,8 +164,117 @@ def test_worktree_add_with_tracking():
|
||||
assert str(repo.active_branch.tracking_branch()) == "origin/test"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("has_config", [True, False])
|
||||
@pytest.mark.parametrize("has_default", [True, False])
|
||||
@pytest.mark.parametrize("has_prefix", [True, False])
|
||||
@pytest.mark.parametrize("track", [True, False])
|
||||
def test_worktree_add_with_explicit_no_tracking(
|
||||
has_config, has_default, has_prefix, track
|
||||
):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
if has_config:
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[track]
|
||||
default = {str(has_default).lower()}
|
||||
default_remote = "origin"
|
||||
"""
|
||||
)
|
||||
if has_prefix:
|
||||
f.write(
|
||||
"""
|
||||
default_remote_prefix = "myprefix"
|
||||
"""
|
||||
)
|
||||
if track is True:
|
||||
cmd = grm(
|
||||
["wt", "add", "test", "--track", "origin/test", "--no-track"],
|
||||
cwd=base_dir,
|
||||
)
|
||||
else:
|
||||
cmd = grm(["wt", "add", "test", "--no-track"], cwd=base_dir)
|
||||
print(cmd.stderr)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
files = os.listdir(base_dir)
|
||||
if has_config is True:
|
||||
assert len(files) == 3
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
|
||||
else:
|
||||
assert len(files) == 2
|
||||
assert set(files) == {".git-main-working-tree", "test"}
|
||||
|
||||
repo = git.Repo(os.path.join(base_dir, "test"))
|
||||
assert not repo.bare
|
||||
assert not repo.is_dirty()
|
||||
assert str(repo.active_branch) == "test"
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize("remote_branch_already_exists", [True, False])
|
||||
@pytest.mark.parametrize("has_default", [True, False])
|
||||
@pytest.mark.parametrize("has_prefix", [True, False])
|
||||
def test_worktree_add_with_config(
|
||||
remote_branch_already_exists, has_default, has_prefix
|
||||
):
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
with open(os.path.join(base_dir, "grm.toml"), "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
[track]
|
||||
default = {str(has_default).lower()}
|
||||
default_remote = "origin"
|
||||
"""
|
||||
)
|
||||
if has_prefix:
|
||||
f.write(
|
||||
"""
|
||||
default_remote_prefix = "myprefix"
|
||||
"""
|
||||
)
|
||||
if remote_branch_already_exists:
|
||||
shell(
|
||||
f"""
|
||||
cd {base_dir}
|
||||
git --git-dir ./.git-main-working-tree worktree add tmp
|
||||
(
|
||||
cd tmp
|
||||
touch change
|
||||
git add change
|
||||
git commit -m commit
|
||||
git push origin HEAD:test
|
||||
#git reset --hard 'HEAD@{1}'
|
||||
git branch -va
|
||||
)
|
||||
git --git-dir ./.git-main-working-tree worktree remove tmp
|
||||
"""
|
||||
)
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
print(cmd.stderr)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
files = os.listdir(base_dir)
|
||||
assert len(files) == 3
|
||||
assert set(files) == {".git-main-working-tree", "grm.toml", "test"}
|
||||
|
||||
repo = git.Repo(os.path.join(base_dir, "test"))
|
||||
assert not repo.bare
|
||||
assert not repo.is_dirty()
|
||||
assert str(repo.active_branch) == "test"
|
||||
if has_default:
|
||||
if has_prefix and not remote_branch_already_exists:
|
||||
assert (
|
||||
str(repo.active_branch.tracking_branch()) == "origin/myprefix/test"
|
||||
)
|
||||
else:
|
||||
assert str(repo.active_branch.tracking_branch()) == "origin/test"
|
||||
else:
|
||||
assert repo.active_branch.tracking_branch() is None
|
||||
|
||||
|
||||
def test_worktree_delete():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" in os.listdir(base_dir)
|
||||
@@ -60,9 +291,7 @@ def test_worktree_delete():
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_no_tracking_branch():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -78,9 +307,7 @@ def test_worktree_delete_refusal_no_tracking_branch():
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_uncommited_changes_new_file():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -98,9 +325,7 @@ def test_worktree_delete_refusal_uncommited_changes_new_file():
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_uncommited_changes_changed_file():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -118,9 +343,7 @@ def test_worktree_delete_refusal_uncommited_changes_changed_file():
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_uncommited_changes_deleted_file():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -140,9 +363,7 @@ def test_worktree_delete_refusal_uncommited_changes_deleted_file():
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_commited_changes():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -162,9 +383,7 @@ def test_worktree_delete_refusal_commited_changes():
|
||||
|
||||
|
||||
def test_worktree_delete_refusal_tracking_branch_mismatch():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -184,9 +403,7 @@ def test_worktree_delete_refusal_tracking_branch_mismatch():
|
||||
|
||||
|
||||
def test_worktree_delete_force_refusal():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
before = checksum_directory(base_dir)
|
||||
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
|
||||
@@ -196,7 +413,7 @@ def test_worktree_delete_force_refusal():
|
||||
|
||||
|
||||
def test_worktree_add_delete_add():
|
||||
with TempGitRepositoryWorktree() as base_dir:
|
||||
with TempGitRepositoryWorktree() as (base_dir, _commit):
|
||||
cmd = grm(["wt", "add", "test", "--track", "origin/test"], cwd=base_dir)
|
||||
assert cmd.returncode == 0
|
||||
assert "test" in os.listdir(base_dir)
|
||||
|
||||
16
example.config.yaml
Normal file
16
example.config.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
trees:
|
||||
- root: "~/example-projects/"
|
||||
repos:
|
||||
- name: "git-repo-manager"
|
||||
remotes:
|
||||
- name: "origin"
|
||||
url: "https://code.hkoerber.de/hannes/git-repo-manager.git"
|
||||
type: "https"
|
||||
- name: "github"
|
||||
url: "https://github.com/hakoerber/git-repo-manager.git"
|
||||
type: "https"
|
||||
- name: "dotfiles"
|
||||
remotes:
|
||||
- name: "origin"
|
||||
url: "https://github.com/hakoerber/dotfiles.git"
|
||||
type: "https"
|
||||
36
src/auth.rs
Normal file
36
src/auth.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
use std::process;
|
||||
|
||||
pub fn get_token_from_command(command: &str) -> Result<String, String> {
|
||||
let output = process::Command::new("/usr/bin/env")
|
||||
.arg("sh")
|
||||
.arg("-c")
|
||||
.arg(command)
|
||||
.output()
|
||||
.map_err(|error| format!("Failed to run token-command: {}", error))?;
|
||||
|
||||
let stderr = String::from_utf8(output.stderr).map_err(|error| error.to_string())?;
|
||||
let stdout = String::from_utf8(output.stdout).map_err(|error| error.to_string())?;
|
||||
|
||||
if !output.status.success() {
|
||||
if !stderr.is_empty() {
|
||||
return Err(format!("Token command failed: {}", stderr));
|
||||
} else {
|
||||
return Err(String::from("Token command failed."));
|
||||
}
|
||||
}
|
||||
|
||||
if !stderr.is_empty() {
|
||||
return Err(format!("Token command produced stderr: {}", stderr));
|
||||
}
|
||||
|
||||
if stdout.is_empty() {
|
||||
return Err(String::from("Token command did not produce output"));
|
||||
}
|
||||
|
||||
let token = stdout
|
||||
.split('\n')
|
||||
.next()
|
||||
.ok_or_else(|| String::from("Output did not contain any newline"))?;
|
||||
|
||||
Ok(token.to_string())
|
||||
}
|
||||
129
src/cmd.rs
129
src/cmd.rs
@@ -1,129 +0,0 @@
|
||||
use clap::{AppSettings, Parser};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(
|
||||
name = clap::crate_name!(),
|
||||
version = clap::crate_version!(),
|
||||
author = clap::crate_authors!("\n"),
|
||||
about = clap::crate_description!(),
|
||||
long_version = clap::crate_version!(),
|
||||
license = clap::crate_license!(),
|
||||
setting = AppSettings::DeriveDisplayOrder,
|
||||
setting = AppSettings::PropagateVersion,
|
||||
setting = AppSettings::HelpRequired,
|
||||
)]
|
||||
pub struct Opts {
|
||||
#[clap(subcommand)]
|
||||
pub subcmd: SubCommand,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum SubCommand {
|
||||
#[clap(about = "Manage repositories")]
|
||||
Repos(Repos),
|
||||
#[clap(visible_alias = "wt", about = "Manage worktrees")]
|
||||
Worktree(Worktree),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Repos {
|
||||
#[clap(subcommand, name = "action")]
|
||||
pub action: ReposAction,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum ReposAction {
|
||||
#[clap(
|
||||
visible_alias = "run",
|
||||
about = "Synchronize the repositories to the configured values"
|
||||
)]
|
||||
Sync(Sync),
|
||||
#[clap(about = "Generate a repository configuration from an existing file tree")]
|
||||
Find(Find),
|
||||
#[clap(about = "Show status of configured repositories")]
|
||||
Status(OptionalConfig),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct Sync {
|
||||
#[clap(
|
||||
short,
|
||||
long,
|
||||
default_value = "./config.toml",
|
||||
about = "Path to the configuration file"
|
||||
)]
|
||||
pub config: String,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct OptionalConfig {
|
||||
#[clap(short, long, about = "Path to the configuration file")]
|
||||
pub config: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Find {
|
||||
#[clap(about = "The path to search through")]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Worktree {
|
||||
#[clap(subcommand, name = "action")]
|
||||
pub action: WorktreeAction,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum WorktreeAction {
|
||||
#[clap(about = "Add a new worktree")]
|
||||
Add(WorktreeAddArgs),
|
||||
#[clap(about = "Add an existing worktree")]
|
||||
Delete(WorktreeDeleteArgs),
|
||||
#[clap(about = "Show state of existing worktrees")]
|
||||
Status(WorktreeStatusArgs),
|
||||
#[clap(about = "Convert a normal repository to a worktree setup")]
|
||||
Convert(WorktreeConvertArgs),
|
||||
#[clap(about = "Clean all worktrees that do not contain uncommited/unpushed changes")]
|
||||
Clean(WorktreeCleanArgs),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeAddArgs {
|
||||
#[clap(about = "Name of the worktree")]
|
||||
pub name: String,
|
||||
|
||||
#[clap(
|
||||
short = 'n',
|
||||
long = "branch-namespace",
|
||||
about = "Namespace of the branch"
|
||||
)]
|
||||
pub branch_namespace: Option<String>,
|
||||
#[clap(short = 't', long = "track", about = "Remote branch to track")]
|
||||
pub track: Option<String>,
|
||||
}
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeDeleteArgs {
|
||||
#[clap(about = "Name of the worktree")]
|
||||
pub name: String,
|
||||
|
||||
#[clap(
|
||||
long = "force",
|
||||
about = "Force deletion, even when there are uncommitted/unpushed changes"
|
||||
)]
|
||||
pub force: bool,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeStatusArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeConvertArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeCleanArgs {}
|
||||
|
||||
pub fn parse() -> Opts {
|
||||
Opts::parse()
|
||||
}
|
||||
298
src/config.rs
298
src/config.rs
@@ -1,21 +1,300 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process;
|
||||
|
||||
use super::repo::Repo;
|
||||
use std::path::Path;
|
||||
|
||||
use super::auth;
|
||||
use super::output::*;
|
||||
use super::path;
|
||||
use super::provider;
|
||||
use super::provider::Filter;
|
||||
use super::provider::Provider;
|
||||
use super::repo;
|
||||
use super::tree;
|
||||
|
||||
pub type RemoteProvider = provider::RemoteProvider;
|
||||
pub type RemoteType = repo::RemoteType;
|
||||
|
||||
fn worktree_setup_default() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Config {
|
||||
pub trees: Vec<Tree>,
|
||||
#[serde(untagged)]
|
||||
pub enum Config {
|
||||
ConfigTrees(ConfigTrees),
|
||||
ConfigProvider(ConfigProvider),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Tree {
|
||||
pub struct ConfigTrees {
|
||||
pub trees: Vec<ConfigTree>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ConfigProviderFilter {
|
||||
pub access: Option<bool>,
|
||||
pub owner: Option<bool>,
|
||||
pub users: Option<Vec<String>>,
|
||||
pub groups: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ConfigProvider {
|
||||
pub provider: RemoteProvider,
|
||||
pub token_command: String,
|
||||
pub root: String,
|
||||
pub repos: Option<Vec<Repo>>,
|
||||
pub filters: Option<ConfigProviderFilter>,
|
||||
|
||||
pub force_ssh: Option<bool>,
|
||||
|
||||
pub api_url: Option<String>,
|
||||
|
||||
pub worktree: Option<bool>,
|
||||
pub init_worktree: Option<bool>,
|
||||
}
|
||||
|
||||
pub fn read_config(path: &str) -> Result<Config, String> {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct RemoteConfig {
|
||||
pub name: String,
|
||||
pub url: String,
|
||||
#[serde(rename = "type")]
|
||||
pub remote_type: RemoteType,
|
||||
}
|
||||
|
||||
impl RemoteConfig {
|
||||
pub fn from_remote(remote: repo::Remote) -> Self {
|
||||
Self {
|
||||
name: remote.name,
|
||||
url: remote.url,
|
||||
remote_type: remote.remote_type,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_remote(self) -> repo::Remote {
|
||||
repo::Remote {
|
||||
name: self.name,
|
||||
url: self.url,
|
||||
remote_type: self.remote_type,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct RepoConfig {
|
||||
pub name: String,
|
||||
|
||||
#[serde(default = "worktree_setup_default")]
|
||||
pub worktree_setup: bool,
|
||||
|
||||
pub remotes: Option<Vec<RemoteConfig>>,
|
||||
}
|
||||
|
||||
impl RepoConfig {
|
||||
pub fn from_repo(repo: repo::Repo) -> Self {
|
||||
Self {
|
||||
name: repo.name,
|
||||
worktree_setup: repo.worktree_setup,
|
||||
remotes: repo
|
||||
.remotes
|
||||
.map(|remotes| remotes.into_iter().map(RemoteConfig::from_remote).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_repo(self) -> repo::Repo {
|
||||
let (namespace, name) = if let Some((namespace, name)) = self.name.rsplit_once('/') {
|
||||
(Some(namespace.to_string()), name.to_string())
|
||||
} else {
|
||||
(None, self.name)
|
||||
};
|
||||
|
||||
repo::Repo {
|
||||
name,
|
||||
namespace,
|
||||
worktree_setup: self.worktree_setup,
|
||||
remotes: self.remotes.map(|remotes| {
|
||||
remotes
|
||||
.into_iter()
|
||||
.map(|remote| remote.into_remote())
|
||||
.collect()
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ConfigTrees {
|
||||
pub fn to_config(self) -> Config {
|
||||
Config::ConfigTrees(self)
|
||||
}
|
||||
|
||||
pub fn from_vec(vec: Vec<ConfigTree>) -> Self {
|
||||
ConfigTrees { trees: vec }
|
||||
}
|
||||
|
||||
pub fn from_trees(vec: Vec<tree::Tree>) -> Self {
|
||||
ConfigTrees {
|
||||
trees: vec.into_iter().map(ConfigTree::from_tree).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn trees(self) -> Vec<ConfigTree> {
|
||||
self.trees
|
||||
}
|
||||
|
||||
pub fn trees_mut(&mut self) -> &mut Vec<ConfigTree> {
|
||||
&mut self.trees
|
||||
}
|
||||
|
||||
pub fn trees_ref(&self) -> &Vec<ConfigTree> {
|
||||
self.trees.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn trees(self) -> Result<Vec<ConfigTree>, String> {
|
||||
match self {
|
||||
Config::ConfigTrees(config) => Ok(config.trees),
|
||||
Config::ConfigProvider(config) => {
|
||||
let token = match auth::get_token_from_command(&config.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filters = config.filters.unwrap_or(ConfigProviderFilter {
|
||||
access: Some(false),
|
||||
owner: Some(false),
|
||||
users: Some(vec![]),
|
||||
groups: Some(vec![]),
|
||||
});
|
||||
|
||||
let filter = Filter::new(
|
||||
filters.users.unwrap_or_default(),
|
||||
filters.groups.unwrap_or_default(),
|
||||
filters.owner.unwrap_or(false),
|
||||
filters.access.unwrap_or(false),
|
||||
);
|
||||
|
||||
let repos = match config.provider {
|
||||
RemoteProvider::Github => {
|
||||
match provider::Github::new(filter, token, config.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
config.worktree.unwrap_or(false),
|
||||
config.force_ssh.unwrap_or(false),
|
||||
)?
|
||||
}
|
||||
RemoteProvider::Gitlab => {
|
||||
match provider::Gitlab::new(filter, token, config.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
config.worktree.unwrap_or(false),
|
||||
config.force_ssh.unwrap_or(false),
|
||||
)?
|
||||
}
|
||||
};
|
||||
|
||||
let mut trees = vec![];
|
||||
|
||||
for (namespace, namespace_repos) in repos {
|
||||
let repos = namespace_repos
|
||||
.into_iter()
|
||||
.map(RepoConfig::from_repo)
|
||||
.collect();
|
||||
let tree = ConfigTree {
|
||||
root: if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&config.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&config.root))
|
||||
},
|
||||
repos: Some(repos),
|
||||
};
|
||||
trees.push(tree);
|
||||
}
|
||||
Ok(trees)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_trees(trees: Vec<ConfigTree>) -> Self {
|
||||
Config::ConfigTrees(ConfigTrees { trees })
|
||||
}
|
||||
|
||||
pub fn normalize(&mut self) {
|
||||
if let Config::ConfigTrees(config) = self {
|
||||
let home = path::env_home().display().to_string();
|
||||
for tree in &mut config.trees_mut().iter_mut() {
|
||||
if tree.root.starts_with(&home) {
|
||||
// The tilde is not handled differently, it's just a normal path component for `Path`.
|
||||
// Therefore we can treat it like that during **output**.
|
||||
//
|
||||
// The `unwrap()` is safe here as we are testing via `starts_with()`
|
||||
// beforehand
|
||||
let mut path = tree.root.strip_prefix(&home).unwrap();
|
||||
if path.starts_with('/') {
|
||||
path = path.strip_prefix('/').unwrap();
|
||||
}
|
||||
|
||||
tree.root = Path::new("~").join(path).display().to_string();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_toml(&self) -> Result<String, String> {
|
||||
match toml::to_string(self) {
|
||||
Ok(toml) => Ok(toml),
|
||||
Err(error) => Err(error.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_yaml(&self) -> Result<String, String> {
|
||||
serde_yaml::to_string(self).map_err(|e| e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct ConfigTree {
|
||||
pub root: String,
|
||||
pub repos: Option<Vec<RepoConfig>>,
|
||||
}
|
||||
|
||||
impl ConfigTree {
|
||||
pub fn from_repos(root: String, repos: Vec<repo::Repo>) -> Self {
|
||||
Self {
|
||||
root,
|
||||
repos: Some(repos.into_iter().map(RepoConfig::from_repo).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_tree(tree: tree::Tree) -> Self {
|
||||
Self {
|
||||
root: tree.root,
|
||||
repos: Some(tree.repos.into_iter().map(RepoConfig::from_repo).collect()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_config<'a, T>(path: &str) -> Result<T, String>
|
||||
where
|
||||
T: for<'de> serde::Deserialize<'de>,
|
||||
{
|
||||
let content = match std::fs::read_to_string(&path) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
@@ -30,7 +309,9 @@ pub fn read_config(path: &str) -> Result<Config, String> {
|
||||
}
|
||||
};
|
||||
|
||||
let config: Config = match toml::from_str(&content) {
|
||||
let config: T = match toml::from_str(&content) {
|
||||
Ok(c) => c,
|
||||
Err(_) => match serde_yaml::from_str(&content) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
return Err(format!(
|
||||
@@ -38,6 +319,7 @@ pub fn read_config(path: &str) -> Result<Config, String> {
|
||||
path, e
|
||||
))
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
Ok(config)
|
||||
|
||||
346
src/grm/cmd.rs
Normal file
346
src/grm/cmd.rs
Normal file
@@ -0,0 +1,346 @@
|
||||
use clap::{AppSettings, Parser};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(
|
||||
name = clap::crate_name!(),
|
||||
version = clap::crate_version!(),
|
||||
author = clap::crate_authors!("\n"),
|
||||
about = clap::crate_description!(),
|
||||
long_version = clap::crate_version!(),
|
||||
global_setting(AppSettings::DeriveDisplayOrder),
|
||||
propagate_version = true,
|
||||
)]
|
||||
pub struct Opts {
|
||||
#[clap(subcommand)]
|
||||
pub subcmd: SubCommand,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum SubCommand {
|
||||
#[clap(about = "Manage repositories")]
|
||||
Repos(Repos),
|
||||
#[clap(visible_alias = "wt", about = "Manage worktrees")]
|
||||
Worktree(Worktree),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Repos {
|
||||
#[clap(subcommand, name = "action")]
|
||||
pub action: ReposAction,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum ReposAction {
|
||||
#[clap(subcommand)]
|
||||
Sync(SyncAction),
|
||||
#[clap(subcommand)]
|
||||
Find(FindAction),
|
||||
#[clap(about = "Show status of configured repositories")]
|
||||
Status(OptionalConfig),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(about = "Sync local repositories with a configured list")]
|
||||
pub enum SyncAction {
|
||||
#[clap(about = "Synchronize the repositories to the configured values")]
|
||||
Config(Config),
|
||||
#[clap(about = "Synchronize the repositories from a remote provider")]
|
||||
Remote(SyncRemoteArgs),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(about = "Generate a repository configuration from existing repositories")]
|
||||
pub enum FindAction {
|
||||
#[clap(about = "Find local repositories")]
|
||||
Local(FindLocalArgs),
|
||||
#[clap(about = "Find repositories on remote provider")]
|
||||
Remote(FindRemoteArgs),
|
||||
#[clap(about = "Find repositories as defined in the configuration file")]
|
||||
Config(FindConfigArgs),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct FindLocalArgs {
|
||||
#[clap(help = "The path to search through")]
|
||||
pub path: String,
|
||||
|
||||
#[clap(
|
||||
arg_enum,
|
||||
short,
|
||||
long,
|
||||
help = "Format to produce",
|
||||
default_value_t = ConfigFormat::Toml,
|
||||
)]
|
||||
pub format: ConfigFormat,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct FindConfigArgs {
|
||||
#[clap(
|
||||
short,
|
||||
long,
|
||||
default_value = "./config.toml",
|
||||
help = "Path to the configuration file"
|
||||
)]
|
||||
pub config: String,
|
||||
|
||||
#[clap(
|
||||
arg_enum,
|
||||
short,
|
||||
long,
|
||||
help = "Format to produce",
|
||||
default_value_t = ConfigFormat::Toml,
|
||||
)]
|
||||
pub format: ConfigFormat,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct FindRemoteArgs {
|
||||
#[clap(short, long, help = "Path to the configuration file")]
|
||||
pub config: Option<String>,
|
||||
|
||||
#[clap(arg_enum, short, long, help = "Remote provider to use")]
|
||||
pub provider: RemoteProvider,
|
||||
|
||||
#[clap(
|
||||
multiple_occurrences = true,
|
||||
name = "user",
|
||||
long,
|
||||
help = "Users to get repositories from"
|
||||
)]
|
||||
pub users: Vec<String>,
|
||||
|
||||
#[clap(
|
||||
multiple_occurrences = true,
|
||||
name = "group",
|
||||
long,
|
||||
help = "Groups to get repositories from"
|
||||
)]
|
||||
pub groups: Vec<String>,
|
||||
|
||||
#[clap(long, help = "Get repositories that belong to the requesting user")]
|
||||
pub owner: bool,
|
||||
|
||||
#[clap(long, help = "Get repositories that the requesting user has access to")]
|
||||
pub access: bool,
|
||||
|
||||
#[clap(long, help = "Always use SSH, even for public repositories")]
|
||||
pub force_ssh: bool,
|
||||
|
||||
#[clap(long, help = "Command to get API token")]
|
||||
pub token_command: String,
|
||||
|
||||
#[clap(long, help = "Root of the repo tree to produce")]
|
||||
pub root: String,
|
||||
|
||||
#[clap(
|
||||
arg_enum,
|
||||
short,
|
||||
long,
|
||||
help = "Format to produce",
|
||||
default_value_t = ConfigFormat::Toml,
|
||||
)]
|
||||
pub format: ConfigFormat,
|
||||
|
||||
#[clap(
|
||||
long,
|
||||
help = "Use worktree setup for repositories",
|
||||
possible_values = &["true", "false"],
|
||||
default_value = "false",
|
||||
default_missing_value = "true",
|
||||
min_values = 0,
|
||||
max_values = 1,
|
||||
)]
|
||||
pub worktree: String,
|
||||
|
||||
#[clap(long, help = "Base URL for the API")]
|
||||
pub api_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct Config {
|
||||
#[clap(
|
||||
short,
|
||||
long,
|
||||
default_value = "./config.toml",
|
||||
help = "Path to the configuration file"
|
||||
)]
|
||||
pub config: String,
|
||||
|
||||
#[clap(
|
||||
long,
|
||||
help = "Check out the default worktree after clone",
|
||||
possible_values = &["true", "false"],
|
||||
default_value = "true",
|
||||
default_missing_value = "true",
|
||||
min_values = 0,
|
||||
max_values = 1,
|
||||
)]
|
||||
pub init_worktree: String,
|
||||
}
|
||||
|
||||
pub type RemoteProvider = super::provider::RemoteProvider;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct SyncRemoteArgs {
|
||||
#[clap(arg_enum, short, long, help = "Remote provider to use")]
|
||||
pub provider: RemoteProvider,
|
||||
|
||||
#[clap(
|
||||
multiple_occurrences = true,
|
||||
name = "user",
|
||||
long,
|
||||
help = "Users to get repositories from"
|
||||
)]
|
||||
pub users: Vec<String>,
|
||||
|
||||
#[clap(
|
||||
multiple_occurrences = true,
|
||||
name = "group",
|
||||
long,
|
||||
help = "Groups to get repositories from"
|
||||
)]
|
||||
pub groups: Vec<String>,
|
||||
|
||||
#[clap(long, help = "Get repositories that belong to the requesting user")]
|
||||
pub owner: bool,
|
||||
|
||||
#[clap(long, help = "Get repositories that the requesting user has access to")]
|
||||
pub access: bool,
|
||||
|
||||
#[clap(long, help = "Always use SSH, even for public repositories")]
|
||||
pub force_ssh: bool,
|
||||
|
||||
#[clap(long, help = "Command to get API token")]
|
||||
pub token_command: String,
|
||||
|
||||
#[clap(long, help = "Root of the repo tree to produce")]
|
||||
pub root: String,
|
||||
|
||||
#[clap(
|
||||
long,
|
||||
help = "Use worktree setup for repositories",
|
||||
possible_values = &["true", "false"],
|
||||
default_value = "false",
|
||||
default_missing_value = "true",
|
||||
min_values = 0,
|
||||
max_values = 1,
|
||||
)]
|
||||
pub worktree: String,
|
||||
|
||||
#[clap(long, help = "Base URL for the API")]
|
||||
pub api_url: Option<String>,
|
||||
|
||||
#[clap(
|
||||
long,
|
||||
help = "Check out the default worktree after clone",
|
||||
possible_values = &["true", "false"],
|
||||
default_value = "true",
|
||||
default_missing_value = "true",
|
||||
min_values = 0,
|
||||
max_values = 1,
|
||||
)]
|
||||
pub init_worktree: String,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap()]
|
||||
pub struct OptionalConfig {
|
||||
#[clap(short, long, help = "Path to the configuration file")]
|
||||
pub config: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(clap::ArgEnum, Clone)]
|
||||
pub enum ConfigFormat {
|
||||
Yaml,
|
||||
Toml,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Worktree {
|
||||
#[clap(subcommand, name = "action")]
|
||||
pub action: WorktreeAction,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub enum WorktreeAction {
|
||||
#[clap(about = "Add a new worktree")]
|
||||
Add(WorktreeAddArgs),
|
||||
#[clap(about = "Add an existing worktree")]
|
||||
Delete(WorktreeDeleteArgs),
|
||||
#[clap(about = "Show state of existing worktrees")]
|
||||
Status(WorktreeStatusArgs),
|
||||
#[clap(about = "Convert a normal repository to a worktree setup")]
|
||||
Convert(WorktreeConvertArgs),
|
||||
#[clap(about = "Clean all worktrees that do not contain uncommited/unpushed changes")]
|
||||
Clean(WorktreeCleanArgs),
|
||||
#[clap(about = "Fetch refs from remotes")]
|
||||
Fetch(WorktreeFetchArgs),
|
||||
#[clap(about = "Fetch refs from remotes and update local branches")]
|
||||
Pull(WorktreePullArgs),
|
||||
#[clap(about = "Rebase worktree onto default branch")]
|
||||
Rebase(WorktreeRebaseArgs),
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeAddArgs {
|
||||
#[clap(help = "Name of the worktree")]
|
||||
pub name: String,
|
||||
|
||||
#[clap(short = 't', long = "track", help = "Remote branch to track")]
|
||||
pub track: Option<String>,
|
||||
|
||||
#[clap(long = "--no-track", help = "Disable tracking")]
|
||||
pub no_track: bool,
|
||||
}
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeDeleteArgs {
|
||||
#[clap(help = "Name of the worktree")]
|
||||
pub name: String,
|
||||
|
||||
#[clap(
|
||||
long = "force",
|
||||
help = "Force deletion, even when there are uncommitted/unpushed changes"
|
||||
)]
|
||||
pub force: bool,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeStatusArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeConvertArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeCleanArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeFetchArgs {}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreePullArgs {
|
||||
#[clap(long = "--rebase", help = "Perform a rebase instead of a fast-forward")]
|
||||
pub rebase: bool,
|
||||
#[clap(long = "--stash", help = "Stash & unstash changes before & after pull")]
|
||||
pub stash: bool,
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct WorktreeRebaseArgs {
|
||||
#[clap(long = "--pull", help = "Perform a pull before rebasing")]
|
||||
pub pull: bool,
|
||||
#[clap(long = "--rebase", help = "Perform a rebase when doing a pull")]
|
||||
pub rebase: bool,
|
||||
#[clap(
|
||||
long = "--stash",
|
||||
help = "Stash & unstash changes before & after rebase"
|
||||
)]
|
||||
pub stash: bool,
|
||||
}
|
||||
|
||||
pub fn parse() -> Opts {
|
||||
Opts::parse()
|
||||
}
|
||||
774
src/grm/main.rs
Normal file
774
src/grm/main.rs
Normal file
@@ -0,0 +1,774 @@
|
||||
use std::path::Path;
|
||||
use std::process;
|
||||
|
||||
mod cmd;
|
||||
|
||||
use grm::auth;
|
||||
use grm::config;
|
||||
use grm::find_in_tree;
|
||||
use grm::output::*;
|
||||
use grm::path;
|
||||
use grm::provider;
|
||||
use grm::provider::Provider;
|
||||
use grm::repo;
|
||||
use grm::table;
|
||||
use grm::tree;
|
||||
use grm::worktree;
|
||||
|
||||
fn main() {
|
||||
let opts = cmd::parse();
|
||||
|
||||
match opts.subcmd {
|
||||
cmd::SubCommand::Repos(repos) => match repos.action {
|
||||
cmd::ReposAction::Sync(sync) => match sync {
|
||||
cmd::SyncAction::Config(args) => {
|
||||
let config = match config::read_config(&args.config) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
match tree::sync_trees(config, args.init_worktree == "true") {
|
||||
Ok(success) => {
|
||||
if !success {
|
||||
process::exit(1)
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error syncing trees: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::SyncAction::Remote(args) => {
|
||||
let token = match auth::get_token_from_command(&args.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filter =
|
||||
provider::Filter::new(args.users, args.groups, args.owner, args.access);
|
||||
|
||||
let worktree = args.worktree == "true";
|
||||
|
||||
let repos = match args.provider {
|
||||
cmd::RemoteProvider::Github => {
|
||||
match provider::Github::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(worktree, args.force_ssh)
|
||||
}
|
||||
cmd::RemoteProvider::Gitlab => {
|
||||
match provider::Gitlab::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(worktree, args.force_ssh)
|
||||
}
|
||||
};
|
||||
|
||||
match repos {
|
||||
Ok(repos) => {
|
||||
let mut trees: Vec<config::ConfigTree> = vec![];
|
||||
|
||||
for (namespace, repolist) in repos {
|
||||
let root = if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&args.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&args.root))
|
||||
};
|
||||
|
||||
let tree = config::ConfigTree::from_repos(root, repolist);
|
||||
trees.push(tree);
|
||||
}
|
||||
|
||||
let config = config::Config::from_trees(trees);
|
||||
|
||||
match tree::sync_trees(config, args.init_worktree == "true") {
|
||||
Ok(success) => {
|
||||
if !success {
|
||||
process::exit(1)
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error syncing trees: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
cmd::ReposAction::Status(args) => match &args.config {
|
||||
Some(config_path) => {
|
||||
let config = match config::read_config(config_path) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
match table::get_status_table(config) {
|
||||
Ok((tables, errors)) => {
|
||||
for table in tables {
|
||||
println!("{}", table);
|
||||
}
|
||||
for error in errors {
|
||||
print_error(&format!("Error: {}", error));
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error getting status: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let dir = match std::env::current_dir() {
|
||||
Ok(dir) => dir,
|
||||
Err(error) => {
|
||||
print_error(&format!("Could not open current directory: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
match table::show_single_repo_status(&dir) {
|
||||
Ok((table, warnings)) => {
|
||||
println!("{}", table);
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error getting status: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
cmd::ReposAction::Find(find) => match find {
|
||||
cmd::FindAction::Local(args) => {
|
||||
let path = Path::new(&args.path);
|
||||
if !path.exists() {
|
||||
print_error(&format!("Path \"{}\" does not exist", path.display()));
|
||||
process::exit(1);
|
||||
}
|
||||
if !path.is_dir() {
|
||||
print_error(&format!("Path \"{}\" is not a directory", path.display()));
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
let path = match path.canonicalize() {
|
||||
Ok(path) => path,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed to canonicalize path \"{}\". This is a bug. Error message: {}",
|
||||
&path.display(),
|
||||
error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let (found_repos, warnings) = match find_in_tree(&path) {
|
||||
Ok((repos, warnings)) => (repos, warnings),
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let trees = config::ConfigTrees::from_trees(vec![found_repos]);
|
||||
if trees.trees_ref().iter().all(|t| match &t.repos {
|
||||
None => false,
|
||||
Some(r) => r.is_empty(),
|
||||
}) {
|
||||
print_warning("No repositories found");
|
||||
} else {
|
||||
let mut config = trees.to_config();
|
||||
|
||||
config.normalize();
|
||||
|
||||
match args.format {
|
||||
cmd::ConfigFormat::Toml => {
|
||||
let toml = match config.as_toml() {
|
||||
Ok(toml) => toml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to TOML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", toml);
|
||||
}
|
||||
cmd::ConfigFormat::Yaml => {
|
||||
let yaml = match config.as_yaml() {
|
||||
Ok(yaml) => yaml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to YAML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", yaml);
|
||||
}
|
||||
}
|
||||
}
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
cmd::FindAction::Config(args) => {
|
||||
let config: config::ConfigProvider = match config::read_config(&args.config) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&error);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let token = match auth::get_token_from_command(&config.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filters = config.filters.unwrap_or(config::ConfigProviderFilter {
|
||||
access: Some(false),
|
||||
owner: Some(false),
|
||||
users: Some(vec![]),
|
||||
groups: Some(vec![]),
|
||||
});
|
||||
|
||||
let filter = provider::Filter::new(
|
||||
filters.users.unwrap_or_default(),
|
||||
filters.groups.unwrap_or_default(),
|
||||
filters.owner.unwrap_or(false),
|
||||
filters.access.unwrap_or(false),
|
||||
);
|
||||
|
||||
let repos = match config.provider {
|
||||
provider::RemoteProvider::Github => {
|
||||
match match provider::Github::new(filter, token, config.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
config.worktree.unwrap_or(false),
|
||||
config.force_ssh.unwrap_or(false),
|
||||
) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
provider::RemoteProvider::Gitlab => {
|
||||
match match provider::Gitlab::new(filter, token, config.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(
|
||||
config.worktree.unwrap_or(false),
|
||||
config.force_ssh.unwrap_or(false),
|
||||
) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let mut trees = vec![];
|
||||
|
||||
for (namespace, namespace_repos) in repos {
|
||||
let tree = config::ConfigTree {
|
||||
root: if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&config.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&config.root))
|
||||
},
|
||||
repos: Some(
|
||||
namespace_repos
|
||||
.into_iter()
|
||||
.map(config::RepoConfig::from_repo)
|
||||
.collect(),
|
||||
),
|
||||
};
|
||||
trees.push(tree);
|
||||
}
|
||||
|
||||
let config = config::Config::from_trees(trees);
|
||||
|
||||
match args.format {
|
||||
cmd::ConfigFormat::Toml => {
|
||||
let toml = match config.as_toml() {
|
||||
Ok(toml) => toml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to TOML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", toml);
|
||||
}
|
||||
cmd::ConfigFormat::Yaml => {
|
||||
let yaml = match config.as_yaml() {
|
||||
Ok(yaml) => yaml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to YAML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", yaml);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::FindAction::Remote(args) => {
|
||||
let token = match auth::get_token_from_command(&args.token_command) {
|
||||
Ok(token) => token,
|
||||
Err(error) => {
|
||||
print_error(&format!("Getting token from command failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let filter =
|
||||
provider::Filter::new(args.users, args.groups, args.owner, args.access);
|
||||
|
||||
let worktree = args.worktree == "true";
|
||||
|
||||
let repos = match args.provider {
|
||||
cmd::RemoteProvider::Github => {
|
||||
match provider::Github::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(worktree, args.force_ssh)
|
||||
}
|
||||
cmd::RemoteProvider::Gitlab => {
|
||||
match provider::Gitlab::new(filter, token, args.api_url) {
|
||||
Ok(provider) => provider,
|
||||
Err(error) => {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
.get_repos(worktree, args.force_ssh)
|
||||
}
|
||||
};
|
||||
|
||||
let repos = repos.unwrap_or_else(|error| {
|
||||
print_error(&format!("Error: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut trees: Vec<config::ConfigTree> = vec![];
|
||||
|
||||
for (namespace, repolist) in repos {
|
||||
let tree = config::ConfigTree {
|
||||
root: if let Some(namespace) = namespace {
|
||||
path::path_as_string(&Path::new(&args.root).join(namespace))
|
||||
} else {
|
||||
path::path_as_string(Path::new(&args.root))
|
||||
},
|
||||
repos: Some(
|
||||
repolist
|
||||
.into_iter()
|
||||
.map(config::RepoConfig::from_repo)
|
||||
.collect(),
|
||||
),
|
||||
};
|
||||
trees.push(tree);
|
||||
}
|
||||
|
||||
let mut config = config::Config::from_trees(trees);
|
||||
|
||||
config.normalize();
|
||||
|
||||
match args.format {
|
||||
cmd::ConfigFormat::Toml => {
|
||||
let toml = match config.as_toml() {
|
||||
Ok(toml) => toml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to TOML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", toml);
|
||||
}
|
||||
cmd::ConfigFormat::Yaml => {
|
||||
let yaml = match config.as_yaml() {
|
||||
Ok(yaml) => yaml,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Failed converting config to YAML: {}",
|
||||
&error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
print!("{}", yaml);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
cmd::SubCommand::Worktree(args) => {
|
||||
let cwd = std::env::current_dir().unwrap_or_else(|error| {
|
||||
print_error(&format!("Could not open current directory: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match args.action {
|
||||
cmd::WorktreeAction::Add(action_args) => {
|
||||
let track = match &action_args.track {
|
||||
Some(branch) => {
|
||||
let split = branch.split_once('/');
|
||||
|
||||
if split.is_none()
|
||||
|| split.unwrap().0.is_empty()
|
||||
|| split.unwrap().1.is_empty()
|
||||
{
|
||||
print_error("Tracking branch needs to match the pattern <remote>/<branch_name>");
|
||||
process::exit(1);
|
||||
};
|
||||
|
||||
// unwrap() here is safe because we checked for
|
||||
// is_none() explictily before
|
||||
let (remote_name, remote_branch_name) = split.unwrap();
|
||||
|
||||
Some((remote_name, remote_branch_name))
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let mut name: &str = &action_args.name;
|
||||
let subdirectory;
|
||||
let split = name.split_once('/');
|
||||
match split {
|
||||
None => subdirectory = None,
|
||||
Some(split) => {
|
||||
if split.0.is_empty() || split.1.is_empty() {
|
||||
print_error("Worktree name cannot start or end with a slash");
|
||||
process::exit(1);
|
||||
} else {
|
||||
(subdirectory, name) = (Some(Path::new(split.0)), split.1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match worktree::add_worktree(
|
||||
&cwd,
|
||||
name,
|
||||
subdirectory,
|
||||
track,
|
||||
action_args.no_track,
|
||||
) {
|
||||
Ok(_) => print_success(&format!("Worktree {} created", &action_args.name)),
|
||||
Err(error) => {
|
||||
print_error(&format!("Error creating worktree: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Delete(action_args) => {
|
||||
let worktree_dir = cwd.join(&action_args.name);
|
||||
|
||||
let worktree_config = match repo::read_worktree_root_config(&cwd) {
|
||||
Ok(config) => config,
|
||||
Err(error) => {
|
||||
print_error(&format!(
|
||||
"Error getting worktree configuration: {}",
|
||||
error
|
||||
));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
print_error(&format!("Error opening repository: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match repo.remove_worktree(
|
||||
&action_args.name,
|
||||
&worktree_dir,
|
||||
action_args.force,
|
||||
&worktree_config,
|
||||
) {
|
||||
Ok(_) => print_success(&format!("Worktree {} deleted", &action_args.name)),
|
||||
Err(error) => {
|
||||
match error {
|
||||
repo::WorktreeRemoveFailureReason::Error(msg) => {
|
||||
print_error(&msg);
|
||||
process::exit(1);
|
||||
}
|
||||
repo::WorktreeRemoveFailureReason::Changes(changes) => {
|
||||
print_warning(&format!(
|
||||
"Changes in worktree: {}. Refusing to delete",
|
||||
changes
|
||||
));
|
||||
}
|
||||
repo::WorktreeRemoveFailureReason::NotMerged(message) => {
|
||||
print_warning(&message);
|
||||
}
|
||||
}
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Status(_args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
print_error(&format!("Error opening repository: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match table::get_worktree_status_table(&repo, &cwd) {
|
||||
Ok((table, errors)) => {
|
||||
println!("{}", table);
|
||||
for error in errors {
|
||||
print_error(&format!("Error: {}", error));
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Error getting status: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Convert(_args) => {
|
||||
// Converting works like this:
|
||||
// * Check whether there are uncommitted/unpushed changes
|
||||
// * Move the contents of .git dir to the worktree directory
|
||||
// * Remove all files
|
||||
// * Set `core.bare` to `true`
|
||||
|
||||
let repo = repo::RepoHandle::open(&cwd, false).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match repo.convert_to_worktree(&cwd) {
|
||||
Ok(_) => print_success("Conversion done"),
|
||||
Err(reason) => {
|
||||
match reason {
|
||||
repo::WorktreeConversionFailureReason::Changes => {
|
||||
print_error("Changes found in repository, refusing to convert");
|
||||
}
|
||||
repo::WorktreeConversionFailureReason::Ignored => {
|
||||
print_error("Ignored files found in repository, refusing to convert. Run git clean -f -d -X to remove them manually.");
|
||||
}
|
||||
repo::WorktreeConversionFailureReason::Error(error) => {
|
||||
print_error(&format!("Error during conversion: {}", error));
|
||||
}
|
||||
}
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Clean(_args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
match repo.cleanup_worktrees(&cwd) {
|
||||
Ok(warnings) => {
|
||||
for warning in warnings {
|
||||
print_warning(&warning);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
print_error(&format!("Worktree cleanup failed: {}", error));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
for unmanaged_worktree in
|
||||
repo.find_unmanaged_worktrees(&cwd).unwrap_or_else(|error| {
|
||||
print_error(&format!("Failed finding unmanaged worktrees: {}", error));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
print_warning(&format!(
|
||||
"Found {}, which is not a valid worktree directory!",
|
||||
&unmanaged_worktree
|
||||
));
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Fetch(_args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
repo.fetchall().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error fetching remotes: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
print_success("Fetched from all remotes");
|
||||
}
|
||||
cmd::WorktreeAction::Pull(args) => {
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
repo.fetchall().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error fetching remotes: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut failures = false;
|
||||
for worktree in repo.get_worktrees().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error getting worktrees: {}", error));
|
||||
process::exit(1);
|
||||
}) {
|
||||
if let Some(warning) = worktree
|
||||
.forward_branch(args.rebase, args.stash)
|
||||
.unwrap_or_else(|error| {
|
||||
print_error(&format!("Error updating worktree branch: {}", error));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
print_warning(&format!("{}: {}", worktree.name(), warning));
|
||||
failures = true;
|
||||
} else {
|
||||
print_success(&format!("{}: Done", worktree.name()));
|
||||
}
|
||||
}
|
||||
if failures {
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
cmd::WorktreeAction::Rebase(args) => {
|
||||
if args.rebase && !args.pull {
|
||||
print_error("There is no point in using --rebase without --pull");
|
||||
process::exit(1);
|
||||
}
|
||||
let repo = repo::RepoHandle::open(&cwd, true).unwrap_or_else(|error| {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
print_error("Directory does not contain a git repository");
|
||||
} else {
|
||||
print_error(&format!("Opening repository failed: {}", error));
|
||||
}
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
if args.pull {
|
||||
repo.fetchall().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error fetching remotes: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
let config = repo::read_worktree_root_config(&cwd).unwrap_or_else(|error| {
|
||||
print_error(&format!("Failed to read worktree configuration: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let worktrees = repo.get_worktrees().unwrap_or_else(|error| {
|
||||
print_error(&format!("Error getting worktrees: {}", error));
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut failures = false;
|
||||
|
||||
for worktree in &worktrees {
|
||||
if args.pull {
|
||||
if let Some(warning) = worktree
|
||||
.forward_branch(args.rebase, args.stash)
|
||||
.unwrap_or_else(|error| {
|
||||
print_error(&format!(
|
||||
"Error updating worktree branch: {}",
|
||||
error
|
||||
));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
failures = true;
|
||||
print_warning(&format!("{}: {}", worktree.name(), warning));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for worktree in &worktrees {
|
||||
if let Some(warning) = worktree
|
||||
.rebase_onto_default(&config, args.stash)
|
||||
.unwrap_or_else(|error| {
|
||||
print_error(&format!("Error rebasing worktree branch: {}", error));
|
||||
process::exit(1);
|
||||
})
|
||||
{
|
||||
failures = true;
|
||||
print_warning(&format!("{}: {}", worktree.name(), warning));
|
||||
} else {
|
||||
print_success(&format!("{}: Done", worktree.name()));
|
||||
}
|
||||
}
|
||||
if failures {
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1331
src/lib.rs
1331
src/lib.rs
File diff suppressed because it is too large
Load Diff
@@ -1,5 +0,0 @@
|
||||
use grm::run;
|
||||
|
||||
fn main() {
    // Thin binary shim: all CLI parsing and logic lives in the library
    // crate's `run` entry point.
    run();
}
|
||||
84
src/path.rs
Normal file
84
src/path.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process;
|
||||
|
||||
use super::output::*;
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Pin HOME to a fixed location so the expansion results below are
    // deterministic regardless of the machine running the tests.
    fn setup() {
        std::env::set_var("HOME", "/home/test");
    }

    #[test]
    fn check_expand_tilde() {
        setup();
        let expanded = expand_path(Path::new("~/file"));
        assert_eq!(expanded, Path::new("/home/test/file"));
    }

    #[test]
    fn check_expand_invalid_tilde() {
        setup();
        // A tilde that is not at the start of the path must be left alone.
        let expanded = expand_path(Path::new("/home/~/file"));
        assert_eq!(expanded, Path::new("/home/~/file"));
    }

    #[test]
    fn check_expand_home() {
        setup();
        // Both spellings of the HOME variable must expand identically.
        for input in ["$HOME/file", "${HOME}/file"] {
            assert_eq!(expand_path(Path::new(input)), Path::new("/home/test/file"));
        }
    }
}
|
||||
|
||||
/// Convert a path into an owned `String`.
///
/// # Panics
///
/// Panics if the path is not valid UTF-8; the surrounding code only deals
/// with UTF-8 paths (config files, display output).
pub fn path_as_string(path: &Path) -> String {
    // Go straight through OsStr instead of the original PathBuf detour,
    // and state the invariant instead of a bare unwrap().
    path.as_os_str()
        .to_os_string()
        .into_string()
        .expect("path is not valid UTF-8")
}
|
||||
|
||||
pub fn env_home() -> PathBuf {
|
||||
match std::env::var("HOME") {
|
||||
Ok(path) => Path::new(&path).to_path_buf(),
|
||||
Err(e) => {
|
||||
print_error(&format!("Unable to read HOME: {}", e));
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_path(path: &Path) -> PathBuf {
|
||||
fn home_dir() -> Option<PathBuf> {
|
||||
Some(env_home())
|
||||
}
|
||||
|
||||
let expanded_path = match shellexpand::full_with_context(
|
||||
&path_as_string(path),
|
||||
home_dir,
|
||||
|name| -> Result<Option<String>, &'static str> {
|
||||
match name {
|
||||
"HOME" => Ok(Some(path_as_string(home_dir().unwrap().as_path()))),
|
||||
_ => Ok(None),
|
||||
}
|
||||
},
|
||||
) {
|
||||
Ok(std::borrow::Cow::Borrowed(path)) => path.to_owned(),
|
||||
Ok(std::borrow::Cow::Owned(path)) => path,
|
||||
Err(e) => {
|
||||
print_error(&format!("Unable to expand root: {}", e));
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
Path::new(&expanded_path).to_path_buf()
|
||||
}
|
||||
145
src/provider/github.rs
Normal file
145
src/provider/github.rs
Normal file
@@ -0,0 +1,145 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::escape;
|
||||
use super::ApiErrorResponse;
|
||||
use super::Filter;
|
||||
use super::JsonError;
|
||||
use super::Project;
|
||||
use super::Provider;
|
||||
use super::SecretToken;
|
||||
|
||||
const PROVIDER_NAME: &str = "github";
|
||||
const ACCEPT_HEADER_JSON: &str = "application/vnd.github.v3+json";
|
||||
const GITHUB_API_BASEURL: &str =
|
||||
option_env!("GITHUB_API_BASEURL").unwrap_or("https://api.github.com");
|
||||
|
||||
/// Subset of the fields Github's REST API returns for a repository.
#[derive(Deserialize)]
pub struct GithubProject {
    /// Short repository name.
    pub name: String,
    /// `owner/name` form; `namespace()` derives the owner part from it.
    pub full_name: String,
    /// Used as the HTTP(S) clone URL.
    pub clone_url: String,
    /// Used as the SSH clone URL.
    pub ssh_url: String,
    pub private: bool,
}
|
||||
|
||||
/// Minimal user payload; only the login name is needed.
#[derive(Deserialize)]
struct GithubUser {
    // Github calls this field "login" in its JSON responses.
    #[serde(rename = "login")]
    pub username: String,
}
|
||||
|
||||
impl Project for GithubProject {
    fn name(&self) -> String {
        self.name.clone()
    }

    /// Everything before the last `/` of `full_name` (the owner), or `None`
    /// if there is no slash.
    fn namespace(&self) -> Option<String> {
        // map() replaces the original manual if-let Some/None match
        // (clippy::manual_map).
        self.full_name
            .rsplit_once('/')
            .map(|(namespace, _name)| namespace.to_string())
    }

    fn ssh_url(&self) -> String {
        self.ssh_url.clone()
    }

    fn http_url(&self) -> String {
        self.clone_url.clone()
    }

    fn private(&self) -> bool {
        self.private
    }
}
|
||||
|
||||
/// Shape of Github's JSON error body.
#[derive(Deserialize)]
pub struct GithubApiErrorResponse {
    pub message: String,
}
|
||||
|
||||
impl JsonError for GithubApiErrorResponse {
    /// The human-readable error text is the whole payload.
    fn to_string(self) -> String {
        self.message
    }
}
|
||||
|
||||
/// Github API client state: the repository filter plus the auth token.
pub struct Github {
    filter: Filter,
    secret_token: SecretToken,
}
|
||||
|
||||
impl Provider for Github {
    type Project = GithubProject;
    type Error = GithubApiErrorResponse;

    /// Create a Github provider.
    ///
    /// The API base URL is fixed at compile time (`GITHUB_API_BASEURL`),
    /// so passing an `api_url_override` is rejected with an error.
    fn new(
        filter: Filter,
        secret_token: SecretToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        if api_url_override.is_some() {
            return Err("API URL overriding is not supported for Github".to_string());
        }
        Ok(Self {
            filter,
            secret_token,
        })
    }

    fn name(&self) -> String {
        String::from(PROVIDER_NAME)
    }

    fn filter(&self) -> Filter {
        self.filter.clone()
    }

    fn secret_token(&self) -> SecretToken {
        self.secret_token.clone()
    }

    /// Github expects `Authorization: token <secret>`.
    fn auth_header_key() -> String {
        "token".to_string()
    }

    /// Repositories of a single user.
    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/users/{}/repos", escape(user)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Repositories of an organization (all types).
    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/orgs/{}/repos?type=all", escape(group)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Repositories of the authenticated user (`/user/repos`).
    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GithubProject>, ApiErrorResponse<GithubApiErrorResponse>> {
        self.call_list(
            &format!("{GITHUB_API_BASEURL}/user/repos"),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Login name of the authenticated user.
    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GithubApiErrorResponse>> {
        Ok(super::call::<GithubUser, GithubApiErrorResponse>(
            &format!("{GITHUB_API_BASEURL}/user"),
            &Self::auth_header_key(),
            &self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?
        .username)
    }
}
|
||||
166
src/provider/gitlab.rs
Normal file
166
src/provider/gitlab.rs
Normal file
@@ -0,0 +1,166 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::escape;
|
||||
use super::ApiErrorResponse;
|
||||
use super::Filter;
|
||||
use super::JsonError;
|
||||
use super::Project;
|
||||
use super::Provider;
|
||||
use super::SecretToken;
|
||||
|
||||
const PROVIDER_NAME: &str = "gitlab";
|
||||
const ACCEPT_HEADER_JSON: &str = "application/json";
|
||||
const GITLAB_API_BASEURL: &str = option_env!("GITLAB_API_BASEURL").unwrap_or("https://gitlab.com");
|
||||
|
||||
/// Gitlab project visibility levels as returned by the API
/// ("private", "internal", "public").
#[derive(Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum GitlabVisibility {
    Private,
    Internal,
    Public,
}
|
||||
|
||||
/// Subset of the fields Gitlab's API returns for a project.
#[derive(Deserialize)]
pub struct GitlabProject {
    /// Gitlab calls the short name "path" in its JSON responses.
    #[serde(rename = "path")]
    pub name: String,
    /// `group/subgroup/name` form; `namespace()` derives the group part.
    pub path_with_namespace: String,
    pub http_url_to_repo: String,
    pub ssh_url_to_repo: String,
    pub visibility: GitlabVisibility,
}
|
||||
|
||||
/// Minimal user payload; only the username is needed.
#[derive(Deserialize)]
struct GitlabUser {
    pub username: String,
}
|
||||
|
||||
impl Project for GitlabProject {
|
||||
fn name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
fn namespace(&self) -> Option<String> {
|
||||
if let Some((namespace, _name)) = self.path_with_namespace.rsplit_once('/') {
|
||||
Some(namespace.to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn ssh_url(&self) -> String {
|
||||
self.ssh_url_to_repo.clone()
|
||||
}
|
||||
|
||||
fn http_url(&self) -> String {
|
||||
self.http_url_to_repo.clone()
|
||||
}
|
||||
|
||||
fn private(&self) -> bool {
|
||||
!matches!(self.visibility, GitlabVisibility::Public)
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape of Gitlab's JSON error body.
///
/// The message can appear under "message", "error" or "error_description"
/// depending on the response; accept all three.
#[derive(Deserialize)]
pub struct GitlabApiErrorResponse {
    #[serde(alias = "error_description", alias = "error")]
    pub message: String,
}
|
||||
|
||||
impl JsonError for GitlabApiErrorResponse {
    /// The human-readable error text is the whole payload.
    fn to_string(self) -> String {
        self.message
    }
}
|
||||
|
||||
/// Gitlab API client state: repository filter, auth token, and an optional
/// base-URL override (for self-hosted instances).
pub struct Gitlab {
    filter: Filter,
    secret_token: SecretToken,
    api_url_override: Option<String>,
}
|
||||
|
||||
impl Gitlab {
|
||||
fn api_url(&self) -> String {
|
||||
self.api_url_override
|
||||
.as_ref()
|
||||
.unwrap_or(&GITLAB_API_BASEURL.to_string())
|
||||
.trim_end_matches('/')
|
||||
.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl Provider for Gitlab {
    type Project = GitlabProject;
    type Error = GitlabApiErrorResponse;

    /// Create a Gitlab provider. `api_url_override` replaces the default
    /// base URL (see `api_url()`), e.g. for self-hosted instances.
    fn new(
        filter: Filter,
        secret_token: SecretToken,
        api_url_override: Option<String>,
    ) -> Result<Self, String> {
        Ok(Self {
            filter,
            secret_token,
            api_url_override,
        })
    }

    fn name(&self) -> String {
        String::from(PROVIDER_NAME)
    }

    fn filter(&self) -> Filter {
        self.filter.clone()
    }

    fn secret_token(&self) -> SecretToken {
        self.secret_token.clone()
    }

    /// Gitlab expects `Authorization: bearer <secret>`.
    fn auth_header_key() -> String {
        "bearer".to_string()
    }

    /// Projects of a single user.
    fn get_user_projects(
        &self,
        user: &str,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!("{}/api/v4/users/{}/projects", self.api_url(), escape(user)),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Projects of a group, including subgroups, excluding archived ones.
    fn get_group_projects(
        &self,
        group: &str,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!(
                "{}/api/v4/groups/{}/projects?include_subgroups=true&archived=false",
                self.api_url(),
                escape(group),
            ),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Projects visible to the authenticated user (`/projects`).
    fn get_accessible_projects(
        &self,
    ) -> Result<Vec<GitlabProject>, ApiErrorResponse<GitlabApiErrorResponse>> {
        self.call_list(
            &format!("{}/api/v4/projects", self.api_url(),),
            Some(ACCEPT_HEADER_JSON),
        )
    }

    /// Username of the authenticated user.
    fn get_current_user(&self) -> Result<String, ApiErrorResponse<GitlabApiErrorResponse>> {
        Ok(super::call::<GitlabUser, GitlabApiErrorResponse>(
            &format!("{}/api/v4/user", self.api_url()),
            &Self::auth_header_key(),
            &self.secret_token(),
            Some(ACCEPT_HEADER_JSON),
        )?
        .username)
    }
}
|
||||
349
src/provider/mod.rs
Normal file
349
src/provider/mod.rs
Normal file
@@ -0,0 +1,349 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// Required to use the `json()` method from the trait
|
||||
use isahc::ReadResponseExt;
|
||||
|
||||
pub mod github;
|
||||
pub mod gitlab;
|
||||
|
||||
pub use github::Github;
|
||||
pub use gitlab::Gitlab;
|
||||
|
||||
use super::repo;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Remote forges that can be queried for repositories.
///
/// Doubles as a CLI value (`clap::ArgEnum`) and a config value (serde);
/// the aliases accept common alternative spellings in config files.
#[derive(Debug, Deserialize, Serialize, clap::ArgEnum, Clone)]
pub enum RemoteProvider {
    #[serde(alias = "github", alias = "GitHub")]
    Github,
    #[serde(alias = "gitlab", alias = "GitLab")]
    Gitlab,
}
|
||||
|
||||
/// A provider response: either a successful list of projects or an error
/// body. `untagged` makes serde try the variants in order until one
/// deserializes.
#[derive(Deserialize)]
#[serde(untagged)]
enum ProjectResponse<T, U> {
    Success(Vec<T>),
    Failure(U),
}
|
||||
|
||||
pub fn escape(s: &str) -> String {
|
||||
url_escape::encode_component(s).to_string()
|
||||
}
|
||||
|
||||
pub trait Project {
|
||||
fn into_repo_config(
|
||||
self,
|
||||
provider_name: &str,
|
||||
worktree_setup: bool,
|
||||
force_ssh: bool,
|
||||
) -> repo::Repo
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
repo::Repo {
|
||||
name: self.name(),
|
||||
namespace: self.namespace(),
|
||||
worktree_setup,
|
||||
remotes: Some(vec![repo::Remote {
|
||||
name: String::from(provider_name),
|
||||
url: if force_ssh || self.private() {
|
||||
self.ssh_url()
|
||||
} else {
|
||||
self.http_url()
|
||||
},
|
||||
remote_type: if force_ssh || self.private() {
|
||||
repo::RemoteType::Ssh
|
||||
} else {
|
||||
repo::RemoteType::Https
|
||||
},
|
||||
}]),
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> String;
|
||||
fn namespace(&self) -> Option<String>;
|
||||
fn ssh_url(&self) -> String;
|
||||
fn http_url(&self) -> String;
|
||||
fn private(&self) -> bool;
|
||||
}
|
||||
|
||||
// API auth token. NOTE(review): plain String alias — no zeroing or
// redaction is applied; consider a dedicated secrecy wrapper.
type SecretToken = String;
|
||||
|
||||
/// Selects which projects to fetch from a provider.
#[derive(Clone)]
pub struct Filter {
    /// Fetch these users' projects.
    users: Vec<String>,
    /// Fetch these groups' projects.
    groups: Vec<String>,
    /// Also fetch the authenticated user's own projects.
    owner: bool,
    /// Also fetch all projects the authenticated user has access to.
    access: bool,
}
|
||||
|
||||
impl Filter {
|
||||
pub fn new(users: Vec<String>, groups: Vec<String>, owner: bool, access: bool) -> Self {
|
||||
Filter {
|
||||
users,
|
||||
groups,
|
||||
owner,
|
||||
access,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Failure of an API call: either a parsed JSON error body from the
/// provider, or a plain string (transport / deserialization failures).
pub enum ApiErrorResponse<T>
where
    T: JsonError,
{
    Json(T),
    String(String),
}
|
||||
|
||||
/// Lets `?` convert `Result<_, String>` errors inside functions returning
/// `Result<_, ApiErrorResponse<T>>`.
impl<T> From<String> for ApiErrorResponse<T>
where
    T: JsonError,
{
    fn from(s: String) -> ApiErrorResponse<T> {
        ApiErrorResponse::String(s)
    }
}
|
||||
|
||||
/// A deserializable error body that can be flattened into a message.
pub trait JsonError {
    // NOTE(review): takes `self` by value and shadows the std `ToString`
    // convention; implementing `Display` would be the more idiomatic route.
    fn to_string(self) -> String;
}
|
||||
|
||||
pub trait Provider {
|
||||
type Project: serde::de::DeserializeOwned + Project;
|
||||
type Error: serde::de::DeserializeOwned + JsonError;
|
||||
|
||||
fn new(
|
||||
filter: Filter,
|
||||
secret_token: SecretToken,
|
||||
api_url_override: Option<String>,
|
||||
) -> Result<Self, String>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
fn name(&self) -> String;
|
||||
fn filter(&self) -> Filter;
|
||||
fn secret_token(&self) -> SecretToken;
|
||||
fn auth_header_key() -> String;
|
||||
|
||||
fn get_user_projects(
|
||||
&self,
|
||||
user: &str,
|
||||
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
fn get_group_projects(
|
||||
&self,
|
||||
group: &str,
|
||||
) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
fn get_own_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
|
||||
self.get_user_projects(&self.get_current_user()?)
|
||||
}
|
||||
|
||||
fn get_accessible_projects(&self) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
fn get_current_user(&self) -> Result<String, ApiErrorResponse<Self::Error>>;
|
||||
|
||||
    ///
    /// Calls the API at specific uri and expects a successful response of Vec<T> back, or an error
    /// response U
    ///
    /// Handles paging with "link" HTTP headers properly and reads all pages to
    /// the end.
    fn call_list(
        &self,
        uri: &str,
        accept_header: Option<&str>,
    ) -> Result<Vec<Self::Project>, ApiErrorResponse<Self::Error>> {
        let mut results = vec![];

        // A fresh client per request; construction failures surface as strings.
        let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;

        let request = isahc::Request::builder()
            .uri(uri)
            .method("GET")
            // Default to JSON unless the caller asks for a vendor media type.
            .header("accept", accept_header.unwrap_or("application/json"))
            .header(
                "authorization",
                // Scheme (e.g. "token"/"Bearer") is forge-specific.
                format!("{} {}", Self::auth_header_key(), &self.secret_token()),
            )
            .body(())
            .map_err(|error| error.to_string())?;

        let mut response = client
            .send(request)
            .map_err(|error| ApiErrorResponse::String(error.to_string()))?;

        // A non-2xx status is expected to carry a deserializable error body.
        if !response.status().is_success() {
            let r: Self::Error = response
                .json()
                .map_err(|error| format!("Failed deserializing error response: {}", error))?;
            return Err(ApiErrorResponse::Json(r));
        }

        let result: Vec<Self::Project> = response
            .json()
            .map_err(|error| format!("Failed deserializing response: {}", error))?;

        results.extend(result);

        // RFC 5988-style pagination: if the response advertises a rel="next"
        // link, recurse into it and accumulate all following pages.
        if let Some(link_header) = response.headers().get("link") {
            let link_header = link_header.to_str().map_err(|error| error.to_string())?;

            let link_header =
                parse_link_header::parse(link_header).map_err(|error| error.to_string())?;

            let next_page = link_header.get(&Some(String::from("next")));

            if let Some(page) = next_page {
                let following_repos = self.call_list(&page.raw_uri, accept_header)?;
                results.extend(following_repos);
            }
        }

        Ok(results)
    }
|
||||
|
||||
fn get_repos(
|
||||
&self,
|
||||
worktree_setup: bool,
|
||||
force_ssh: bool,
|
||||
) -> Result<HashMap<Option<String>, Vec<repo::Repo>>, String> {
|
||||
let mut repos = vec![];
|
||||
|
||||
if self.filter().owner {
|
||||
repos.extend(self.get_own_projects().map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?);
|
||||
}
|
||||
|
||||
if self.filter().access {
|
||||
let accessible_projects =
|
||||
self.get_accessible_projects()
|
||||
.map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?;
|
||||
|
||||
for accessible_project in accessible_projects {
|
||||
let mut already_present = false;
|
||||
for repo in &repos {
|
||||
if repo.name() == accessible_project.name()
|
||||
&& repo.namespace() == accessible_project.namespace()
|
||||
{
|
||||
already_present = true;
|
||||
}
|
||||
}
|
||||
if !already_present {
|
||||
repos.push(accessible_project);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for user in &self.filter().users {
|
||||
let user_projects = self.get_user_projects(user).map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?;
|
||||
|
||||
for user_project in user_projects {
|
||||
let mut already_present = false;
|
||||
for repo in &repos {
|
||||
if repo.name() == user_project.name()
|
||||
&& repo.namespace() == user_project.namespace()
|
||||
{
|
||||
already_present = true;
|
||||
}
|
||||
}
|
||||
if !already_present {
|
||||
repos.push(user_project);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for group in &self.filter().groups {
|
||||
let group_projects = self
|
||||
.get_group_projects(group)
|
||||
.map_err(|error| match error {
|
||||
ApiErrorResponse::Json(x) => x.to_string(),
|
||||
ApiErrorResponse::String(s) => s,
|
||||
})?;
|
||||
for group_project in group_projects {
|
||||
let mut already_present = false;
|
||||
for repo in &repos {
|
||||
if repo.name() == group_project.name()
|
||||
&& repo.namespace() == group_project.namespace()
|
||||
{
|
||||
already_present = true;
|
||||
}
|
||||
}
|
||||
|
||||
if !already_present {
|
||||
repos.push(group_project);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut ret: HashMap<Option<String>, Vec<repo::Repo>> = HashMap::new();
|
||||
|
||||
for repo in repos {
|
||||
let namespace = repo.namespace();
|
||||
|
||||
let mut repo = repo.into_repo_config(&self.name(), worktree_setup, force_ssh);
|
||||
|
||||
// Namespace is already part of the hashmap key. I'm not too happy
|
||||
// about the data exchange format here.
|
||||
repo.remove_namespace();
|
||||
|
||||
ret.entry(namespace).or_insert(vec![]).push(repo);
|
||||
}
|
||||
|
||||
Ok(ret)
|
||||
}
|
||||
}
|
||||
|
||||
fn call<T, U>(
|
||||
uri: &str,
|
||||
auth_header_key: &str,
|
||||
secret_token: &str,
|
||||
accept_header: Option<&str>,
|
||||
) -> Result<T, ApiErrorResponse<U>>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
U: serde::de::DeserializeOwned + JsonError,
|
||||
{
|
||||
let client = isahc::HttpClient::new().map_err(|error| error.to_string())?;
|
||||
|
||||
let request = isahc::Request::builder()
|
||||
.uri(uri)
|
||||
.header("accept", accept_header.unwrap_or("application/json"))
|
||||
.header(
|
||||
"authorization",
|
||||
format!("{} {}", &auth_header_key, &secret_token),
|
||||
)
|
||||
.body(())
|
||||
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
|
||||
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.map_err(|error| ApiErrorResponse::String(error.to_string()))?;
|
||||
|
||||
let success = response.status().is_success();
|
||||
|
||||
if !success {
|
||||
let response: U = response
|
||||
.json()
|
||||
.map_err(|error| format!("Failed deserializing error response: {}", error))?;
|
||||
|
||||
return Err(ApiErrorResponse::Json(response));
|
||||
}
|
||||
|
||||
let response: T = response
|
||||
.json()
|
||||
.map_err(|error| format!("Failed deserializing response: {}", error))?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
1333
src/repo.rs
1333
src/repo.rs
File diff suppressed because it is too large
Load Diff
313
src/table.rs
Normal file
313
src/table.rs
Normal file
@@ -0,0 +1,313 @@
|
||||
use super::config;
|
||||
use super::path;
|
||||
use super::repo;
|
||||
|
||||
use comfy_table::{Cell, Table};
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
fn add_table_header(table: &mut Table) {
|
||||
table
|
||||
.load_preset(comfy_table::presets::UTF8_FULL)
|
||||
.apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS)
|
||||
.set_header(vec![
|
||||
Cell::new("Repo"),
|
||||
Cell::new("Worktree"),
|
||||
Cell::new("Status"),
|
||||
Cell::new("Branches"),
|
||||
Cell::new("HEAD"),
|
||||
Cell::new("Remotes"),
|
||||
]);
|
||||
}
|
||||
|
||||
/// Adds one row for `repo_name` to the status table: worktree flag, working
/// tree status, all branches with their remote-tracking state, HEAD, and the
/// configured remotes.
///
/// Fails if the repository status cannot be determined.
fn add_repo_status(
    table: &mut Table,
    repo_name: &str,
    repo_handle: &repo::RepoHandle,
    is_worktree: bool,
) -> Result<(), String> {
    let repo_status = repo_handle.status(is_worktree)?;

    table.add_row(vec![
        repo_name,
        // Check mark when the repo uses a worktree setup.
        match is_worktree {
            true => "\u{2714}",
            false => "",
        },
        // Working tree status column. Left empty for worktree setups (the
        // bare repo has no working tree); a check mark means "clean".
        &match is_worktree {
            true => String::from(""),
            false => match repo_status.changes {
                Some(changes) => {
                    // List only the non-zero change counts, one per line.
                    let mut out = Vec::new();
                    if changes.files_new > 0 {
                        out.push(format!("New: {}\n", changes.files_new))
                    }
                    if changes.files_modified > 0 {
                        out.push(format!("Modified: {}\n", changes.files_modified))
                    }
                    if changes.files_deleted > 0 {
                        out.push(format!("Deleted: {}\n", changes.files_deleted))
                    }
                    out.into_iter().collect::<String>().trim().to_string()
                }
                None => String::from("\u{2714}"),
            },
        },
        // One line per branch: "branch: <name>" plus tracking info —
        // "<!local>" for branches without an upstream, otherwise the remote
        // branch name with ahead/behind/diverged markers.
        repo_status
            .branches
            .iter()
            .map(|(branch_name, remote_branch)| {
                format!(
                    "branch: {}{}\n",
                    &branch_name,
                    &match remote_branch {
                        None => String::from(" <!local>"),
                        Some((remote_branch_name, remote_tracking_status)) => {
                            format!(
                                " <{}>{}",
                                remote_branch_name,
                                &match remote_tracking_status {
                                    repo::RemoteTrackingStatus::UpToDate =>
                                        String::from(" \u{2714}"),
                                    repo::RemoteTrackingStatus::Ahead(d) => format!(" [+{}]", &d),
                                    repo::RemoteTrackingStatus::Behind(d) => format!(" [-{}]", &d),
                                    repo::RemoteTrackingStatus::Diverged(d1, d2) =>
                                        format!(" [+{}/-{}]", &d1, &d2),
                                }
                            )
                        }
                    }
                )
            })
            .collect::<String>()
            .trim(),
        // HEAD is only meaningful for non-worktree setups.
        &match is_worktree {
            true => String::from(""),
            false => match repo_status.head {
                Some(head) => head,
                None => String::from("Empty"),
            },
        },
        // All configured remotes, one per line.
        repo_status
            .remotes
            .iter()
            .map(|r| format!("{}\n", r))
            .collect::<String>()
            .trim(),
    ]);

    Ok(())
}
|
||||
|
||||
// Don't return table, return a type that implements Display(?)
|
||||
pub fn get_worktree_status_table(
|
||||
repo: &repo::RepoHandle,
|
||||
directory: &Path,
|
||||
) -> Result<(impl std::fmt::Display, Vec<String>), String> {
|
||||
let worktrees = repo.get_worktrees()?;
|
||||
let mut table = Table::new();
|
||||
|
||||
let mut errors = Vec::new();
|
||||
|
||||
add_worktree_table_header(&mut table);
|
||||
for worktree in &worktrees {
|
||||
let worktree_dir = &directory.join(&worktree.name());
|
||||
if worktree_dir.exists() {
|
||||
let repo = match repo::RepoHandle::open(worktree_dir, false) {
|
||||
Ok(repo) => repo,
|
||||
Err(error) => {
|
||||
errors.push(format!(
|
||||
"Failed opening repo of worktree {}: {}",
|
||||
&worktree.name(),
|
||||
&error
|
||||
));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
if let Err(error) = add_worktree_status(&mut table, worktree, &repo) {
|
||||
errors.push(error);
|
||||
}
|
||||
} else {
|
||||
errors.push(format!(
|
||||
"Worktree {} does not have a directory",
|
||||
&worktree.name()
|
||||
));
|
||||
}
|
||||
}
|
||||
for worktree in repo::RepoHandle::find_unmanaged_worktrees(repo, directory)? {
|
||||
errors.push(format!(
|
||||
"Found {}, which is not a valid worktree directory!",
|
||||
&worktree
|
||||
));
|
||||
}
|
||||
Ok((table, errors))
|
||||
}
|
||||
|
||||
pub fn get_status_table(config: config::Config) -> Result<(Vec<Table>, Vec<String>), String> {
|
||||
let mut errors = Vec::new();
|
||||
let mut tables = Vec::new();
|
||||
for tree in config.trees()? {
|
||||
let repos = tree.repos.unwrap_or_default();
|
||||
|
||||
let root_path = path::expand_path(Path::new(&tree.root));
|
||||
|
||||
let mut table = Table::new();
|
||||
add_table_header(&mut table);
|
||||
|
||||
for repo in &repos {
|
||||
let repo_path = root_path.join(&repo.name);
|
||||
|
||||
if !repo_path.exists() {
|
||||
errors.push(format!(
|
||||
"{}: Repository does not exist. Run sync?",
|
||||
&repo.name
|
||||
));
|
||||
continue;
|
||||
}
|
||||
|
||||
let repo_handle = repo::RepoHandle::open(&repo_path, repo.worktree_setup);
|
||||
|
||||
let repo_handle = match repo_handle {
|
||||
Ok(repo) => repo,
|
||||
Err(error) => {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
errors.push(format!(
|
||||
"{}: No git repository found. Run sync?",
|
||||
&repo.name
|
||||
));
|
||||
} else {
|
||||
errors.push(format!(
|
||||
"{}: Opening repository failed: {}",
|
||||
&repo.name, error
|
||||
));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
add_repo_status(&mut table, &repo.name, &repo_handle, repo.worktree_setup)?;
|
||||
}
|
||||
|
||||
tables.push(table);
|
||||
}
|
||||
|
||||
Ok((tables, errors))
|
||||
}
|
||||
|
||||
fn add_worktree_table_header(table: &mut Table) {
|
||||
table
|
||||
.load_preset(comfy_table::presets::UTF8_FULL)
|
||||
.apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS)
|
||||
.set_header(vec![
|
||||
Cell::new("Worktree"),
|
||||
Cell::new("Status"),
|
||||
Cell::new("Branch"),
|
||||
Cell::new("Remote branch"),
|
||||
]);
|
||||
}
|
||||
|
||||
/// Adds one row for `worktree` to the worktree status table: name, working
/// tree status, local branch, and remote tracking branch with deviation.
fn add_worktree_status(
    table: &mut Table,
    worktree: &repo::Worktree,
    repo: &repo::RepoHandle,
) -> Result<(), String> {
    let repo_status = repo.status(false)?;

    let local_branch = repo
        .head_branch()
        .map_err(|error| format!("Failed getting head branch: {}", error))?;

    // Upstream column: remote branch name plus ahead/behind markers, or
    // empty when the local branch has no upstream configured.
    let upstream_output = match local_branch.upstream() {
        Ok(remote_branch) => {
            let remote_branch_name = remote_branch
                .name()
                .map_err(|error| format!("Failed getting name of remote branch: {}", error))?;

            let (ahead, behind) = repo
                .graph_ahead_behind(&local_branch, &remote_branch)
                .map_err(|error| format!("Failed computing branch deviation: {}", error))?;

            format!(
                "{}{}\n",
                &remote_branch_name,
                &match (ahead, behind) {
                    (0, 0) => String::from(""),
                    (d, 0) => format!(" [+{}]", &d),
                    (0, d) => format!(" [-{}]", &d),
                    (d1, d2) => format!(" [+{}/-{}]", &d1, &d2),
                },
            )
        }
        // A missing upstream is not an error; the column stays empty.
        Err(_) => String::from(""),
    };

    table.add_row(vec![
        worktree.name(),
        // Working tree status: non-zero new/modified/deleted counts, one per
        // line, or a check mark when the tree is clean.
        &match repo_status.changes {
            Some(changes) => {
                let mut out = Vec::new();
                if changes.files_new > 0 {
                    out.push(format!("New: {}\n", changes.files_new))
                }
                if changes.files_modified > 0 {
                    out.push(format!("Modified: {}\n", changes.files_modified))
                }
                if changes.files_deleted > 0 {
                    out.push(format!("Deleted: {}\n", changes.files_deleted))
                }
                out.into_iter().collect::<String>().trim().to_string()
            }
            None => String::from("\u{2714}"),
        },
        &local_branch
            .name()
            .map_err(|error| format!("Failed getting name of branch: {}", error))?,
        &upstream_output,
    ]);

    Ok(())
}
|
||||
|
||||
pub fn show_single_repo_status(
|
||||
path: &Path,
|
||||
) -> Result<(impl std::fmt::Display, Vec<String>), String> {
|
||||
let mut table = Table::new();
|
||||
let mut warnings = Vec::new();
|
||||
|
||||
let is_worktree = repo::RepoHandle::detect_worktree(path);
|
||||
add_table_header(&mut table);
|
||||
|
||||
let repo_handle = repo::RepoHandle::open(path, is_worktree);
|
||||
|
||||
if let Err(error) = repo_handle {
|
||||
if error.kind == repo::RepoErrorKind::NotFound {
|
||||
return Err(String::from("Directory is not a git directory"));
|
||||
} else {
|
||||
return Err(format!("Opening repository failed: {}", error));
|
||||
}
|
||||
};
|
||||
|
||||
let repo_name = match path.file_name() {
|
||||
None => {
|
||||
warnings.push(format!(
|
||||
"Cannot detect repo name for path {}. Are you working in /?",
|
||||
&path.display()
|
||||
));
|
||||
String::from("unknown")
|
||||
}
|
||||
Some(file_name) => match file_name.to_str() {
|
||||
None => {
|
||||
warnings.push(format!(
|
||||
"Name of repo directory {} is not valid UTF-8",
|
||||
&path.display()
|
||||
));
|
||||
String::from("invalid")
|
||||
}
|
||||
Some(name) => name.to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
add_repo_status(&mut table, &repo_name, &repo_handle.unwrap(), is_worktree)?;
|
||||
|
||||
Ok((table, warnings))
|
||||
}
|
||||
268
src/tree.rs
Normal file
268
src/tree.rs
Normal file
@@ -0,0 +1,268 @@
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use super::config;
|
||||
use super::output::*;
|
||||
use super::path;
|
||||
use super::repo;
|
||||
use super::worktree;
|
||||
|
||||
/// A configured tree: a root directory together with the repositories that
/// are expected to live beneath it.
pub struct Tree {
    // Root directory of the tree; may contain "~" and is expanded before use.
    pub root: String,
    // Repositories managed inside this tree.
    pub repos: Vec<repo::Repo>,
}
|
||||
|
||||
pub fn find_unmanaged_repos(
|
||||
root_path: &Path,
|
||||
managed_repos: &[repo::Repo],
|
||||
) -> Result<Vec<PathBuf>, String> {
|
||||
let mut unmanaged_repos = Vec::new();
|
||||
|
||||
for repo_path in find_repo_paths(root_path)? {
|
||||
if !managed_repos
|
||||
.iter()
|
||||
.any(|r| Path::new(root_path).join(r.fullname()) == repo_path)
|
||||
{
|
||||
unmanaged_repos.push(repo_path);
|
||||
}
|
||||
}
|
||||
Ok(unmanaged_repos)
|
||||
}
|
||||
|
||||
/// Synchronizes every configured tree: clones/initializes missing repos,
/// aligns remotes, and warns about unmanaged repositories found on disk.
///
/// Returns `Ok(true)` when everything succeeded, `Ok(false)` when at least
/// one repository failed (failures are printed, not propagated), and `Err`
/// only for configuration-level problems.
pub fn sync_trees(config: config::Config, init_worktree: bool) -> Result<bool, String> {
    let mut failures = false;

    let mut unmanaged_repos_absolute_paths = vec![];
    let mut managed_repos_absolute_paths = vec![];

    let trees = config.trees()?;

    for tree in trees {
        let repos: Vec<repo::Repo> = tree
            .repos
            .unwrap_or_default()
            .into_iter()
            .map(|repo| repo.into_repo())
            .collect();

        let root_path = path::expand_path(Path::new(&tree.root));

        for repo in &repos {
            // Remember the absolute path so a repo managed by one tree is not
            // reported as unmanaged by another.
            managed_repos_absolute_paths.push(root_path.join(repo.fullname()));
            match sync_repo(&root_path, repo, init_worktree) {
                Ok(_) => print_repo_success(&repo.name, "OK"),
                Err(error) => {
                    // Report and keep going; other repos may still sync fine.
                    print_repo_error(&repo.name, &error);
                    failures = true;
                }
            }
        }

        match find_unmanaged_repos(&root_path, &repos) {
            Ok(repos) => {
                unmanaged_repos_absolute_paths.extend(repos);
            }
            Err(error) => {
                print_error(&format!("Error getting unmanaged repos: {}", error));
                failures = true;
            }
        }
    }

    // Warn about every on-disk repo that no tree claims as managed.
    for unmanaged_repo_absolute_path in &unmanaged_repos_absolute_paths {
        if managed_repos_absolute_paths
            .iter()
            .any(|managed_repo_absolute_path| {
                managed_repo_absolute_path == unmanaged_repo_absolute_path
            })
        {
            continue;
        }
        print_warning(&format!(
            "Found unmanaged repository: \"{}\"",
            path::path_as_string(unmanaged_repo_absolute_path)
        ));
    }

    Ok(!failures)
}
|
||||
|
||||
/// Finds repositories recursively, returning their path
|
||||
pub fn find_repo_paths(path: &Path) -> Result<Vec<PathBuf>, String> {
|
||||
let mut repos = Vec::new();
|
||||
|
||||
let git_dir = path.join(".git");
|
||||
let git_worktree = path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY);
|
||||
|
||||
if git_dir.exists() || git_worktree.exists() {
|
||||
repos.push(path.to_path_buf());
|
||||
} else {
|
||||
match fs::read_dir(path) {
|
||||
Ok(contents) => {
|
||||
for content in contents {
|
||||
match content {
|
||||
Ok(entry) => {
|
||||
let path = entry.path();
|
||||
if path.is_symlink() {
|
||||
continue;
|
||||
}
|
||||
if path.is_dir() {
|
||||
match find_repo_paths(&path) {
|
||||
Ok(ref mut r) => repos.append(r),
|
||||
Err(error) => return Err(error),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("Error accessing directory: {}", e));
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!(
|
||||
"Failed to open \"{}\": {}",
|
||||
&path.display(),
|
||||
match e.kind() {
|
||||
std::io::ErrorKind::NotADirectory =>
|
||||
String::from("directory expected, but path is not a directory"),
|
||||
std::io::ErrorKind::NotFound => String::from("not found"),
|
||||
_ => format!("{:?}", e.kind()),
|
||||
}
|
||||
));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(repos)
|
||||
}
|
||||
|
||||
/// Brings a single repository in line with its configuration: clones or
/// initializes it if missing, optionally sets up an initial worktree, and
/// reconciles the configured remotes (add / update URL / delete stale).
fn sync_repo(root_path: &Path, repo: &repo::Repo, init_worktree: bool) -> Result<(), String> {
    let repo_path = root_path.join(&repo.fullname());
    let actual_git_directory = get_actual_git_directory(&repo_path, repo.worktree_setup);

    let mut newly_created = false;

    if repo_path.exists() {
        // Guard against a plain checkout where the config expects a
        // worktree setup.
        if repo.worktree_setup && !actual_git_directory.exists() {
            return Err(String::from(
                "Repo already exists, but is not using a worktree setup",
            ));
        };
    } else if matches!(&repo.remotes, None) || repo.remotes.as_ref().unwrap().is_empty() {
        // No remotes configured: initialize an empty repository locally.
        print_repo_action(
            &repo.name,
            "Repository does not have remotes configured, initializing new",
        );
        match repo::RepoHandle::init(&repo_path, repo.worktree_setup) {
            Ok(r) => {
                print_repo_success(&repo.name, "Repository created");
                // NOTE(review): this Some(r) is the value of a match used as a
                // statement and is discarded; the repo is re-opened below.
                Some(r)
            }
            Err(e) => {
                return Err(format!("Repository failed during init: {}", e));
            }
        };
    } else {
        // Clone from the first configured remote.
        let first = repo.remotes.as_ref().unwrap().first().unwrap();

        match repo::clone_repo(first, &repo_path, repo.worktree_setup) {
            Ok(_) => {
                print_repo_success(&repo.name, "Repository successfully cloned");
            }
            Err(e) => {
                return Err(format!("Repository failed during clone: {}", e));
            }
        };

        newly_created = true;
    }

    let repo_handle = match repo::RepoHandle::open(&repo_path, repo.worktree_setup) {
        Ok(repo) => repo,
        Err(error) => {
            // Give a targeted message when the on-disk setup is a worktree
            // setup but the config says otherwise.
            if !repo.worktree_setup && repo::RepoHandle::open(&repo_path, true).is_ok() {
                return Err(String::from(
                    "Repo already exists, but is using a worktree setup",
                ));
            } else {
                return Err(format!("Opening repository failed: {}", error));
            }
        }
    };

    // Optionally create an initial worktree for the default branch of a
    // freshly cloned worktree-setup repo.
    if newly_created && repo.worktree_setup && init_worktree {
        match repo_handle.default_branch() {
            Ok(branch) => {
                worktree::add_worktree(&repo_path, &branch.name()?, None, None, false)?;
            }
            Err(_error) => print_repo_error(
                &repo.name,
                "Could not determine default branch, skipping worktree initializtion",
            ),
        }
    }
    if let Some(remotes) = &repo.remotes {
        let current_remotes: Vec<String> = repo_handle
            .remotes()
            .map_err(|error| format!("Repository failed during getting the remotes: {}", error))?;

        // Create missing remotes and fix URLs of existing ones.
        for remote in remotes {
            let current_remote = repo_handle.find_remote(&remote.name)?;

            match current_remote {
                Some(current_remote) => {
                    let current_url = current_remote.url();

                    if remote.url != current_url {
                        print_repo_action(
                            &repo.name,
                            &format!("Updating remote {} to \"{}\"", &remote.name, &remote.url),
                        );
                        if let Err(e) = repo_handle.remote_set_url(&remote.name, &remote.url) {
                            return Err(format!("Repository failed during setting of the remote URL for remote \"{}\": {}", &remote.name, e));
                        };
                    }
                }
                None => {
                    print_repo_action(
                        &repo.name,
                        &format!(
                            "Setting up new remote \"{}\" to \"{}\"",
                            &remote.name, &remote.url
                        ),
                    );
                    if let Err(e) = repo_handle.new_remote(&remote.name, &remote.url) {
                        return Err(format!(
                            "Repository failed during setting the remotes: {}",
                            e
                        ));
                    }
                }
            }
        }

        // Delete remotes that exist locally but are no longer configured.
        for current_remote in &current_remotes {
            if !remotes.iter().any(|r| &r.name == current_remote) {
                print_repo_action(
                    &repo.name,
                    &format!("Deleting remote \"{}\"", &current_remote,),
                );
                if let Err(e) = repo_handle.remote_delete(current_remote) {
                    return Err(format!(
                        "Repository failed during deleting remote \"{}\": {}",
                        &current_remote, e
                    ));
                }
            }
        }
    }
    Ok(())
}
|
||||
|
||||
fn get_actual_git_directory(path: &Path, is_worktree: bool) -> PathBuf {
|
||||
match is_worktree {
|
||||
false => path.to_path_buf(),
|
||||
true => path.join(worktree::GIT_MAIN_WORKTREE_DIRECTORY),
|
||||
}
|
||||
}
|
||||
166
src/worktree.rs
Normal file
166
src/worktree.rs
Normal file
@@ -0,0 +1,166 @@
|
||||
use std::path::Path;
|
||||
|
||||
use super::repo;
|
||||
|
||||
/// Name of the directory inside a worktree setup that holds the actual
/// (bare) git repository; its presence marks a directory as a worktree root.
pub const GIT_MAIN_WORKTREE_DIRECTORY: &str = ".git-main-working-tree";
|
||||
|
||||
/// Adds a new worktree called `name` inside the worktree setup at `directory`.
///
/// `subdirectory` nests the worktree below the root. `track` explicitly
/// selects a `(remote, branch)` to track; otherwise tracking behavior comes
/// from the worktree root config, and `no_track` disables tracking entirely.
///
/// Fails when `directory` is not a worktree setup or when a worktree with
/// this name already exists.
pub fn add_worktree(
    directory: &Path,
    name: &str,
    subdirectory: Option<&Path>,
    track: Option<(&str, &str)>,
    no_track: bool,
) -> Result<(), String> {
    let repo = repo::RepoHandle::open(directory, true).map_err(|error| match error.kind {
        repo::RepoErrorKind::NotFound => {
            String::from("Current directory does not contain a worktree setup")
        }
        _ => format!("Error opening repo: {}", error),
    })?;

    // Optional per-setup configuration (e.g. default tracking behavior).
    let config = repo::read_worktree_root_config(directory)?;

    if repo.find_worktree(name).is_ok() {
        return Err(format!("Worktree {} already exists", &name));
    }

    let path = match subdirectory {
        Some(dir) => directory.join(dir).join(name),
        None => directory.join(Path::new(name)),
    };

    // Tracks whether the remote branch we intend to track already exists; if
    // it does not, it is created by pushing later.
    let mut remote_branch_exists = false;

    // Fallback checkout target: the tip of the repository's default branch.
    let default_checkout = || repo.default_branch()?.to_commit();

    // Decide which commit the new branch starts from:
    // - no_track: always the default branch tip
    // - explicit track: the remote branch tip if it exists, else default
    // - otherwise: driven by the root config's track settings
    let checkout_commit;
    if no_track {
        checkout_commit = default_checkout()?;
    } else {
        match track {
            Some((remote_name, remote_branch_name)) => {
                let remote_branch = repo.find_remote_branch(remote_name, remote_branch_name);
                match remote_branch {
                    Ok(branch) => {
                        remote_branch_exists = true;
                        checkout_commit = branch.to_commit()?;
                    }
                    Err(_) => {
                        remote_branch_exists = false;
                        checkout_commit = default_checkout()?;
                    }
                }
            }
            None => match &config {
                None => checkout_commit = default_checkout()?,
                Some(config) => match &config.track {
                    None => checkout_commit = default_checkout()?,
                    Some(track_config) => {
                        if track_config.default {
                            // Track a branch of the same name on the
                            // configured default remote, if present.
                            let remote_branch =
                                repo.find_remote_branch(&track_config.default_remote, name);
                            match remote_branch {
                                Ok(branch) => {
                                    remote_branch_exists = true;
                                    checkout_commit = branch.to_commit()?;
                                }
                                Err(_) => {
                                    checkout_commit = default_checkout()?;
                                }
                            }
                        } else {
                            checkout_commit = default_checkout()?;
                        }
                    }
                },
            },
        };
    }

    // Reuse an existing local branch of the same name, or create one at the
    // selected commit.
    let mut target_branch = match repo.find_local_branch(name) {
        Ok(branchref) => branchref,
        Err(_) => repo.create_branch(name, &checkout_commit)?,
    };

    // Push helper: refuses non-pushable remotes before attempting the push.
    fn push(
        remote: &mut repo::RemoteHandle,
        branch_name: &str,
        remote_branch_name: &str,
        repo: &repo::RepoHandle,
    ) -> Result<(), String> {
        if !remote.is_pushable()? {
            return Err(format!(
                "Cannot push to non-pushable remote {}",
                remote.url()
            ));
        }
        remote.push(branch_name, remote_branch_name, repo)
    }

    // Wire up tracking: set the upstream, pushing first to create the remote
    // branch when it does not exist yet.
    if !no_track {
        if let Some((remote_name, remote_branch_name)) = track {
            if remote_branch_exists {
                target_branch.set_upstream(remote_name, remote_branch_name)?;
            } else {
                let mut remote = repo
                    .find_remote(remote_name)
                    .map_err(|error| format!("Error getting remote {}: {}", remote_name, error))?
                    .ok_or_else(|| format!("Remote {} not found", remote_name))?;

                push(
                    &mut remote,
                    &target_branch.name()?,
                    remote_branch_name,
                    &repo,
                )?;

                target_branch.set_upstream(remote_name, remote_branch_name)?;
            }
        } else if let Some(config) = config {
            if let Some(track_config) = config.track {
                if track_config.default {
                    let remote_name = track_config.default_remote;
                    if remote_branch_exists {
                        target_branch.set_upstream(&remote_name, name)?;
                    } else {
                        // Optional prefix namespaces the remote branch name
                        // (e.g. "user/branch").
                        let remote_branch_name = match track_config.default_remote_prefix {
                            Some(prefix) => {
                                format!("{}{}{}", &prefix, super::BRANCH_NAMESPACE_SEPARATOR, &name)
                            }
                            None => name.to_string(),
                        };

                        let mut remote = repo
                            .find_remote(&remote_name)
                            .map_err(|error| {
                                format!("Error getting remote {}: {}", remote_name, error)
                            })?
                            .ok_or_else(|| format!("Remote {} not found", remote_name))?;

                        if !remote.is_pushable()? {
                            return Err(format!(
                                "Cannot push to non-pushable remote {}",
                                remote.url()
                            ));
                        }
                        push(
                            &mut remote,
                            &target_branch.name()?,
                            &remote_branch_name,
                            &repo,
                        )?;

                        target_branch.set_upstream(&remote_name, &remote_branch_name)?;
                    }
                }
            }
        }
    }

    if let Some(subdirectory) = subdirectory {
        std::fs::create_dir_all(subdirectory).map_err(|error| error.to_string())?;
    }
    repo.new_worktree(name, &path, &target_branch)?;

    Ok(())
}
|
||||
@@ -8,13 +8,13 @@ use helpers::*;
|
||||
fn open_empty_repo() {
|
||||
let tmpdir = init_tmpdir();
|
||||
assert!(matches!(
|
||||
open_repo(tmpdir.path(), true),
|
||||
RepoHandle::open(tmpdir.path(), true),
|
||||
Err(RepoError {
|
||||
kind: RepoErrorKind::NotFound
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
open_repo(tmpdir.path(), false),
|
||||
RepoHandle::open(tmpdir.path(), false),
|
||||
Err(RepoError {
|
||||
kind: RepoErrorKind::NotFound
|
||||
})
|
||||
@@ -25,7 +25,7 @@ fn open_empty_repo() {
|
||||
#[test]
|
||||
fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let tmpdir = init_tmpdir();
|
||||
let repo = init_repo(tmpdir.path(), false)?;
|
||||
let repo = RepoHandle::init(tmpdir.path(), false)?;
|
||||
assert!(!repo.is_bare());
|
||||
assert!(repo.is_empty()?);
|
||||
cleanup_tmpdir(tmpdir);
|
||||
@@ -35,7 +35,7 @@ fn create_repo() -> Result<(), Box<dyn std::error::Error>> {
|
||||
#[test]
|
||||
fn create_repo_with_worktree() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let tmpdir = init_tmpdir();
|
||||
let repo = init_repo(tmpdir.path(), true)?;
|
||||
let repo = RepoHandle::init(tmpdir.path(), true)?;
|
||||
assert!(repo.is_bare());
|
||||
assert!(repo.is_empty()?);
|
||||
cleanup_tmpdir(tmpdir);
|
||||
|
||||
Reference in New Issue
Block a user