1
0
Fork 0
dotfiles/common/.local/bin/clonedev

270 lines
8.3 KiB
Plaintext
Raw Normal View History

2024-02-14 10:35:47 +01:00
#!/bin/perl
2024-02-15 01:08:31 +01:00
2024-02-14 10:35:47 +01:00
use strict;
use warnings;
use feature ("signatures");
use JSON;
2024-02-14 10:35:47 +01:00
use WWW::Curl::Easy;
2024-02-26 23:29:42 +01:00
use File::Path("make_path");
2024-02-27 00:16:43 +01:00
use Data::Dumper;
$Data::Dumper::Pair = " : ";
$Data::Dumper::Indent = 2;
2024-02-26 23:29:42 +01:00
use YAML;
$YAML::Preserve = 1;
2024-02-26 23:29:42 +01:00
# TODO: Show hook/clone output in a prettier fashion (like docker buildx)
# TODO: Allow branch selection
2024-02-28 10:29:08 +01:00
# TODO: Allow updating a single repo
# TODO: Add flags to allow checking for unclean trees, only cloning, only pulling
# TODO: Check if directories are empty before cloning any repos to allow cloning in any order
2024-02-14 10:35:47 +01:00
2024-02-27 13:39:46 +01:00
use constant USERAGENT => "User-Agent: MarxBot/4.2.0 (A script reading some information about repos)";
use constant URL_REGEX =>
qr/^((.*(?:@|:\/\/))[a-zA-Z0-9-_.]+(?:\/|:)(?:[a-zA-Z0-9-_.\/]+\/)?([a-zA-Z0-9-_.]+?)\/([a-zA-Z0-9-_.]+?)(?:\.git)?\/?)$/;
2024-02-14 10:35:47 +01:00
2024-02-15 01:08:31 +01:00
# Per-handle state table; each slot holds {curl}, {memory}, {lookup}, {callbacks}.
my @handles;
# Warnings/errors are buffered here and flushed at the very end of the run.
my @messages;
# Parsed configuration from config.yml.
my %conf;
my $active_repos    = 0;
my $active_requests = 0;
2024-02-26 23:29:42 +01:00
# Print a bold blue status line immediately.
# The colour codes are fetched from tput, but the message itself is assembled
# in Perl: the old `echo '$message'` form broke on apostrophes and allowed
# shell injection through repo-derived strings.
sub info($message) {
    my $style = `tput setaf 4; tput bold`;
    my $reset = `tput sgr0`;
    print("$style$message\n$reset");
}
2024-02-26 23:29:42 +01:00
# Buffer a bold red error line in @messages (flushed at end of run).
# Assembling the string in Perl instead of `echo '$message'` fixes breakage
# on apostrophes and shell injection via repo-derived strings.
sub error($message) {
    my $style = `tput setaf 1; tput bold`;
    my $reset = `tput sgr0`;
    push(@messages, "$style$message\n$reset");
}
2024-02-26 23:29:42 +01:00
# Buffer a bold yellow warning line in @messages (flushed at end of run).
# Same quoting fix as error(): never interpolate $message into a shell command.
sub warning($message) {
    my $style = `tput setaf 3; tput bold`;
    my $reset = `tput sgr0`;
    push(@messages, "$style$message\n$reset");
}
# Prepare a curl easy handle for slot $handle: target URL, User-Agent,
# any extra headers, and a write target inside the global handle table.
sub set_curl($handle, $url, @headers) {
    my $curl = WWW::Curl::Easy->new;
    $curl->setopt(CURLOPT_URL, $url);
    # Always identify ourselves; some APIs reject UA-less requests.
    $curl->pushopt(CURLOPT_HTTPHEADER, [USERAGENT]);
    $curl->pushopt(CURLOPT_HTTPHEADER, [$_]) for @headers;
    # Stash our slot index so the handle can be mapped back later.
    $curl->setopt(CURLOPT_PRIVATE, $handle);
    $handles[$handle]{curl} = $curl;
    # Response body accumulates in this slot's memory field.
    $curl->setopt(CURLOPT_WRITEDATA, \$handles[$handle]{memory});
}
# Append a post-processing callback to this handle's queue
# (autovivifies the arrayref on first use, as before).
sub add_callback($handle, $callback) {
    my $queue = $handles[$handle]{callbacks} //= [];
    push(@$queue, $callback);
}
# Perform the prepared request for $handle and report failures via error().
# Fixes: the perform() return code was never checked, and the URL was read
# with getinfo(CURLOPT_URL) — getinfo() takes CURLINFO_* constants, so that
# lookup returned garbage. Also treat HTTP 300 as a failure (was let through).
sub exec_curl($handle) {
    my $curl = $handles[$handle]{curl};
    my $retcode = $curl->perform;
    my $url = $curl->getinfo(CURLINFO_EFFECTIVE_URL);
    if ($retcode != 0) {
        # Transport-level failure (DNS, connect, TLS, ...).
        error("Curl on $url failed: " . $curl->strerror($retcode));
        return;
    }
    my $status = $curl->getinfo(CURLINFO_HTTP_CODE);
    # Anything outside 2xx is a failure for our purposes.
    if ($status < 200 || $status >= 300) {
        error("Curl on $url failed with code $status");
    }
}
2024-02-14 10:35:47 +01:00
# Replace the handle's raw response body with its decoded JSON structure.
sub json_decode($handle) {
    my $raw = $handles[$handle]{memory};
    $handles[$handle]{memory} = JSON::decode_json($raw);
}
2024-02-26 23:29:42 +01:00
# Reduce the decoded repo list to a flat list of clone URLs, using the
# lookup's configured url_field. Repos missing that field are reported.
sub url_filter($handle) {
    my $lookup = $conf{lookups}[ $handles[$handle]{lookup} ];
    my $field  = $lookup->{url_field};
    my @urls;
    for my $repo (@{ $handles[$handle]{memory} }) {
        my $url = $repo->{$field};
        if ($url) {
            push(@urls, $url);
        }
        else {
            error("Failed to extract $field while processing lookup: $lookup->{name}");
        }
    }
    $handles[$handle]{memory} = \@urls;
}
2024-02-26 23:29:42 +01:00
# Append every extra_urls entry from the config to this handle's URL list,
# so hand-listed repos flow through the same pipeline as API results.
sub inject_conf_urls($handle) {
    push(@{ $handles[$handle]{memory} }, @{ $conf{extra_urls} });
}
2024-02-15 01:08:31 +01:00
# Turn the handle's list of raw URLs into repo records:
#   {url, owner, name, fullname, protocol, path, clone_hook?, pull_hook?}
# applying skip_repos, path_transformations and the directories mapping
# from the config.
#
# Fix: the per-directory lookups filter used grep(qr/NAME/, @list) — a qr
# object evaluated as a plain expression is always true, so the filter never
# filtered anything. It now matches each configured lookup entry against the
# current lookup's name, which is the match the qr was evidently meant to do.
sub process_urls($handle) {
    my @repos;
    my $lookup      = $conf{lookups}[ $handles[$handle]{lookup} ];
    my $lookup_name = $lookup ? ($lookup->{name} // "") : "";
    for my $url (@{ $handles[$handle]{memory} }) {
        my %repo;
        if ($url =~ URL_REGEX) {
            $repo{url}      = $1;
            $repo{owner}    = $3;
            $repo{name}     = $4;
            $repo{fullname} = "$3/$4";
            # A scheme part ending in "@" means an ssh-style remote (git@host:...).
            $repo{protocol} = substr($2, -1) eq "@" ? "ssh" : $2;
        } else {
            error("Failed to parse url: $url");
            next;
        }
        next if grep { $_ eq $repo{fullname} } @{ $conf{skip_repos} };

        # Derive the on-disk directory name, applying any configured rewrites.
        my $repodir = $repo{name};
        for my $pt (@{ $conf{path_transformations} }) {
            if ($repo{fullname} =~ qr/$pt->{match}/) {
                $repodir =~ s/$pt->{replace}/$pt->{with}/;
            }
        }
        # Last matching directory rule wins, mirroring the original loop.
        for my $directory (@{ $conf{directories} }) {
            for my $regex (@{ $directory->{repos} }) {
                if (
                    $repo{fullname} =~ qr/^$regex$/
                    && (!$directory->{lookups}
                        || grep { $lookup_name =~ /$_/ } @{ $directory->{lookups} })
                ) {
                    # Shell printf so ~ and env vars in configured paths expand.
                    $repo{path} = `printf $directory->{path}/$repodir`;
                }
            }
        }
        if (!$repo{path}) {
            # Unmatched repos land in unsorted_directory unless this lookup
            # explicitly blocks that.
            next if $lookup && $lookup->{block_unsorted};
            $repo{path} = `printf $conf{unsorted_directory}/$repodir`;
        }
        my $clone_hook = "$conf{hook_dir}/clone/$repo{owner}:$repo{name}";
        my $pull_hook  = "$conf{hook_dir}/pull/$repo{owner}:$repo{name}";
        # Only executable hooks are wired up.
        $repo{clone_hook} = "cd $repo{path} && $clone_hook" if -x $clone_hook;
        $repo{pull_hook}  = "cd $repo{path} && $pull_hook"  if -x $pull_hook;
        push(@repos, \%repo);
    }
    $handles[$handle]{memory} = \@repos;
}
# Debug helper: pretty-print a handle's memory slot with Data::Dumper.
sub dump_mem($handle) {
    print("------ Handle $handle ------\n");
    print Dumper($handles[$handle]->{memory});
}
2024-02-14 10:35:47 +01:00
2024-02-26 23:29:42 +01:00
# True when $directory contains no entries besides "." and "..".
# A directory that cannot be opened (missing, unreadable) counts as empty,
# which lets the caller treat "nothing there yet" uniformly.
sub folder_is_empty($directory) {
    opendir(my $dh, $directory) or return 1;
    my @entries = grep { $_ ne "." && $_ ne ".." } readdir($dh);
    closedir($dh);
    return @entries == 0;
}
2024-02-26 23:29:42 +01:00
# Clone or pull every repo record attached to $handle.
#   - empty target dir        -> clone, then run the clone hook if present
#   - existing git worktree   -> pull (unless the tree is dirty), then pull hook
#
# Fixes: the hook-failure checks previously sat OUTSIDE the
# `if ($repo->{..._hook})` guards, so when no hook existed they re-read $?
# from the git command and reported a phantom hook failure. Likewise the
# pull-failure check ran even on the unclean-tree branch, where $? belonged
# to `git status`. Each $? check now directly follows the command it judges.
sub handle_repos($handle) {
    for my $repo (@{ $handles[$handle]{memory} }) {
        if (folder_is_empty($repo->{path})) {
            make_path($repo->{path});
            info("Cloning $repo->{fullname}");
            `git -C '$repo->{path}' clone $conf{clone_flags} '$repo->{url}' .`;
            ($? != 0) and error("Failed to clone $repo->{url} to $repo->{path}");
            if ($repo->{clone_hook}) {
                info("Running clone hook for $repo->{fullname}");
                `$repo->{clone_hook}`;
                ($? != 0) and error("Failed to execute clone hook for $repo->{fullname}");
            }
        } elsif (!folder_is_empty("$repo->{path}/.git")) {
            info("Pulling $repo->{fullname} to $repo->{path}");
            # Any `git status -z` output means local modifications: don't pull over them.
            if (`git -C $repo->{path} status -z`) {
                warning("$repo->{path} has an unclean tree.");
            } else {
                `git -C $repo->{path} pull $conf{pull_flags}`;
                ($? != 0) and error("Failed to pull $repo->{url} to $repo->{path}");
            }
            if ($repo->{pull_hook}) {
                info("Running pull hook for $repo->{fullname}");
                `$repo->{pull_hook}`;
                ($? != 0) and error("Failed to execute pull hook for $repo->{fullname}");
            }
        }
    }
}
2024-02-14 10:35:47 +01:00
2024-02-15 01:08:31 +01:00
# Load config.yml from $XDG_CONFIG_HOME/clonedev (or ~/.config/clonedev)
# into the global %conf, defaulting hook_dir to <configdir>/hooks.
# Fix: the bare `die` on open failure gave no hint which file was missing;
# it now reports the path and the OS error.
sub read_conf() {
    my $configdir =
        $ENV{XDG_CONFIG_HOME}
        ? "$ENV{XDG_CONFIG_HOME}/clonedev"
        : "$ENV{HOME}/.config/clonedev";
    my $config_file = "$configdir/config.yml";
    open(my $cfg, '<', $config_file)
        or die "Cannot open $config_file: $!\n";
    # Slurp the whole file for the YAML parser.
    my $hashref = YAML::Load(do { local $/; <$cfg> });
    close($cfg);
    # Hooks live next to the config unless the config says otherwise.
    $hashref->{hook_dir} //= "$configdir/hooks";
    %conf = %$hashref;
}
# Run one lookup end to end on slot $handle:
# fetch the API URL, decode the JSON, reduce it to clone URLs,
# resolve them to repo records, then clone/pull each one.
sub full_pipeline($handle, $url, @headers) {
    set_curl($handle, $url, @headers);
    exec_curl($handle);
    json_decode($handle);
    url_filter($handle);
    process_urls($handle);
    handle_repos($handle);
}
2024-02-14 10:35:47 +01:00
# ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
# ░ ░░░░ ░░░ ░░░ ░░ ░░░ ░
# ▒ ▒▒ ▒▒ ▒▒▒▒ ▒▒▒▒▒ ▒▒▒▒▒ ▒▒ ▒
# ▓ ▓▓ ▓▓▓▓ ▓▓▓▓▓ ▓▓▓▓▓ ▓ ▓ ▓
# █ █ █ ██ █████ █████ ██ █
# █ ████ ██ ████ ██ ██ ███ █
# ████████████████████████████████████████
2024-02-15 01:08:31 +01:00
read_conf();

my $last_handle = 0;
for my $i (keys @{ $conf{lookups} }) {
    my %lookup = %{ $conf{lookups}[$i] };
    # Expose this lookup's auth token so header templates can reference $TOKEN.
    chomp($ENV{TOKEN} = $lookup{token_cmd} ? `$lookup{token_cmd}` : "");
    # Let the shell expand env vars (e.g. $TOKEN) inside each header template.
    $_ = `printf "$_"` for @{ $lookup{extra_headers} };
    if ($lookup{targets}) {
        # One handle per target (org/user), all sharing lookup $i.
        for my $j (keys @{ $lookup{targets} }) {
            $last_handle++;
            $handles[$last_handle]{lookup} = $i;
            full_pipeline($last_handle, "$lookup{api_url}/$lookup{targets}[$j]/$lookup{endpoint}", @{ $lookup{extra_headers} });
        }
    } else {
        $last_handle++;
        $handles[$last_handle]{lookup} = $i;
        full_pipeline($last_handle, "$lookup{api_url}/$lookup{endpoint}", @{ $lookup{extra_headers} });
    }
}

# URLs listed directly in the config get their own handle and skip the HTTP steps.
$last_handle++;
inject_conf_urls($last_handle);
process_urls($last_handle);
handle_repos($last_handle);

# Flush the buffered warnings/errors so they appear grouped at the bottom.
print($_) for @messages;