#!/usr/bin/env perl

use strict;
use warnings;
use feature qw(signatures);
no warnings qw(experimental::signatures);

use YAML ();
use JSON ();
use WWW::Curl::Easy;
use WWW::Curl::Multi;
use File::Path qw(make_path);

use Data::Dumper;
$Data::Dumper::Pair   = " : ";
$Data::Dumper::Indent = 2;

# TODO: Async the git clones
# TODO: Show hook/clone output in a prettier fashion (like docker buildx)

use constant USERAGENT =>
  "User-Agent: MarxBot/4.2.0 (A script reading some information about repos)";

my @handles;    # per-request state: curl handle, response buffer, callbacks
my @messages;   # colored warnings/errors, printed at the end of the run
my %conf;       # parsed config.yml
my $active_repos    = 0;    # currently unused
my $active_requests = 0;

# Colored output helpers. info() prints immediately; error() and warning()
# queue their messages in @messages so they can be printed in a summary at
# the end of the run. Coloring is done by shelling out to tput/echo.
sub info($message) {
    print(`tput setaf 4; tput bold; echo '$message'; tput sgr 0`);
}

sub error($message) {
    push( @messages, `tput setaf 1; tput bold; echo '$message'; tput sgr 0` );
}

sub warning($message) {
    push( @messages, `tput setaf 3; tput bold; echo '$message'; tput sgr 0` );
}

# Prepare the curl easy handle in slot $handle: set the target URL, the
# default user agent plus any extra headers, a private id so the multi loop
# can find the slot again, and a buffer for the response body.
sub set_curl( $handle, $url, @headers ) {
    $handles[$handle]{curl} = WWW::Curl::Easy->new;
    $handles[$handle]{curl}->setopt( CURLOPT_URL, $url );
    $handles[$handle]{curl}->pushopt( CURLOPT_HTTPHEADER, [USERAGENT] );
    for my $header (@headers) {
        $handles[$handle]{curl}->pushopt( CURLOPT_HTTPHEADER, [$header] );
    }
    $handles[$handle]{curl}->setopt( CURLOPT_PRIVATE, $handle );
    $handles[$handle]{curl}
      ->setopt( CURLOPT_WRITEDATA, \$handles[$handle]{memory} );
}

sub add_callback( $handle, $callback ) {
    push( @{ $handles[$handle]{callbacks} }, $callback );
}

# Run a handle's callbacks in the order they were registered.
sub exec_callbacks($handle) {
    if ( $handles[$handle]{callbacks} ) {
        for my $callback ( @{ $handles[$handle]->{callbacks} } ) {
            $callback->($handle);
        }
    }
}

# Hand every prepared easy handle to a curl multi object and drive them
# concurrently. As transfers finish, info_read() returns the private id set
# in set_curl(), the matching slot's callbacks run, and the slot is freed.
sub exec_multicurl() {
    my $curlm = WWW::Curl::Multi->new;
    for my $handle (@handles) {
        if ($handle) {
            $curlm->add_handle( $handle->{curl} );
            $active_requests++;
        }
    }
    while ($active_requests) {
        my $active_transfers = $curlm->perform;
        if ( $active_transfers != $active_requests ) {
            while ( my ( $handle, $return_value ) = $curlm->info_read ) {
                if ($handle) {
                    $active_requests--;
                    exec_callbacks($handle);
                    delete $handles[$handle];
                }
            }
        }
    }
}

# Decode a handle's JSON response body in place.
sub json_decode($handle) {
    $handles[$handle]{memory} = JSON::decode_json( $handles[$handle]{memory} );
}

# Reduce the decoded API response to a flat list of clone URLs, using the
# url_field configured for this handle's lookup.
sub url_filter($handle) {
    my @tmp;
    my $lookup = $conf{lookups}[ $handles[$handle]->{lookup} ];
    for my $repo ( @{ $handles[$handle]{memory} } ) {
        if ( $repo->{ $lookup->{url_field} } ) {
            push( @tmp, $repo->{ $lookup->{url_field} } );
        }
        else {
            error(
"Failed to extract $lookup->{url_field} while processing lookup: $lookup->{name}"
            );
        }
    }
    $handles[$handle]{memory} = \@tmp;
}
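
# url_filter() assumes the endpoint returns a JSON array of repo objects that
# each carry the configured url_field, e.g. (illustrative only; field names
# vary by forge):
#
#   [ { "ssh_url": "git@example.com:alice/tool.git", "name": "tool", ... } ]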

# Seed a handle's URL list with the extra_urls listed in the config.
sub inject_conf_urls($handle) {
    for my $url ( @{ $conf{extra_urls} } ) {
        push( @{ $handles[$handle]{memory} }, $url );
    }
}

# Turn raw clone URLs into repo records: parse owner, name and protocol,
# decide the target path from the configured directories (or the unsorted
# directory), and attach any clone/pull hooks found under hook_dir.
sub process_urls($handle) {
    my @tmp;
    for my $url ( @{ $handles[$handle]{memory} } ) {
        my %repo;
        if ( $url =~
/^((.*(?:@|:\/\/))[a-zA-Z0-9-_.]+(?:\/|:)(?:[a-zA-Z0-9-_.\/]+\/)?([a-zA-Z0-9-_.]+?)\/([a-zA-Z0-9-_.]+?)(?:\.git)?\/?)$/
          )
        {
            $repo{url}      = $1;
            $repo{owner}    = $3;
            $repo{name}     = $4;
            $repo{fullname} = "$3/$4";

            if ( substr( $2, -1 ) eq "@" ) {
                $repo{protocol} = "ssh";
            }
            else {
                $repo{protocol} = $2;
            }
        }
        else {
            error("Failed to parse url: $url");
            next;
        }
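
        # Illustrative examples of what the regex above captures (made-up URLs):
        #   git@example.com:alice/tool.git   -> owner "alice", name "tool", protocol "ssh"
        #   https://example.com/alice/tool   -> owner "alice", name "tool", protocol "https://"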

        # Honour the skip list from the config.
        next if ( grep { $_ eq $repo{fullname} } @{ $conf{skip_repos} } );

        # Find a configured directory that claims this repo.
        my $path;
        for my $directory ( @{ $conf{directories} } ) {
            if ( grep { $_ eq $repo{fullname} } @{ $directory->{repos} } ) {
                $path = $directory->{path};
                last;
            }
        }

        # Decide where the repo should live. URLs injected from the config
        # (handles without a lookup) fall back to the unsorted directory, and
        # so do lookup results unless the lookup sets block_unsorted. printf
        # lets the shell expand ~ and environment variables in configured
        # paths.
        if ( !defined $handles[$handle]{lookup} ) {
            if ($path) {
                $repo{path} = `printf $path/$repo{name}`;
            }
            else {
                $repo{path} = `printf $conf{unsorted_directory}/$repo{name}`;
            }
        }
        elsif ( !$conf{lookups}[ $handles[$handle]{lookup} ]{block_unsorted}
            && !$path )
        {
            $repo{path} = `printf $conf{unsorted_directory}/$repo{name}`;
        }
        elsif ($path) {
            $repo{path} = `printf $path/$repo{name}`;
        }
        else {
            warning("Skipping $repo{fullname}");
            next;
        }

        # Hooks are executables named "<owner>:<name>" under hook_dir.
        my $clone_hook = "$conf{hook_dir}/clone/$repo{owner}:$repo{name}";
        my $pull_hook  = "$conf{hook_dir}/pull/$repo{owner}:$repo{name}";
        ( -x $clone_hook )
          and $repo{clone_hook} = "cd $repo{path} && $clone_hook";
        ( -x $pull_hook ) and $repo{pull_hook} = "cd $repo{path} && $pull_hook";

        push( @tmp, \%repo );
    }
    $handles[$handle]{memory} = \@tmp;
}

# sub dump($handle) {
#     print("------ Handle $handle ------\n");
#     print Dumper( $handles[$handle]->{memory} );
# }

# True if $directory does not exist or has no entries besides "." and "..".
sub folder_is_empty($directory) {
    opendir( my $dh, $directory ) or return 1;
    return scalar( grep { $_ ne "." && $_ ne ".." } readdir($dh) ) == 0;
}

# Clone repos whose target directory is empty (running any clone hook), and
# pull repos that already contain a .git directory (running any pull hook).
# Anything else is left alone.
sub handle_repos($handle) {
    for my $repo ( @{ $handles[$handle]->{memory} } ) {
        if ( folder_is_empty("$repo->{path}") ) {
            make_path( $repo->{path} );
            info("Cloning $repo->{fullname}");
            `git -C '$repo->{path}' clone $conf{clone_flags} '$repo->{url}' .`;
            ( $? != 0 )
              and error("Failed to clone $repo->{url} to $repo->{path}");
            if ( $repo->{clone_hook} ) {
                info("Running clone hook for $repo->{fullname}");
                `$repo->{clone_hook}`;
                ( $? != 0 )
                  and error(
                    "Failed to execute clone hook for $repo->{fullname}");
            }
        }
        elsif ( !folder_is_empty("$repo->{path}/.git") ) {
            info("Pulling $repo->{fullname} to $repo->{path}");
            if (`git -C $repo->{path} status -z`) {
                warning("$repo->{path} has an unclean tree.");
            }
            else {
                `git -C $repo->{path} pull $conf{pull_flags}`;
                ( $? != 0 )
                  and error("Failed to pull $repo->{url} to $repo->{path}");
            }
            if ( $repo->{pull_hook} ) {
                info("Running pull hook for $repo->{fullname}");
                `$repo->{pull_hook}`;
                ( $? != 0 )
                  and error(
                    "Failed to execute pull hook for $repo->{fullname}");
            }
        }
    }
}

# Load config.yml from $XDG_CONFIG_HOME/clonedev (or ~/.config/clonedev) into
# %conf, defaulting hook_dir to the "hooks" directory next to the config file.
sub read_conf() {
    my $configdir;
    if ( $ENV{XDG_CONFIG_HOME} ) {
        $configdir = "$ENV{XDG_CONFIG_HOME}/clonedev";
    }
    else {
        $configdir = "$ENV{HOME}/.config/clonedev";
    }
    open( my $cfg, '<', $configdir . "/config.yml" )
      or die "Cannot open $configdir/config.yml: $!";
    my $hashref = YAML::Load( do { local $/; <$cfg> } );
    close($cfg);
    if ( !$hashref->{hook_dir} ) {
        $hashref->{hook_dir} = "$configdir/hooks";
    }

    # Optional list-valued keys default to empty lists so later derefs are safe.
    $hashref->{$_} //= [] for qw(lookups extra_urls directories skip_repos);
    %conf = %$hashref;
}
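
# A minimal, illustrative config.yml. The keys are the ones this script reads
# from %conf; the host, paths, commands and header below are made-up examples:
#
#   unsorted_directory: ~/src/unsorted
#   clone_flags: --recurse-submodules
#   pull_flags: --ff-only
#   skip_repos:
#     - someowner/somerepo
#   extra_urls:
#     - git@example.com:someowner/another-repo.git
#   directories:
#     - path: ~/src/work
#       repos:
#         - someowner/work-repo
#   lookups:
#     - name: example-forge
#       api_url: https://git.example.com/api/v1
#       endpoint: repos?limit=100
#       targets:
#         - users/alice
#       url_field: ssh_url
#       token_cmd: pass show example-forge-token
#       extra_headers: "Authorization: token $TOKEN"
#       block_unsorted: false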
# ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
# ░ ░░░░ ░░░ ░░░ ░░ ░░░ ░
# ▒ ▒▒ ▒▒ ▒▒▒▒ ▒▒▒▒▒ ▒▒▒▒▒ ▒▒ ▒
# ▓ ▓▓ ▓▓▓▓ ▓▓▓▓▓ ▓▓▓▓▓ ▓ ▓ ▓
# █ █ █ ██ █████ █████ ██ █
# █ ████ ██ ████ ██ ██ ███ █
# ████████████████████████████████████████

read_conf();

my $last_handle = 0;

# One request per (lookup, target) pair. TOKEN is exported so the lookup's
# extra_headers string can reference it; the headers are expanded via printf.
for my $i ( keys @{ $conf{lookups} } ) {
    my %lookup = %{ $conf{lookups}[$i] };
    chomp( $ENV{TOKEN} = $lookup{token_cmd} ? `$lookup{token_cmd}` : "" );
    for ( $lookup{extra_headers} ) {
        $_ = `printf "$_"`;
    }
    for my $j ( keys @{ $lookup{targets} } ) {
        $last_handle++;
        $handles[$last_handle]{lookup} = $i;
        set_curl( $last_handle,
            "$lookup{api_url}/$lookup{targets}[$j]/$lookup{endpoint}",
            $lookup{extra_headers} );
        add_callback( $last_handle, \&json_decode );
        add_callback( $last_handle, \&url_filter );
        add_callback( $last_handle, \&process_urls );
        add_callback( $last_handle, \&handle_repos );
    }
}

exec_multicurl();

# One extra, request-less handle for the URLs listed directly in the config.
$last_handle++;
add_callback( $last_handle, \&inject_conf_urls );
add_callback( $last_handle, \&process_urls );
add_callback( $last_handle, \&handle_repos );
exec_callbacks($last_handle);

# Finally, print any queued warnings and errors.
for my $message (@messages) {
    print($message);
}