add path transformation and bugfix clonedev
parent: d900ed8964
commit: 0b80f982bb
2 changed files with 167 additions and 111 deletions

@@ -1,34 +1,38 @@
-# token_cmd: "secret-tool lookup gitea api_token"
 lookups:
-  - name: "gitea_owned_repos"
-    api_url: "https://git.snaile.de/api/v1"
-    endpoint: "repos"
-    url_field: "ssh_url"
-    token_cmd: "rbw get 'Gitea Snailed' -f 'Clonedev PAT'"
-    block_unsorted: false
-    targets:
-      - "orgs/snailed"
-      - "users/luca"
-    extra_headers:
-      - "Authorization: token ${TOKEN}"
-  - name: "github_starred_repos"
-    api_url: "https://api.github.com"
-    endpoint: "starred"
-    url_field: "ssh_url"
-    token_cmd: "rbw get 'Github' -f 'Clonedev PAT'"
-    block_unsorted: true
-    targets:
-      - "users/ssnailed"
-    extra_headers:
-      - "Authorization: Bearer ${TOKEN}"
-  # - name: "tralios_gitlab"
-  #   api_url: "https://gitlab.tralios.de/api/v4"
-  #   endpoint: "projects"
-  #   url_field: "ssh_url_to_repo"
-  #   token_cmd: "rbw get 'Gitlab Tralios' -f 'Clonedev PAT'"
+  # - name: "gitea_owned_repos"
+  #   api_url: "https://git.snaile.de/api/v1"
+  #   endpoint: "repos"
+  #   url_field: "ssh_url"
+  #   token_cmd: "rbw get 'Gitea Snailed' -f 'Clonedev PAT'"
+  #   block_unsorted: false
+  #   targets:
+  #     - "orgs/snailed"
+  #     - "users/luca"
+  #   extra_headers:
+  #     - "Authorization: token ${TOKEN}"
+  # - name: "github_starred_repos"
+  #   api_url: "https://api.github.com"
+  #   endpoint: "starred"
+  #   url_field: "ssh_url"
+  #   token_cmd: "rbw get 'Github' -f 'Clonedev PAT'"
   #   block_unsorted: true
+  #   targets:
+  #     - "users/ssnailed"
   #   extra_headers:
   #     - "Authorization: Bearer ${TOKEN}"
+  - name: "tralios_gitlab"
+    api_url: "https://gitlab.tralios.de/api/v4"
+    targets:
+      - "groups/197" # ansible-galaxy
+      - "groups/73" # infrastruktur
+      - "groups/26" # kubernetes
+      - "groups/14" # docker
+    endpoint: "projects"
+    url_field: "ssh_url_to_repo"
+    token_cmd: "rbw get 'Gitlab Tralios' -f 'Clonedev PAT'"
+    block_unsorted: true
+    extra_headers:
+      - "Authorization: Bearer ${TOKEN}"
 
 extra_urls:
 # - "https://git.kernel.org/pub/scm/git/git.git"
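
Note on the lookup entries above: each lookup's token_cmd output is exported as TOKEN, and the extra_headers strings are run through the shell before being attached to the request (see the main loop near the end of the script below). A minimal standalone sketch, with "echo dummy-token" standing in for the real rbw call:

    my %lookup = (
        token_cmd     => "echo dummy-token",
        extra_headers => ["Authorization: Bearer \${TOKEN}"],
    );
    chomp($ENV{TOKEN} = $lookup{token_cmd} ? `$lookup{token_cmd}` : "");
    for (@{$lookup{extra_headers}}) {
        $_ = `printf "$_"`;    # the shell expands ${TOKEN} into the header value
    }
    print "$lookup{extra_headers}[0]\n";    # Authorization: Bearer dummy-token
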

@@ -46,17 +50,21 @@ directories:
       - "luca/st-custom"
       - "luca/tokyonight-icons"
       - "luca/tokyonight-theme"
 
   - path: "${XDG_DOCUMENTS_DIR}/dev/nvim"
     repos:
       - "snailed/taolf"
 
   - path: "${XDG_DOCUMENTS_DIR}/dev/misc"
     repos:
       - "snailed/tidal-scraper"
       - "b3nj5m1n/xdg-ninja"
       - "jnweiger/led-name-badge-ls32"
 
   - path: "${XDG_DOCUMENTS_DIR}/dev"
     repos:
       - "void-linux/void-packages"
 
   - path: "${XDG_DOCUMENTS_DIR}/dev/server"
     repos:
       - "snailed/ansible-example"

@@ -69,6 +77,32 @@ directories:
       - "luca/dotfiles-server"
       - "spantaleev/matrix-docker-ansible-deploy"
 
+  - path: "${XDG_DOCUMENTS_DIR}/dev/tralios"
+    lookups:
+      - "tralios_gitlab"
+    repos:
+      - "infrastruktur/ansible"
+      - "kubernetes/helm"
+
+  - path: "${XDG_DOCUMENTS_DIR}/dev/tralios/docker"
+    lookups:
+      - "tralios_gitlab"
+    repos:
+      - "docker/phpldapadmin"
+      - "docker/nextcloud_mail_uploader"
+
+  - path: "${XDG_DOCUMENTS_DIR}/dev/tralios/ansible/collections_dev/ansible_collections/tralios"
+    lookups:
+      - "tralios_gitlab"
+    repos:
+      - "ansible-galaxy/kubernetes.*"
+
+# Transforms the directory name that is cloned into (only the last part of the path)
+path_transformations:
+  - match: "ansible-galaxy/kubernetes.*"
+    replace: "-"
+    with: "_"
+
 unsorted_directory: "${XDG_DOCUMENTS_DIR}/dev/unsorted"
 
 skip_repos:
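
The path_transformations block added above only rewrites the checkout directory name, not the remote path. A standalone sketch of the substitution (the repo name "kubernetes-core" is hypothetical; the pattern and replacement come from the config):

    my %repo = (fullname => "ansible-galaxy/kubernetes-core", name => "kubernetes-core");
    my @path_transformations = ({match => "ansible-galaxy/kubernetes.*", replace => "-", with => "_"});

    my $repodir = $repo{name};    # last path component the repo is cloned into
    for my $pt (@path_transformations) {
        $repodir =~ s/$pt->{replace}/$pt->{with}/ if $repo{fullname} =~ qr/$pt->{match}/;
    }
    print "$repodir\n";    # kubernetes_core

Presumably this is so checkouts under ansible_collections/tralios end up with underscore names, as Ansible expects for collections.
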

@@ -17,63 +17,73 @@ $Data::Dumper::Indent = 2;
 # TODO: Show hook/clone output in a prettier fashion (like docker buildx)
 
 use constant USERAGENT => "User-Agent: MarxBot/4.2.0 (A script reading some information about repos)";
+use constant URL_REGEX =>
+  qr/^((.*(?:@|:\/\/))[a-zA-Z0-9-_.]+(?:\/|:)(?:[a-zA-Z0-9-_.\/]+\/)?([a-zA-Z0-9-_.]+?)\/([a-zA-Z0-9-_.]+?)(?:\.git)?\/?)$/;
 
 my @handles;
 my @messages;
 my %conf;
 my $active_repos = 0;
 my $active_requests = 0;
 
 
 sub info($message) {
     print(`tput setaf 4; tput bold; echo '$message'; tput sgr 0`);
 }
 
 
 sub error($message) {
-    push( @messages, `tput setaf 1; tput bold; echo '$message'; tput sgr 0` );
+    push(@messages, `tput setaf 1; tput bold; echo '$message'; tput sgr 0`);
 }
 
 
 sub warning($message) {
-    push( @messages, `tput setaf 3; tput bold; echo '$message'; tput sgr 0` );
+    push(@messages, `tput setaf 3; tput bold; echo '$message'; tput sgr 0`);
 }
 
-sub set_curl( $handle, $url, @headers ) {
+sub set_curl($handle, $url, @headers) {
     $handles[$handle]{curl} = WWW::Curl::Easy->new;
-    $handles[$handle]{curl}->setopt( CURLOPT_URL, $url );
-    $handles[$handle]{curl}->pushopt( CURLOPT_HTTPHEADER, [USERAGENT] );
+    $handles[$handle]{curl}->setopt(CURLOPT_URL, $url);
+    $handles[$handle]{curl}->pushopt(CURLOPT_HTTPHEADER, [USERAGENT]);
     for my $header (@headers) {
-        $handles[$handle]{curl}->pushopt( CURLOPT_HTTPHEADER, [$header] );
+        $handles[$handle]{curl}->pushopt(CURLOPT_HTTPHEADER, [$header]);
     }
-    $handles[$handle]{curl}->setopt( CURLOPT_PRIVATE, $handle );
-    $handles[$handle]{curl}->setopt( CURLOPT_WRITEDATA, \$handles[$handle]{memory} );
+    $handles[$handle]{curl}->setopt(CURLOPT_PRIVATE, $handle);
+    $handles[$handle]{curl}->setopt(CURLOPT_WRITEDATA, \$handles[$handle]{memory});
 }
 
-sub add_callback( $handle, $callback ) {
-    push( @{ $handles[$handle]{callbacks} }, $callback );
+sub add_callback($handle, $callback) {
+    push(@{$handles[$handle]{callbacks}}, $callback);
 }
 
 
 sub exec_callbacks($handle) {
-    if ( $handles[$handle]{callbacks} ) {
-        for my $callback ( @{ $handles[$handle]->{callbacks} } ) {
+    if ($handles[$handle]{callbacks}) {
+        for my $callback (@{$handles[$handle]->{callbacks}}) {
             $callback->($handle);
         }
     }
 }
 
 
 sub exec_multicurl() {
     my $curlm = WWW::Curl::Multi->new;
     for my $handle (@handles) {
         if ($handle) {
-            $curlm->add_handle( $handle->{curl} );
+            $curlm->add_handle($handle->{curl});
             $active_requests++;
         }
     }
     while ($active_requests) {
         my $active_transfers = $curlm->perform;
-        if ( $active_transfers != $active_requests ) {
-            while ( my ( $handle, $ret ) = $curlm->info_read ) {
+        if ($active_transfers != $active_requests) {
+            while (my ($handle, $ret) = $curlm->info_read) {
                 if ($handle) {
                     $active_requests--;
                     exec_callbacks($handle);
 
                     # TODO: proper error checking
                     # $handles[$handle]{curl}->getinfo(CURLINFO_HTTP_CODE);
                     delete $handles[$handle];
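
For reference, a rough sketch of what the new URL_REGEX constant captures; both remotes below are made-up examples, not taken from the config:

    use constant URL_REGEX =>
      qr/^((.*(?:@|:\/\/))[a-zA-Z0-9-_.]+(?:\/|:)(?:[a-zA-Z0-9-_.\/]+\/)?([a-zA-Z0-9-_.]+?)\/([a-zA-Z0-9-_.]+?)(?:\.git)?\/?)$/;

    for my $url ("git@example.com:someuser/somerepo.git", "https://example.com/group/subgroup/project") {
        if ($url =~ URL_REGEX) {
            # $2 ends in "@" for SSH-style remotes (which is what process_urls checks);
            # $3/$4 become owner and name, with any trailing ".git" left out of $4.
            print "prefix=$2 owner=$3 name=$4\n";
        }
    }
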

@@ -83,40 +93,44 @@ sub exec_multicurl() {
     }
 }
 
 
 sub json_decode($handle) {
-    $handles[$handle]{memory} = JSON::decode_json( $handles[$handle]{memory} );
+    $handles[$handle]{memory} = JSON::decode_json($handles[$handle]{memory});
 }
 
 
 sub url_filter($handle) {
     my @tmp;
-    my $lookup = $conf{lookups}[ $handles[$handle]->{lookup} ];
-    for my $repo ( @{ $handles[$handle]{memory} } ) {
-        if ( $repo->{ $lookup->{url_field} } ) {
-            push( @tmp, $repo->{ $lookup->{url_field} } );
+    my $lookup = $conf{lookups}[$handles[$handle]->{lookup}];
+    for my $repo (@{$handles[$handle]{memory}}) {
+        if ($repo->{$lookup->{url_field}}) {
+            push(@tmp, $repo->{$lookup->{url_field}});
         } else {
-            error( "Failed to extract $lookup->{url_field} while processing lookup: $lookup->{name}" );
+            error("Failed to extract $lookup->{url_field} while processing lookup: $lookup->{name}");
         }
     }
     $handles[$handle]{memory} = \@tmp;
 }
 
 
 sub inject_conf_urls($handle) {
-    for my $url ( @{ $conf{extra_urls} } ) {
-        push( @{ $handles[$handle]{memory} }, $url );
+    for my $url (@{$conf{extra_urls}}) {
+        push(@{$handles[$handle]{memory}}, $url);
     }
 }
 
 
 sub process_urls($handle) {
     my @tmp;
-    for my $url ( @{ $handles[$handle]{memory} } ) {
+    for my $url (@{$handles[$handle]{memory}}) {
         my %repo;
-        if ( $url =~ /^((.*(?:@|:\/\/))[a-zA-Z0-9-_.]+(?:\/|:)(?:[a-zA-Z0-9-_.\/]+\/)?([a-zA-Z0-9-_.]+?)\/([a-zA-Z0-9-_.]+?)(?:\.git)?\/?)$/ ) {
+        if ($url =~ URL_REGEX) {
             $repo{url} = $1;
             $repo{owner} = $3;
             $repo{name} = $4;
             $repo{fullname} = "$3/$4";
 
-            if ( substr( $2, -1 ) eq "@" ) {
+            if (substr($2, -1) eq "@") {
                 $repo{protocol} = "ssh";
             } else {
                 $repo{protocol} = $2;

@@ -126,97 +140,114 @@ sub process_urls($handle) {
             next;
         }
 
-        next if ( grep( $_ eq $repo{fullname}, @{ $conf{skip_repos} } ) );
+        next if (grep($_ eq $repo{fullname}, @{$conf{skip_repos}}));
 
         my $path;
-        DIRS: for my $directory ( @{ $conf{directories} } ) {
-            for my $regex ( @{ $directory->{repos} } ) {
-                if ( $repo{fullname} =~ /$regex/ ) {
-                    $repo{path} = `printf $directory->{path}/$repo{name}`;
-                    last DIRS;
+        my $repodir = $repo{name};
+        for my $pt (@{$conf{path_transformations}}) {
+            if ($repo{fullname} =~ qr/$pt->{match}/) {
+                $repodir =~ s/$pt->{replace}/$pt->{with}/;
+            }
+        }
+
+        for my $directory (@{$conf{directories}}) {
+            for my $regex (@{$directory->{repos}}) {
+                if (
+                    $repo{fullname} =~ qr/^$regex$/
+                    && (!$directory->{lookups}
+                        || grep(qr/$conf{lookups}[ $handles[$handle]{lookup} ]{name}/, @{$directory->{lookups}}))
+                ) {
+                    $repo{path} = `printf $directory->{path}/$repodir`;
+                    last;
                 }
             }
         }
 
-        if ( !$repo{path} && !$conf{lookups}[ $handles[$handle]{lookup} ]{block_unsorted} ) {
-            $repo{path} = `printf $conf{unsorted_directory}/$repo{name}`;
+        if (!$repo{path} && !$conf{lookups}[$handles[$handle]{lookup}]{block_unsorted}) {
+            $repo{path} = `printf $conf{unsorted_directory}/$repodir`;
+        } elsif ($repo{path}) {
         } else {
             next;
         }
 
         my $clone_hook = "$conf{hook_dir}/clone/$repo{owner}:$repo{name}";
         my $pull_hook = "$conf{hook_dir}/pull/$repo{owner}:$repo{name}";
-        ( -x $clone_hook ) and $repo{clone_hook} = "cd $repo{path} && $clone_hook";
-        ( -x $pull_hook ) and $repo{pull_hook} = "cd $repo{path} && $pull_hook";
-        push( @tmp, \%repo );
+        (-x $clone_hook) and $repo{clone_hook} = "cd $repo{path} && $clone_hook";
+        (-x $pull_hook) and $repo{pull_hook} = "cd $repo{path} && $pull_hook";
+        push(@tmp, \%repo);
     }
     $handles[$handle]{memory} = \@tmp;
 }
 
 
 sub dump($handle) {
     print("------ Handle $handle ------\n");
-    print Dumper( $handles[$handle]->{memory} );
+    print Dumper($handles[$handle]->{memory});
 }
 
 
 sub folder_is_empty($directory) {
-    opendir( my $dh, $directory ) or return 1;
-    return scalar( grep( $_ ne "." && $_ ne "..", readdir($dh) ) ) == 0;
+    opendir(my $dh, $directory) or return 1;
+    return scalar(grep($_ ne "." && $_ ne "..", readdir($dh))) == 0;
 }
 
 
 sub handle_repos($handle) {
-    for my $repo ( @{ $handles[$handle]->{memory} } ) {
-        if ( folder_is_empty("$repo->{path}") ) {
-            make_path( $repo->{path} );
+    for my $repo (@{$handles[$handle]->{memory}}) {
+        if (folder_is_empty("$repo->{path}")) {
+            make_path($repo->{path});
             info("Cloning $repo->{fullname}");
             `git -C '$repo->{path}' clone $conf{clone_flags} '$repo->{url}' .`;
-            ( $? != 0 ) and error("Failed to clone $repo->{url} to $repo->{path}");
-            if ( $repo->{clone_hook} ) {
+            ($? != 0) and error("Failed to clone $repo->{url} to $repo->{path}");
+            if ($repo->{clone_hook}) {
                 info("Running clone hook for $repo->{fullname}");
                 `$repo->{clone_hook}`;
             }
-            ( $? != 0 ) and error("Failed to execute clone hook for $repo->{fullname}");
-        }
-        elsif ( !folder_is_empty("$repo->{path}/.git") ) {
+            ($? != 0) and error("Failed to execute clone hook for $repo->{fullname}");
+        } elsif (!folder_is_empty("$repo->{path}/.git")) {
             info("Pulling $repo->{fullname} to $repo->{path}");
             if (`git -C $repo->{path} status -z`) {
                 warn("$repo->{path} has an unclean tree.");
             } else {
                 `git -C $repo->{path} pull $conf{pull_flags}`;
             }
-            ( $? != 0 ) and error("Failed to pull $repo->{url} to $repo->{path}");
-            if ( $repo->{pull_hook} ) {
+            ($? != 0) and error("Failed to pull $repo->{url} to $repo->{path}");
+            if ($repo->{pull_hook}) {
                 info("Running pull hook for $repo->{fullname}");
                 `$repo->{pull_hook}`;
             }
-            ( $? != 0 ) and error("Failed to execute pull hook for $repo->{fullname}");
+            ($? != 0) and error("Failed to execute pull hook for $repo->{fullname}");
         }
     }
 }
 
 
 sub read_conf() {
     my $configdir;
-    if ( $ENV{XDG_CONFIG_HOME} ) {
+    if ($ENV{XDG_CONFIG_HOME}) {
         $configdir = "$ENV{XDG_CONFIG_HOME}/clonedev";
     } else {
         $configdir = "$ENV{HOME}/.config/clonedev";
     }
-    open( my $cfg, '<', $configdir . "/config.yml" ) or die;
-    my $hashref = YAML::Load( do { local $/; <$cfg> } );
+    open(my $cfg, '<', $configdir . "/config.yml") or die;
+    my $hashref = YAML::Load(do {local $/; <$cfg>});
     close($cfg);
-    if ( !$hashref->{hook_dir} ) {
+    if (!$hashref->{hook_dir}) {
         $hashref->{hook_dir} = "$configdir/hooks";
     }
-    for my $dir ( @{ $hashref->{directories} } ) {
-        grep( s/\//\\\//, @{ $dir->{repos} } );
-    }
     %conf = %$hashref;
 }
 
 
 sub curl_pipeline($handle) {
-    add_callback( $handle, \&json_decode );
-    add_callback( $handle, \&url_filter );
-    add_callback( $handle, \&process_urls );
-    add_callback( $handle, \&handle_repos );
+    add_callback($handle, \&json_decode);
+    add_callback($handle, \&url_filter);
+    add_callback($handle, \&process_urls);
+    # add_callback($handle, \&dump);
+    add_callback($handle, \&handle_repos);
 }
 
 # ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
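
One behavioural change worth calling out in the hunk above: the directory "repos" patterns are now matched anchored, qr/^$regex$/, against the full owner/name string, where the old code used an unanchored /$regex/. A tiny sketch with hypothetical values:

    my $fullname = "ansible-galaxy/kubernetes-core";
    for my $pattern ("ansible-galaxy/kubernetes.*", "kubernetes.*") {
        # anchoring means the pattern has to cover the whole "owner/name" string
        printf("%-32s %s\n", $pattern, $fullname =~ qr/^$pattern$/ ? "matches" : "no match");
    }
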

@@ -231,32 +262,23 @@ read_conf();
 
 my $last_handle = 0;
 
-for my $i ( keys @{ $conf{lookups} } ) {
-    my %lookup = %{ $conf{lookups}[$i] };
-    chomp( $ENV{TOKEN} = $lookup{token_cmd} ? `$lookup{token_cmd}` : "" );
-    for ( @{ $lookup{extra_headers} } ) {
+for my $i (keys @{$conf{lookups}}) {
+    my %lookup = %{$conf{lookups}[$i]};
+    chomp($ENV{TOKEN} = $lookup{token_cmd} ? `$lookup{token_cmd}` : "");
+    for (@{$lookup{extra_headers}}) {
         $_ = `printf "$_"`;
     }
-    if ( $lookup{targets} ) {
-        for my $j ( keys @{ $lookup{targets} } ) {
+    if ($lookup{targets}) {
+        for my $j (keys @{$lookup{targets}}) {
             $last_handle++;
             $handles[$last_handle]{lookup} = $i;
-            set_curl(
-                $last_handle,
-                "$lookup{api_url}/$lookup{targets}[$j]/$lookup{endpoint}",
-                @{ $lookup{extra_headers} }
-            );
+            set_curl($last_handle, "$lookup{api_url}/$lookup{targets}[$j]/$lookup{endpoint}", @{$lookup{extra_headers}});
             curl_pipeline($last_handle);
         }
-    }
-    else {
+    } else {
         $last_handle++;
         $handles[$last_handle]{lookup} = $i;
-        set_curl(
-            $last_handle,
-            "$lookup{api_url}/$lookup{endpoint}",
-            @{ $lookup{extra_headers} }
-        );
+        set_curl($last_handle, "$lookup{api_url}/$lookup{endpoint}", @{$lookup{extra_headers}});
         curl_pipeline($last_handle);
     }
 }
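
For what it's worth, the flattened set_curl() call above builds each request URL as api_url/target/endpoint; with the tralios_gitlab lookup from the new config that comes out as, for example:

    my %lookup = (
        api_url  => "https://gitlab.tralios.de/api/v4",
        endpoint => "projects",
        targets  => ["groups/197", "groups/73", "groups/26", "groups/14"],
    );
    for my $j (keys @{$lookup{targets}}) {
        print "$lookup{api_url}/$lookup{targets}[$j]/$lookup{endpoint}\n";
    }
    # e.g. https://gitlab.tralios.de/api/v4/groups/197/projects
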

@@ -264,9 +286,9 @@ for my $i ( keys @{ $conf{lookups} } ) {
 exec_multicurl();
 
 $last_handle++;
-add_callback( $last_handle, \&inject_conf_urls );
-add_callback( $last_handle, \&process_urls );
-add_callback( $last_handle, \&handle_repos );
+add_callback($last_handle, \&inject_conf_urls);
+add_callback($last_handle, \&process_urls);
+add_callback($last_handle, \&handle_repos);
 exec_callbacks($last_handle);
 
 for my $message (@messages) {