#!/usr/bin/perl
# $Id: probe-mirmon,v 1.4 2009/08/19 23:15:46 karl Exp $
# public domain. Originally written by Karl Berry, 2009.
#
# Probe rsync URLs for mirmon with rsync itself; use wget for FTP
# and libcurl for everything else.
# From description at http://people.cs.uu.nl/henkp/mirmon.
#
# Also requires a patch to mirmon itself to accept rsync urls
# (and I wanted https too):
# --- /usr/local/share/mirmon/ORIG/mirmon 2007-08-18 18:05:47.000000000 +0200
# +++ /usr/local/share/mirmon/mirmon 2009-07-03 22:38:00.000000000 +0200
# @@ -386,3 +386,3 @@
# my ( $type, $site, $home ) ;
# - if ( $url =~ m!^(ftp|http)://([^/:]+)(:\d+)?/! )
# + if ( $url =~ m!^(ftp|https?|rsync)://([^/:]+)(:\d+)?/! )
# { $type = $1 ; $site = $2 ; $home = $& ; }
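#
# Usage: probe-mirmon TIMEOUT URL
# mirmon supplies both arguments via its "probe" setting; an illustrative
# configuration line (the path and timestamp filename here are examples only):
#   probe /usr/local/bin/probe-mirmon %TIMEOUT% %URL%TIME.txt
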
use strict;
use warnings;

use Date::Parse ();   # dev-perl/TimeDate
use File::Tempdir;    # dev-perl/File-Tempdir
use WWW::Curl::Easy;
use Capture::Tiny qw/capture/;

main(@ARGV);

sub main {
  my ( $timeout, $url ) = @_;
  if ( $url =~ m,^rsync://, ) {
    handle_rsync( $timeout, $url );
  }
  elsif ( $url =~ m,^ftp://, ) {
    # Hacky, at some point CURL stopped returning the output here;
    # just go back to wget for now.
    #handle_libcurl( $timeout, $url );
    handle_wget( $timeout, $url );
  }
  else {
    handle_libcurl( $timeout, $url );
  }
}
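
# Each handler prints the mirror timestamp on stdout (normalized by
# munge_date, except for the raw wget output) and exits 0 on success;
# failures exit nonzero: 800-802 for transfer problems, 900 if the
# downloaded timestamp file cannot be opened.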
sub handle_libcurl {
  my ( $timeout, $url ) = @_;

  my $curl = WWW::Curl::Easy->new;
  $curl->setopt(CURLOPT_HEADER, 0);
  $curl->setopt(CURLOPT_CONNECTTIMEOUT, $timeout);
  $curl->setopt(CURLOPT_TIMEOUT, $timeout);
  $curl->setopt(CURLOPT_FTP_USE_EPSV, 1);
  $curl->setopt(CURLOPT_URL, $url);
  $curl->setopt(CURLOPT_VERBOSE, 1) if $url =~ m,^ftp://,;

  # A filehandle, reference to a scalar or reference to a typeglob can be used here.
  my $response_body;
  $curl->setopt(CURLOPT_WRITEDATA, \$response_body);

  # Start the actual request.
  my $retcode = $curl->perform;

  # Look at the results: any transport error, non-200 response, or empty
  # body is reported as a failed probe.
  #print("An error happened: $retcode ".$curl->strerror($retcode)." ".$curl->errbuf."\n");
  exit 800 unless ($retcode == 0);
  my $response_code = $curl->getinfo(CURLINFO_HTTP_CODE);
  exit 801 unless ($response_code == 200);
  exit 802 unless defined($response_body);

  chomp $response_body;
  print(munge_date($response_body), "\n");
  exit 0;
}

sub handle_wget {
  my ( $timeout, $url ) = @_;
  # TODO: replace this with native HTTP
  # TODO: munge the output!
  exec {'/usr/bin/wget'} 'wget', qw( -q --passive-ftp -O - -T ), $timeout, '-t', 1, $url;
}

sub handle_rsync {
  my ( $timeout, $url ) = @_;
  my $tmpdir = File::Tempdir->new();
  my $dir    = $tmpdir->name;

  my $file = $url;
  $file =~ s/\W/_/g;  # turn the URL into a safe filename: non-word chars become _

  # https://stackoverflow.com/a/6331618/1583179
  my ($stdout, $stderr, $ret) = capture {
    system '/usr/bin/rsync', qw( -q --no-motd --timeout ), $timeout, $url, "$dir/$file";
  };
  if ($ret != 0) {
    #warn "rsync failed, exit code $ret, $! $? $@\n";
    #exit $ret;
    exit 800;
  }

  open my $fh, '<', "$dir/$file" or do {
    warn "opening downloaded timestamp failed: $!";
    exit 900;  # ridiculous exit code.
  };
  print munge_date(<$fh>);
  exit 0;
}
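
# Normalize a timestamp string to a Unix epoch.  Illustrative calls
# (hypothetical inputs, not taken from any particular mirror):
#   munge_date("1650000000\n")                    => 1650000000 (already an epoch in range)
#   munge_date("Fri, 15 Apr 2022 06:00:00 GMT")   => the epoch, via Date::Parse::str2time
#   munge_date("not a date")                      => -1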
sub munge_date {
  no warnings 'numeric';  ## no critic (TestingAndDebugging::ProhibitNoWarnings)
  my $timestr   = shift;
  my $timestamp = int($timestr);

  # Sanity range for a bare epoch: 2020-01-01 .. 2038-01-01 UTC.
  my $year2020 = 1577836800;
  my $year2038 = 2145916800;

  # If the string starts with an epoch, just use that
  if ($timestamp >= $year2020 && $timestamp <= $year2038) {
    return $timestamp;
  }
  else {
    my $timestamp = Date::Parse::str2time($timestr);
    return $timestamp if defined($timestamp);
  }
  return -1;
}