sitemap-e.pl
#!/usr/bin/perl -w
use strict;
use warnings;
use WWW::Mechanize;
use URI;
use DateTime;
use Getopt::Long;
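# Example invocation (illustrative only; the host and --skip values below are placeholders):
#   perl sitemap-e.pl http://www.example.com/ --skip=/private/ --skip=/tmp/
# The script writes sitemap.xml, sitemap.html and robots.txt to the current directory.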
my $mech = WWW::Mechanize->new();
# Parse --skip options first so the base URL can be read cleanly from whatever remains in @ARGV.
my @unwanted_urls;
GetOptions("skip=s@" => \@unwanted_urls);
my $base_url = shift @ARGV or die "Usage: $0 <base_url> [--skip=<substring> ...]\n";
my %scanned_urls;   # url => url it was discovered from (1 for the start page)
my @crawl_urls;     # queue of URLs still to visit
my $current_url = $base_url;
$scanned_urls{$current_url} = 1;
my $header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!-- Created by the nation of alex Sitemap Generator -->\n";
my $html = "<!DOCTYPE HTML><html lang=\"en\"><head><title>Sitemap HTML</title></head><body><ul>\n";
# XML sitemap
open SITEMAP_FILE, '>', 'sitemap.xml' or die "Could not create sitemap.xml: $!\n";
print SITEMAP_FILE $header;
print SITEMAP_FILE '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">' . "\n";
# HTML sitemap
open SITEMAP_HTML, '>', 'sitemap.html' or die "Could not create sitemap.html: $!\n";
print SITEMAP_HTML $html;
# robots.txt pointing crawlers at the XML sitemap (assumes $base_url ends with a trailing slash)
open SITEMAP_ROBOTS, '>', 'robots.txt' or die "Could not create robots.txt: $!\n";
print SITEMAP_ROBOTS "# Robots.txt generated by the nation of alex sitemap generator\nUser-agent: *\nDisallow:\nSitemap: " . $base_url . "sitemap.xml\n";
close SITEMAP_ROBOTS;
while ($current_url)
{
    # Fetch the page; on failure, report where the link came from and move on.
    my $response;
    eval { $response = $mech->get($current_url); };
    if ($@) {
        print STDERR "\tCouldn't retrieve \"$current_url\".\n\t\tIt was found via " . $scanned_urls{$current_url} . "\n";
        $current_url = pop(@crawl_urls);
        next;
    }
    if ($response->content_type eq 'text/html') {
        # Use the Last-Modified header when available, otherwise the crawl time.
        my $datetime = DateTime->from_epoch(epoch => time());
        if ($response->last_modified) {
            $datetime = DateTime->from_epoch(epoch => $response->last_modified);
        }
        my @links = $mech->links();
        for my $link (@links) {
            my $abs_url = URI->new_abs($link->url, $current_url)->canonical;
            next if $abs_url =~ /\#/;   # skip fragment links
            next if $abs_url =~ /\?/;   # skip URLs with query strings
            next if $abs_url =~ /auto/; # skip URLs containing "auto"
            # Only follow links that contain the base URL (matched literally) and have not been seen yet.
            if ($abs_url =~ m/\Q$base_url\E/ && !exists($scanned_urls{$abs_url})) {
                if (grep { index($abs_url, $_) > -1 } @unwanted_urls) {
                    print STDERR "\tSkipping $abs_url as requested\n";
                } else {
                    push(@crawl_urls, $abs_url);
                }
                $scanned_urls{$abs_url} = $current_url;
            }
        }
        print "$current_url\n";
        print SITEMAP_FILE '<url>' . "\n" . '<loc>' . $current_url . '</loc><lastmod>' . $datetime->ymd . 'T' . $datetime->hms . '+00:00</lastmod>' . "\n" . '</url>' . "\n";
        my $title = $mech->title() || $current_url;
        print SITEMAP_HTML '<li><a href="' . $current_url . '">' . $title . '</a></li>' . "\n";
    }
    $current_url = pop(@crawl_urls);
}
print SITEMAP_FILE '</urlset>';
close SITEMAP_FILE;
print SITEMAP_HTML '</ul></body></html>';
close SITEMAP_HTML;
# Validate the generated sitemap.xml with xmllint (checks well-formedness only; requires xmllint to be installed).
print xml_check();
sub xml_check {
    return system('xmllint --noout --nowarning sitemap.xml') == 0
        ? "sitemap.xml is well-formed\n"
        : "sitemap.xml failed xmllint validation\n";
}
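# For reference, each crawled HTML page is written to sitemap.xml as an entry of the
# form below (values are illustrative):
#   <url>
#   <loc>http://www.example.com/about/</loc><lastmod>2015-01-01T00:00:00+00:00</lastmod>
#   </url>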