#!/usr/bin/env perl

#
# $Id: crawler,v 1.7 2025/07/02 15:03:05 snw Exp $
#  Copyright (C) 2025 Coherent Logic Development LLC
#
# Author: Serena Willis <snw@coherent-logic.com>
#
# Licensed AGPL-3.0
#
# $Log: crawler,v $
# Revision 1.7  2025/07/02 15:03:05  snw
# Add support for restricted mode
#
# Revision 1.6  2025/07/01 19:20:47  snw
# Stop crawling and indexing URLs containing page fragments
#
# Revision 1.5  2025/06/28 00:33:32  snw
# Update locking
#
# Revision 1.4  2025/06/27 16:20:30  snw
# Add blacklist
#
# Revision 1.3  2025/06/27 02:14:47  snw
# Initial operational capability
#
# Revision 1.2  2025/06/25 19:38:48  snw
# Add indexer
#
# Revision 1.1  2025/06/25 13:44:37  snw
# Renaming
#
# Revision 1.2  2025/06/25 03:10:01  snw
# Initial working crawler
#
# Revision 1.1.1.1  2025/06/23 23:17:08  snw
# Initial commit
#
#

use strict;
use warnings;

use Getopt::Long;
use HTTP::Tiny;
use HTML::TreeBuilder;
use URI;
use DBI;
use WWW::RobotRules;
use Fcntl qw(:flock);
use LWP::Simple qw(get);
use Config::IniFiles;

my $rules = WWW::RobotRules->new('pandia-crawler/0.0.1');
my $dbh = "";
my $dsn = "";
my $skips = 0;
my $inserts = 0;
my $seed = "";
my $depth = 0;
my $maxdepth = 0;       # set by --maxdepth; 0 means "do not recurse"
my @allowed_tlds;       # populated from allowed_tlds in /etc/pandia.ini
my $blacklist_matches = 0;
my $robots_txt_denies = 0;
my $invalid_scheme_skips = 0;
my $mode;

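# store_url($url, $parent): enqueue a discovered URL for crawling and
# indexing, printing a one-character status code per URL (see the legend
# printed at startup). Deduplication is assumed to be delegated to the
# database: a failed INSERT into crawl_queue (e.g. due to a unique key
# on url) is counted as a duplicate.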
sub store_url {
    my ($url, $parent) = @_;

    if (index($url, '#') != -1) {
        print "F";
        return;
    }

    if($url ne "" && length($url) <= 255 && substr($url, 0, 6) ne "mailto" && substr($url, 0, 4) eq "http") {

        my $u = URI->new($url);
        my $domain = $u->host;
        my $scheme = $u->scheme;
        my @parts = split(/\./, $domain);
        my $tld = $parts[-1];

        if ($mode eq 'restricted') {
            my $tld_ok = 0;
            foreach my $allowed (@allowed_tlds) {
                if($tld eq $allowed) {
                    $tld_ok = 1;
                    last;
                }
            }
            if($tld_ok == 0) {
                print "T";
                return;
            }
        }

        my $sth = $dbh->prepare("INSERT INTO url_domains (url_domain) VALUES (?)");
        $sth->execute($domain);

        my $ins = $dbh->prepare("INSERT INTO crawl_queue (url, parent_url, url_domain, scheme) VALUES (?, ?, ?, ?)");

        if(not $ins->execute($url, $parent, $domain, $scheme)) {
            # with RaiseError/PrintError disabled, a failed INSERT is
            # treated as a duplicate URL
            $skips = $skips + 1;
            print "d";
        }
        else {
            print ".";
            $inserts = $inserts + 1;
            # balance the depth counter around the recursive crawl so that
            # early returns inside crawl_url cannot leak depth increments
            if($depth < $maxdepth) {
                $depth = $depth + 1;
                crawl_url($url);
                $depth = $depth - 1;
            }
            else {
                print "l";
            }
        }
    }
    else {
        print "x";
    }
}

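# crawl_url($url): fetch a page and pass every link found on it to
# store_url. A URL is crawled only if its scheme is http/https, its host
# is not in the blacklist table, and robots.txt allows it. Note that
# robots.txt is re-fetched for every URL crawled.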
sub crawl_url {
    my ($url) = @_;

    my $u = URI->new($url);
    my $scheme = $u->scheme // '';

    if ($scheme ne "http" && $scheme ne "https") {
        $invalid_scheme_skips = $invalid_scheme_skips + 1;
        print "s";
        return;
    }

    my $sth = $dbh->prepare("SELECT url_domain FROM blacklist WHERE url_domain=?");
    $sth->execute($u->host);
    if($sth->rows > 0) {
        print "b";
        $blacklist_matches = $blacklist_matches + 1;
        return;
    }

    my $robots_url = $u->scheme . '://' . $u->host . "/robots.txt";

    my $robots_txt = get $robots_url;
    $rules->parse($robots_url, $robots_txt) if defined $robots_txt;

    if(!$rules->allowed($url)) {
        print "r";
        $robots_txt_denies = $robots_txt_denies + 1;
        return;
    }

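    # Derive the base URL (scheme://host) from the page URL so relative
    # links can be resolved below; the regex captures everything up to
    # the first '/' after the host. (URI->new_abs($href, $url) would be
    # a more robust alternative.)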
    my $origurl = $url;
    my ($baseurl) = $origurl =~ m! (.+?\w) (?: /|\z) !x;

    my $http = HTTP::Tiny->new(agent => "pandia-crawler/0.0.1");
    my $tree = HTML::TreeBuilder->new();

    my $response = $http->get($url);
    return unless $response->{success};

    $tree->parse($response->{content});

    my @links = $tree->find_by_tag_name('a');

    my $href = "";
    my $firstchar = "";
    my $final = "";

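    # Resolve each href against the base URL: a leading '/' is
    # host-relative, a bare '##' appears to be used as a self-reference,
    # and a leading '#' is a fragment on the current page (store_url
    # rejects fragment URLs anyway). Anything else is taken as absolute.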
    foreach my $link (@links) {
        $href = $link->attr('href');
        next unless defined $href;
        $firstchar = substr($href, 0, 1);
        $final = "";

        if($firstchar eq '/') {
            $final = $baseurl . $href;
        }
        elsif($href eq '##') {
            $final = $baseurl;
        }
        elsif($firstchar eq '#') {
            $final = $baseurl . '/' . $href;
        }
        else {
            $final = $href;
        }

        store_url($final, $url);
    }
}

$| = 1;
print "pandia crawler v0.0.1\n";
print " Copyright (C) 2025 Coherent Logic Development LLC\n\n";

my $profile;

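# --profile selects a section of /etc/pandia.ini; --seed crawls outward
# from a single URL instead of draining the crawl_queue table; --maxdepth
# bounds recursive crawling.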
GetOptions("profile=s" => \$profile,
           "seed=s" => \$seed,
           "maxdepth=i" => \$maxdepth)
    or die("error in command line arguments");

my $cfg = Config::IniFiles->new(-file => "/etc/pandia.ini");

my $dbhost = $cfg->val($profile, 'dbhost');
my $dbname = $cfg->val($profile, 'dbname');
my $dbusername = $cfg->val($profile, 'dbuser');
my $dbpw = $cfg->val($profile, 'dbpass');
my $tmp = $cfg->val($profile, 'allowed_tlds');

if($tmp ne '*') {
    $mode = 'restricted';
    @allowed_tlds = split(',', $tmp);
    print "pandia:  crawler restricted to these TLDs:  ";
    foreach (@allowed_tlds) {
        print ".$_ ";
    }
    print "\n";
}
else {
    print "pandia:  crawler unrestricted\n";
    $mode = 'normal';
}

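# Connect to MySQL. RaiseError/PrintError stay disabled because duplicate
# INSERTs are expected during normal operation and counted as skips.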
print "pandia:  connecting to $dbname database at $dbhost...";

$dsn = "DBI:mysql:database=$dbname;host=$dbhost;port=3306;mysql_connect_timeout=5;";
$dbh = DBI->connect($dsn, $dbusername, $dbpw, {RaiseError => 0, PrintError => 0});
die "pandia:  failed to connect to MySQL database: " . DBI->errstr unless $dbh;

print "[OK]\n";

print "pandia:  each character represents the following status for a URL:\n";
print "  .    URL added to indexer queue\n";
print "  l    crawl exceeded max depth\n";
print "  x    URL was empty, too long, or not an http(s) URL\n";
print "  d    URL was a duplicate\n";
print "  s    URL scheme was not http or https\n";
print "  b    URL domain was blacklisted\n";
print "  r    crawl was blocked by robots.txt\n";
print "  F    URL contained a fragment\n";
print "  T    URL was from a disallowed top-level domain\n\n";

if($seed ne "") {
    print "pandia:  crawling seed $seed to a maximum depth of $maxdepth...";
    sleep 1;
    crawl_url($seed);
    print "[OK]\n";
}
else {
    # take a non-blocking exclusive lock so that only one queue-draining
    # crawler instance runs at a time; the lock is released on exit
    open my $file, ">", "/tmp/pandia_crawler.lock" or die $!;
    flock $file, LOCK_EX|LOCK_NB or die "Unable to lock file: $!";

    my $sth = $dbh->prepare("SELECT url FROM crawl_queue");
    $sth->execute();
    my $qlen = $sth->rows;

    print "pandia:  crawling queue with length of $qlen to a maximum depth of $maxdepth";
    sleep 1;
    while (my @row = $sth->fetchrow_array()) {
        my $url = $row[0];
        crawl_url($url);
    }
    print "[OK]\n";
}

my $total = $inserts + $skips;

print "pandia:  $inserts URL(s) enqueued for analysis; $skips skipped [$total URL(s) seen this run]\n";
print "          - $blacklist_matches blacklist matches\n";
print "          - $invalid_scheme_skips URLs skipped due to invalid scheme\n";
print "          - $robots_txt_denies URLs skipped due to robots.txt\n";
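
# Example usage (assuming a [live] section exists in /etc/pandia.ini):
#
#   ./crawler --profile=live --seed=https://example.com/ --maxdepth=2
#   ./crawler --profile=live --maxdepth=2    # drain the existing crawl queue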
