#!/usr/bin/env perl

#
# $Id: crawler,v 1.3 2025/06/27 02:14:47 snw Exp $
#  Copyright (C) 2025 Coherent Logic Development LLC
#
# Author: Serena Willis <snw@coherent-logic.com>
#
# Licensed AGPL-3.0
#
# $Log: crawler,v $
# Revision 1.3  2025/06/27 02:14:47  snw
# Initial operational capability
#
# Revision 1.2  2025/06/25 19:38:48  snw
# Add indexer
#
# Revision 1.1  2025/06/25 13:44:37  snw
# Renaming
#
# Revision 1.2  2025/06/25 03:10:01  snw
# Initial working crawler
#
# Revision 1.1.1.1  2025/06/23 23:17:08  snw
# Initial commit
#
#

use strict;
use warnings;

use Getopt::Long;
use HTTP::Tiny;
use HTML::TreeBuilder;
use URI;
use DBI;
use WWW::RobotRules;
use Fcntl qw(:flock);
use LWP::Simple qw(get);

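#
# The queries below assume three MySQL tables: url_domains, blacklist, and
# crawl_queue.  A minimal schema sketch consistent with those queries is
# given here; the column types and key constraints are assumptions, not
# something defined in this file:
#
#   CREATE TABLE url_domains (url_domain VARCHAR(255) PRIMARY KEY);
#   CREATE TABLE blacklist   (url_domain VARCHAR(255) PRIMARY KEY);
#   CREATE TABLE crawl_queue (
#       url         VARCHAR(255) PRIMARY KEY,
#       parent_url  VARCHAR(255),
#       url_domain  VARCHAR(255),
#       scheme      VARCHAR(16)
#   );
#
# If url and url_domain carry unique keys as sketched, duplicate inserts
# fail and are simply counted as skips by store_url().
#
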
my $rules = WWW::RobotRules->new('pandia-crawler/0.0.1');
my $dbh = "";
my $dsn = "";
my $skips = 0;
my $inserts = 0;
my $seed = "";
my $depth = 0;
my $blacklist_matches = 0;
my $robots_txt_denies = 0;
my $invalid_scheme_skips = 0;
my ($dbhost, $dbname, $dbusername, $dbpw);
my $maxdepth = 0;

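# store_url($url, $parent)
#
# Enqueue a discovered URL for later analysis.  Only http/https URLs of 255
# characters or fewer are accepted.  The URL's domain is recorded in
# url_domains and the URL itself in crawl_queue; when the queue insert
# succeeds and the current depth is still below --maxdepth, the new URL is
# immediately crawled in turn.  Progress characters: "." stored, "d" insert
# failed (most likely a duplicate), "l" depth limit reached, "x" rejected.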
sub store_url {
    my ($url, $parent) = @_;

    if($url ne "" && length($url) <= 255 && substr($url, 0, 6) ne "mailto" && substr($url, 0, 4) eq "http") {

        my $u = URI->new($url);
        my $domain = $u->host;
        my $scheme = $u->scheme;

        my $sth = $dbh->prepare("INSERT INTO url_domains (url_domain) VALUES (?)");
        $sth->execute($domain);

        my $ins = $dbh->prepare("INSERT INTO crawl_queue (url, parent_url, url_domain, scheme) VALUES (?, ?, ?, ?)");

        if(not $ins->execute($url, $parent, $domain, $scheme)) {
            $skips = $skips + 1;
            print "d";
        }
        else {
            print ".";
            $inserts = $inserts + 1;
            if($depth < $maxdepth) {
                $depth = $depth + 1;
                crawl_url($url);
                $depth = $depth - 1;   # restore the depth once the recursive crawl returns
            }
            else {
                print "l";
            }
        }
    }
    else {
        print "x";
    }
}

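# crawl_url($url)
#
# Fetch a single page and hand every link found on it to store_url().
# Skips URLs with a scheme other than http/https ("s"), URLs whose domain
# appears in the blacklist table ("b"), and URLs disallowed by the site's
# robots.txt ("r").  Links beginning with "/" or "#" are made absolute
# against the page's base URL before being stored.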
sub crawl_url {
    my ($url) = @_;

    my $u = URI->new($url);

    if (!defined $u->scheme || ($u->scheme ne "http" && $u->scheme ne "https")) {
        $invalid_scheme_skips = $invalid_scheme_skips + 1;
        print "s";
        return;
    }

    my $sth = $dbh->prepare("SELECT url_domain FROM blacklist WHERE url_domain=?");
    $sth->execute($u->host);
    if($sth->rows > 0) {
        print "b";
        $blacklist_matches = $blacklist_matches + 1;
        return;
    }

    my $robots_url = $u->scheme . '://' . $u->host . "/robots.txt";

    my $robots_txt = get $robots_url;
    $rules->parse($robots_url, $robots_txt) if defined $robots_txt;

    if(!$rules->allowed($url)) {
        print "r";
        $robots_txt_denies = $robots_txt_denies + 1;
        return;
    }

    # crude base URL (scheme://host) used to absolutize relative links below
    my $origurl = $url;
    my ($baseurl) = $origurl =~ m! (.+?\w) (?: /|\z) !x;

    my $http = HTTP::Tiny->new(agent => "pandia-crawler/0.0.1");
    my $tree = HTML::TreeBuilder->new();

    my $response = $http->get($url);
    return if not $response->{success};   # nothing to parse if the fetch failed

    $tree->parse($response->{content});

    my @links = $tree->find_by_tag_name('a');

    my $href = "";
    my $firstchar = "";
    my $final = "";

    foreach my $link (@links) {
        $href = $link->attr('href');
        next if not defined $href;   # skip anchors without an href attribute

        $firstchar = substr($href, 0, 1);
        $final = "";

        if($firstchar eq '/') {
            $final = $baseurl . $href;
        }
        elsif($href eq '##') {
            $final = $baseurl;
        }
        elsif($firstchar eq '#') {
            $final = $baseurl . '/' . $href;
        }
        else {
            $final = $href;
        }

        store_url($final, $url);
    }
}

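# Main program.  Typical invocations (host, credentials, and URL below are
# examples only):
#
#   Crawl a single seed URL:
#     ./crawler --dbhost=localhost --dbname=pandia --dbusername=pandia \
#               --dbpw=secret --seed=https://example.com/ --maxdepth=2
#
#   Crawl every URL already in crawl_queue (takes an exclusive lock on
#   pandia_crawler.lock so that only one queue run executes at a time):
#     ./crawler --dbhost=localhost --dbname=pandia --dbusername=pandia --dbpw=secret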
$| = 1;
print "pandia crawler v0.0.1\n";
print " Copyright (C) 2025 Coherent Logic Development LLC\n\n";

GetOptions("dbhost=s" => \$dbhost,
           "dbname=s" => \$dbname,
           "dbusername=s" => \$dbusername,
           "dbpw=s" => \$dbpw,
           "seed=s" => \$seed,
           "maxdepth=n" => \$maxdepth)
    or die("error in command line arguments");

die "pandia:  --dbhost, --dbname, --dbusername, and --dbpw are required\n"
    unless defined $dbhost && defined $dbname && defined $dbusername && defined $dbpw;

print "pandia:  connecting to $dbname database at $dbhost...";

$dsn = "DBI:mysql:database=$dbname;host=$dbhost;port=3306;mysql_connect_timeout=5;";
$dbh = DBI->connect($dsn, $dbusername, $dbpw, {RaiseError => 0, PrintError => 0});
die "pandia:  failed to connect to MySQL database: $DBI::errstr\n" unless $dbh;

print "[OK]\n";

if($seed ne "") {
    print "pandia:  crawling seed $seed to a maximum depth of $maxdepth";
    sleep 1;
    crawl_url($seed);
    print "[OK]\n";
}
else {
    # queue runs take an exclusive, non-blocking lock so that only one
    # queue crawl can run at a time
    open my $file, ">", "pandia_crawler.lock" or die $!;
    flock $file, LOCK_EX|LOCK_NB or die "Unable to lock file $!";

    my $sth = $dbh->prepare("SELECT url FROM crawl_queue");
    $sth->execute();
    my $qlen = $sth->rows;

    print "pandia:  crawling queue with length of $qlen to a maximum depth of $maxdepth";
    sleep 1;
    while (my @row = $sth->fetchrow_array()) {
        my $url = $row[0];
        crawl_url($url);
    }
    print "[OK]\n";
}

my $total = $inserts + $skips;

print "pandia:  $inserts URL(s) enqueued for analysis; $skips skipped [$total URL(s) seen this run]\n";
print "          - $blacklist_matches blacklist matches\n";
print "          - $invalid_scheme_skips URLs skipped due to invalid scheme\n";
print "          - $robots_txt_denies URLs skipped due to robots.txt\n";
