--- pandia/Pandia.pm	2025/06/28 23:54:11	1.1
+++ pandia/Pandia.pm	2025/06/30 02:18:44	1.2
@@ -1,7 +1,7 @@
 #!/usr/bin/env perl
 
 #
-# $Id: Pandia.pm,v 1.1 2025/06/28 23:54:11 snw Exp $
+# $Id: Pandia.pm,v 1.2 2025/06/30 02:18:44 snw Exp $
 # Copyright (C) 2025 Coherent Logic Development LLC
 #
 # Author: Serena Willis
@@ -9,6 +9,9 @@
 # Licensed AGPL-3.0
 #
 # $Log: Pandia.pm,v $
+# Revision 1.2 2025/06/30 02:18:44 snw
+# Updates
+#
 # Revision 1.1 2025/06/28 23:54:11 snw
 # Add new OO module
 #
@@ -17,7 +20,7 @@
 package Pandia;
 
 use strict;
-#use warnings;
+use warnings;
 
 use HTTP::Tiny;
 use HTML::TreeBuilder;
@@ -28,28 +31,85 @@
 use Fcntl qw(:flock);
 use LWP::Simple qw(get);
 use Config::IniFiles;
 use Thread::Pool;
+use HTTP::Date;
+use POSIX qw(strftime);
 
 my $indices_waiting : shared;
 
 sub index {
-    my ($url, $domain, $dsn, $dbuser, $dbpass) = @_;
+    my ($url, $domain, $dsn, $dbuser, $dbpass, $reindex) = @_;
+    print "pandia: thread connecting to MySQL database...";
+
     my $dbh = DBI->connect($dsn, $dbuser, $dbpass, {RaiseError => 0, PrintError => 1});
     if(not $dbh) {
-        print "pandia: failed to connect to MySQL database\n";
+        print "[FAIL]\n";
        goto nodb_cleanup;
     }
+    print "[OK]\n";
 
     my $http = HTTP::Tiny->new(agent => "pandia-crawler/0.0.1", timeout => 60);
     my $tree = HTML::TreeBuilder->new();
+    my $tries;
 
-    my $head = $http->head($url);
-    if(not $head->{success}) {
-        print "pandia: http HEAD failure; skipping $url\n";
-        goto cleanup;
+    my $head;
+    print "pandia: HEAD $url\n";
+    $head = $http->head($url);
+    if(not $head->{success}) {
+        print "pandia: HEAD fail $url\n";
+        goto nodb_cleanup;
+    }
+    else {
+        print "pandia: HEAD OK $url\n";
     }
+
+  proc_head:
     my $headers = $head->{headers};
     my $content_type = $headers->{'content-type'};
+    my $last_modified;
+    my $last_modified_sys;
+
+    if ($reindex == 1) {
+        print "pandia: REINDEX $url\n";
+        my $last_modified_t = $headers->{'last-modified'};
+        $last_modified_sys = str2time($last_modified_t);
+
+        if($last_modified_sys) {
+            print "pandia: GET_LAST_INDEX_DT $url\n";
+            my $sth = $dbh->prepare("SELECT last_indexed_dt FROM url_fulltext WHERE url=?");
+            $sth->execute($url);
+            print "pandia: GOT_LAST_INDEX_DT $url\n";
+
+            if($sth->rows < 1) {
+                print "pandia: page not indexed\n";
+                goto nodb_cleanup;
+            }
+
+            my $hashref = $sth->fetchrow_hashref();
+            my $last_indexed = str2time($hashref->{last_indexed_dt});
+
+            if($last_modified_sys > $last_indexed) {
+                print "pandia: $url has been modified since the last time it was indexed\n";
+                my $sth = $dbh->prepare("DELETE FROM url_fulltext WHERE url=?");
+                $sth->execute($url);
+                print "pandia: INDEXDELETE $url\n";
+            }
+            else {
+                print "pandia: $url is still up-to-date in the index\n";
+                goto cleanup;
+            }
+
+        }
+        else {
+            print "pandia: no modify info; skipping $url\n";
+            goto nodb_cleanup;
+        }
+    }
+    else {
+        print "pandia: INDEX $url\n";
+        $last_modified = strftime("%Y-%m-%d %H:%M", localtime);
+    }
+
     my $title = "";
     my $fulltext = "";
     my $fullhtml = "";
@@ -80,7 +140,7 @@ sub index {
     $title = $tree->look_down('_tag', 'title')->as_text;
     $title =~ s/[^\x00-\x7F]//g;
 
-    #print "pandia: processing $url [$title]\n";
+    print "pandia: processing $url [$title]\n";
 
     $fulltext = $tree->as_text;
     $fulltext =~ s/[^\x00-\x7F]//g;
@@ -100,6 +160,7 @@ sub index {
     $sth = $dbh->prepare("INSERT INTO url_fulltext(url, url_domain, page_title, body, body_html) VALUES (?, ?, ?, ?, ?)");
     my $tries = 0;
     while(1) {
+        print "pandia: INSERTINDEX $url\n";
        $sth->execute($url, $domain, $title, $fulltext, $fullhtml);
        if($DBI::err) {
            if($tries > 5) {
@@ -122,7 +183,7 @@ sub index {
 
 cleanup:
     my $sthuc = $dbh->prepare("UPDATE crawl_queue SET analyzed=1 WHERE url=?");
-    my $tries = 0;
+    $tries = 0;
     while(1) {
        $sthuc->execute($url);
        if($DBI::err) {
@@ -142,7 +203,6 @@ sub index {
     $dbh->disconnect();
 
 nodb_cleanup:
-    lock($indices_waiting);
     $indices_waiting = $indices_waiting - 1;
 }
 
@@ -193,7 +253,7 @@ sub run_index_batch {
     my $dbh = DBI->connect($self->{dsn}, $self->{dbuser}, $self->{dbpass}, {RaiseError => 1, PrintError => 0});
 
     my $sth = $dbh->prepare("SELECT * FROM crawl_queue WHERE analyzed=0 LIMIT ?");
-    $sth->execute($self->{index_workers} * 4);
+    $sth->execute($self->{index_workers});
 
     $indices_waiting = $sth->rows;
 
@@ -206,14 +266,55 @@ sub run_index_batch {
     while (my $hashref = $sth->fetchrow_hashref()) {
        $tmpi = $tmpi + 1;
        print "pandia: sending $hashref->{url} to worker thread\n";
-       $self->{index_pool}->job($hashref->{url}, $hashref->{url_domain}, $self->{dsn}, $self->{dbuser}, $self->{dbpass});
+       $self->{index_pool}->job($hashref->{url}, $hashref->{url_domain}, $self->{dsn}, $self->{dbuser}, $self->{dbpass}, 0);
     }
+    print "pandia: $indices_waiting total pages to be processed\n";
+
+done:
+    $sth->finish();
+    $dbh->disconnect();
+
+    my $start_time = time();
+    while($indices_waiting > 0) {
+        my $end_time = time();
+        my $time_diff = $end_time - $start_time;
+
+        if($time_diff > 60) {
+            print "pandia: timing out\n";
+            last;
+        }
+        print "pandia: $indices_waiting URLs still in-process [$time_diff seconds elapsed]\n";
+        sleep(10);
+    }
+    $self->{index_pool}->shutdown;
+}
+
+sub run_reindex_batch {
+    my ($self) = @_;
+
+    my $dbh = DBI->connect($self->{dsn}, $self->{dbuser}, $self->{dbpass}, {RaiseError => 1, PrintError => 0});
+
+    my $sth = $dbh->prepare("SELECT url, url_domain FROM crawl_queue WHERE analyzed=1 ORDER BY RAND() LIMIT ?");
+    $sth->execute($self->{index_workers});
+
+    $indices_waiting = $sth->rows;
+
+    if($indices_waiting == 0) {
+        print "pandia: nothing to reindex\n";
+        goto done;
+    }
+
+    my $tmpi = 0;
+    while (my $hashref = $sth->fetchrow_hashref()) {
+        $tmpi = $tmpi + 1;
+        print "pandia: sending $hashref->{url} to worker thread\n";
+        $self->{index_pool}->job($hashref->{url}, $hashref->{url_domain}, $self->{dsn}, $self->{dbuser}, $self->{dbpass}, 1);
+    }
 
     print "pandia: $indices_waiting total pages to be processed\n";
 
-done:
+ done:
     $sth->finish();
     $dbh->disconnect();
 
@@ -222,7 +323,7 @@ done:
        my $end_time = time();
        my $time_diff = $end_time - $start_time;
 
-       if($time_diff > $indices_waiting * 20) {
+       if($time_diff > 60) {
            print "pandia: timing out\n";
            last;
        }
@@ -230,6 +331,7 @@ done:
        sleep(10);
     }
     $self->{index_pool}->shutdown;
+
 }
 
 1;
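
The reindex path above decides whether a page is stale by parsing the server's Last-Modified header with HTTP::Date::str2time and comparing it to the stored last_indexed_dt. A minimal standalone sketch of that check, with a hard-coded URL and a stand-in $last_indexed epoch in place of the url_fulltext lookup:

#!/usr/bin/env perl
# Sketch of the Last-Modified freshness check in sub index's reindex
# path. $url and $last_indexed are illustrative stand-ins for a
# crawl_queue row and the last_indexed_dt column.

use strict;
use warnings;
use HTTP::Tiny;
use HTTP::Date;

my $url = "https://www.example.com/";
my $last_indexed = str2time("Sun, 01 Jun 2025 00:00:00 GMT");

my $http = HTTP::Tiny->new(agent => "pandia-crawler/0.0.1", timeout => 60);
my $head = $http->head($url);
die "pandia: HEAD fail $url\n" unless $head->{success};

# HTTP::Tiny lower-cases header names; Last-Modified is optional, so
# guard before handing it to str2time.
my $last_modified_t = $head->{headers}{'last-modified'};
my $last_modified_sys = defined($last_modified_t) ? str2time($last_modified_t) : undef;

if (not $last_modified_sys) {
    print "pandia: no modify info; skipping $url\n";
}
elsif ($last_modified_sys > $last_indexed) {
    print "pandia: $url has been modified since the last time it was indexed\n";
}
else {
    print "pandia: $url is still up-to-date in the index\n";
}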
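
Both the fulltext INSERT and the crawl_queue UPDATE wrap $sth->execute in the same bounded retry loop, checking $DBI::err with RaiseError turned off. A sketch of that pattern in isolation; the DSN, credentials, URL, and back-off interval are assumptions for illustration (the patch only fixes the bound at five retries):

#!/usr/bin/env perl
# Sketch of the bounded retry loop used around $sth->execute in sub
# index. DSN, credentials, URL, sleep interval, and messages below
# are placeholders; only the retry bound of five comes from the patch.

use strict;
use warnings;
use DBI;

my $dbh = DBI->connect("DBI:mysql:database=pandia;host=localhost",
                       "dbuser", "dbpass",
                       {RaiseError => 0, PrintError => 1})
    or die "pandia: failed to connect to MySQL database\n";

my $sth = $dbh->prepare("UPDATE crawl_queue SET analyzed=1 WHERE url=?");

my $tries = 0;
while(1) {
    $sth->execute("https://www.example.com/");
    if($DBI::err) {
        # transient failures (deadlocks, lock timeouts) are retried
        if($tries > 5) {
            print "pandia: giving up\n";
            last;
        }
        $tries = $tries + 1;
        sleep(2);
    }
    else {
        last;    # success
    }
}

$sth->finish();
$dbh->disconnect();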
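
run_index_batch and run_reindex_batch differ mainly in the queue query and in the final job() argument (0 for a fresh index, 1 for a reindex), with each job running sub index on a pool worker. A sketch of that dispatch shape, with a made-up worker body and URLs standing in for crawl_queue rows:

#!/usr/bin/env perl
# Sketch of the Thread::Pool dispatch used by run_index_batch and
# run_reindex_batch. index_job stands in for sub index; the real code
# also passes the DSN and credentials through job().

use strict;
use warnings;
use Thread::Pool;

sub index_job {
    my ($url, $reindex) = @_;
    print "pandia: ", ($reindex ? "REINDEX" : "INDEX"), " $url\n";
}

my $pool = Thread::Pool->new({do => \&index_job, workers => 4});

# final argument selects the mode: 0 = fresh index, 1 = reindex
$pool->job("https://www.example.com/a", 0);
$pool->job("https://www.example.com/b", 1);

$pool->shutdown;    # waits for queued jobs to finish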