X-Git-Url: http://dxcluster.net/gitweb/gitweb.cgi?a=blobdiff_plain;f=perl%2FGeomag.pm;h=f8a1ec720c0d820f9c82ee1e3eee9635098b57e2;hb=766014807e2521cc60a219c6eda5b49d13ffc70f;hp=e84e5d50c35a5d4a472800713cd02480c81ea084;hpb=88665a2bed3b9ec9e97237938a95a045b2a21bb4;p=spider.git

diff --git a/perl/Geomag.pm b/perl/Geomag.pm
index e84e5d50..f8a1ec72 100644
--- a/perl/Geomag.pm
+++ b/perl/Geomag.pm
@@ -5,7 +5,7 @@
 #
 # Copyright (c) 1998 - Dirk Koopman G1TLH
 #
-# $Id$
+#
 #
 
 package Geomag;
@@ -16,11 +16,14 @@ use DXLog;
 use Julian;
 use IO::File;
 use DXDebug;
+use DXDupe;
+use Time::HiRes qw(gettimeofday tv_interval);
 
 use strict;
+
 use vars qw($date $sfi $k $a $r $forecast @allowed @denied $fp $node $from $dirprefix $param
-			%dup $duplth $dupage);
+			$duplth $dupage $filterdef);
 
 $fp = 0;						# the DXLog fcb
 $date = 0;						# the unix time of the WWV (notional)
@@ -33,18 +36,39 @@ $node = "";		# originating node
 $from = "";						# who this came from
 @allowed = ();					# if present only these callsigns are regarded as valid WWV updators
 @denied = ();					# if present ignore any wwv from these callsigns
-%dup = ();						# the spot duplicates hash
 $duplth = 20;					# the length of text to use in the deduping
 $dupage = 12*3600;				# the length of time to hold spot dups
 
-$dirprefix = "$main::data/wwv";
+$dirprefix = "$main::local_data/wwv";
 $param = "$dirprefix/param";
 
+our $maxcache = 10;
+our @cache;
+
+
+$filterdef = bless ([
+			  # tag, sort, field, priv, special parser
+			  ['by', 'c', 0],
+			  ['origin', 'c', 1],
+			  ['channel', 'c', 2],
+			  ['by_dxcc', 'nc', 3],
+			  ['by_itu', 'ni', 4],
+			  ['by_zone', 'nz', 5],
+			  ['origin_dxcc', 'nc', 6],
+			  ['origin_itu', 'ni', 7],
+			  ['origin_zone', 'nz', 8],
+			 ], 'Filter::Cmd');
+
 sub init
 {
 	$fp = DXLog::new('wwv', 'dat', 'm');
-	mkdir $dirprefix, 0777 if !-e $dirprefix; # now unnecessary DXLog will create it
 	do "$param" if -e "$param";
+	# read in existing data
+	my $t0 = [gettimeofday];
+	dbg(sprintf "WWV read in up to %d records into cache", $maxcache);
+	@cache = readfile($main::systime);
+	shift @cache while @cache > $maxcache;
+	dbg(sprintf "WWV read in last %d records into cache in %dmS", scalar @cache, _diffms($t0));
 	confess $@ if $@;
 }
@@ -66,19 +90,23 @@ sub store
 	close $fh;
 
 	# log it
-	$fp->writeunix($date, "$from^$date^$sfi^$a^$k^$forecast^$node^$r");
+	my $s = "$from^$date^$sfi^$a^$k^$forecast^$node^$r";
+	$fp->writeunix($date, $s);
+	push @cache, [ split /\^/, $s ];
+	shift @cache while @cache > $maxcache;
 }
 
 # update WWV info in one go (usually from a PC23)
 sub update
 {
 	my ($mydate, $mytime, $mysfi, $mya, $myk, $myforecast, $myfrom, $mynode, $myr) = @_;
-	if ((@allowed && grep {$_ eq $from} @allowed) ||
-		(@denied && !grep {$_ eq $from} @denied) ||
+	$myfrom =~ s/-\d+$//;
+	if ((@allowed && grep {$_ eq $myfrom} @allowed) ||
+		(@denied && !grep {$_ eq $myfrom} @denied) ||
 		(@allowed == 0 && @denied == 0)) {
 
 #		my $trydate = cltounix($mydate, sprintf("%02d18Z", $mytime));
-		if ($mydate >= $date) {
+		if ($mydate > $date) {
 			if ($myr) {
 				$r = 0 + $myr;
 			} else {
@@ -163,15 +191,22 @@ sub search
 {
 	my $from = shift;
 	my $to = shift;
-	my @date = $fp->unixtoj(shift);
+	my $t = shift;
+	my $date = $fp->unixtoj($t);
 	my $pattern = shift;
 	my $search;
 	my @out;
 	my $eval;
 	my $count;
-
-	$search = 1;
-	$eval = qq(
+
+	if ($t == $main::systime && ($to <= $maxcache)) {
+		dbg("using wwv cache") if isdbg('wwv');
+		@out = reverse @cache;
+		pop @out while @out > $to;
+	} else {
+		dbg("using wwv file(s)") if isdbg('wwv');
+		$search = 1;
+		$eval = qq(
 			   my \$c;
 			   my \$ref;
 			   for (\$c = \$#in; \$c >= 0; \$c--) {
@@ -185,22 +220,23 @@ sub search
 			   }
 			  );
 
-	$fp->close;					# close any open files
-
-	my $fh = $fp->open(@date);
-	for ($count = 0; $count < $to; ) {
-		my @in = ();
-		if ($fh) {
-			while (<$fh>) {
-				chomp;
-				push @in, [ split '\^' ] if length > 2;
+		$fp->close;				# close any open files
+
+		my $fh = $fp->open($date);
+		for ($count = 0; $count < $to; ) {
+			my @in = ();
+			if ($fh) {
+				while (<$fh>) {
+					chomp;
+					push @in, [ split '\^' ] if length > 2;
+				}
+				eval $eval;		# do the search on this file
+				return ("Geomag search error", $@) if $@;
+				last if $count >= $to; # stop after n
 			}
-			eval $eval;			# do the search on this file
-			return ("Geomag search error", $@) if $@;
-			last if $count >= $to; # stop after n
+			$fh = $fp->openprev();	# get the next file
+			last if !$fh;
 		}
-		$fh = $fp->openprev();	# get the next file
-		last if !$fh;
 	}
 
 	return @out;
@@ -231,8 +267,8 @@ sub print_item
 #
 sub readfile
 {
-	my @date = $fp->unixtoj(shift);
-	my $fh = $fp->open(@date);
+	my $date = $fp->unixtoj(shift);
+	my $fh = $fp->open($date);
 	my @spots = ();
 	my @in;
 
@@ -248,37 +284,19 @@ sub readfile
 # enter the spot for dup checking and return true if it is already a dup
 sub dup
 {
-	my ($d, $sfi, $k, $a, $text) = @_;
+	my ($d, $sfi, $k, $a, $text, $call) = @_;
 
 	# dump if too old
 	return 2 if $d < $main::systime - $dupage;
 
-	$d /= 60;					# to the nearest minute
-	chomp $text;
-	$text = substr($text, 0, $duplth) if length $text > $duplth;
-	my $dupkey = "$d|$sfi|$k|$a|$text";
-	return 1 if exists $dup{$dupkey};
-	$dup{$dupkey} = $d * 60;	# in seconds (to the nearest minute)
-	return 0;
-}
-
-# called every hour and cleans out the dup cache
-sub process
-{
-	my $cutoff = $main::systime - $dupage;
-	while (my ($key, $val) = each %dup) {
-		delete $dup{$key} if $val < $cutoff;
-	}
+	my $dupkey = "W$d|$sfi|$k|$a|$call";
+	return DXDupe::check($dupkey, $main::systime+$dupage);
 }
 
 sub listdups
 {
-	my @out;
-	for (sort { $dup{$a} <=> $dup{$b} } keys %dup) {
-		my $val = $dup{$_};
-		push @out, "$_ = $val (" . cldatetime($val) . ")";
-	}
-	return @out;
+	return DXDupe::listdups('W', $dupage, @_);
 }
 
 1;
 __END__;
+
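
The two substantive changes in this diff are a bounded in-memory cache of the most
recent WWV records (so a current-time lookup of up to $maxcache entries can be
answered by search() without reopening the log files) and the replacement of the
module-local %dup hash and hourly process() sweep with the shared DXDupe store.
The following standalone sketch illustrates the cache idiom only; the record
values in it are invented for illustration, and the real entries are the
^-separated fields that store() writes via $fp->writeunix.

	#!/usr/bin/perl
	# Sketch of the push/shift bounded cache used by init(), store() and the
	# fast path in search(). Record contents here are invented examples.
	use strict;
	use warnings;

	my $maxcache = 10;				# same default as $Geomag::maxcache
	my @cache;						# oldest first, newest last

	for my $n (1 .. 15) {
		# an invented record in the log layout: from^date^sfi^a^k^forecast^node^r
		my $s = join '^', 'G1TLH', time + $n, 70, 5, 2, 'quiet', 'GB7DJK', 0;
		push @cache, [ split /\^/, $s ];		# as store() does
		shift @cache while @cache > $maxcache;	# drop the oldest
	}

	# answering "give me the $to most recent records", as the cache branch
	# of search() does: newest first, trimmed to the requested count
	my $to = 3;
	my @out = reverse @cache;
	pop @out while @out > $to;
	printf "returning %d of %d cached records\n", scalar @out, scalar @cache;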