Sometimes you need to do a tail -f, grep the results, and then nuke the file and start all over again... wouldn't it be nice if you could just run a single command to do it, and optionally have it serve the output up on a port?

Example command line:

 hotgrep /that/file/over.there '.+' 2000

Which will ''tail -f'' the file in a very cool way! UNIX only.

This IS an intentionally stripped-down version which does not do EVERYTHING that anybody might want... it is here for didactic purposes only, but is fully functional as described. If somebody intends to modify '''this wiki page''', I hope they will keep that in mind.

----

#!tclsh
;## coded by Phil Ehrens for LIGO/LDAS 09/29/99
;## a combined tail -f, and egrep wrapped in a server!
;## some experimental handling of HTML tags is
;## implemented.
;##
;## See sample client at end of script.

set msg "
hotgrep command syntax:\n
hotgrep filename 'rx_pattern' \[delay in secs.\] \[server_port\]\n
Only filename and rx_pattern are required, refresh
delay defaults to 2 seconds, and a server will only
be set up if you spec a port.  Remember to protect
your regexp pattern with ''s to avoid shell expansion!
See the script for an example client for the server
version."

;## helpful hint
if { [ llength $argv ] < 2 } {
   puts stderr $msg
   exit
}

;## server port configuration stub: called once per incoming
;## connection; arranges for handle to fire when readable.
proc cfg {cid addr port} {
   fileevent $cid readable "handle $cid"
}

;## server port handler: ship the accumulated buffer to the
;## client, then reset it so each client sees only new lines.
proc handle {cid} {
   puts $cid $::buffer
   close $cid
   set ::buffer {}
}

;## simple html filter: turn list items into bullets, strip
;## img and all other tags, and unescape literal braces.
proc html { data } {
   regsub -all {<[Ll][Ii]>} $data { * } data
   regsub -all {<[Ii][Mm][Gg][^>]+>} $data {* } data
   regsub -all {<[^>]+>} $data {} data
   regsub -all {\\\}} $data \} data
   regsub -all {\\\{} $data \{ data
   return $data
}

;## manage memory usage, since otherwise things COULD
;## blow up!  Keeps only the most recent 256 buffered lines.
proc truncateBuffer {} {
   if { ! $::server } { return }
   set lines  [ split $::buffer "\n" ]
   set length [ llength $lines ]
   if { $length > 256 } {
      ;## BUG FIX: the original computed "256 - $length", which is
      ;## negative here; lrange clamps a negative first index to 0,
      ;## so the buffer was never actually truncated and grew
      ;## without bound.  Keep the LAST 256 lines instead.
      set first [ expr { $length - 256 } ]
      set last  [ expr { $length - 1 } ]
      set ::buffer [ join [ lrange $lines $first $last ] "\n" ]
   }
}

;## see if file was reopened during sleep: compare inodes and
;## (re)open when the file has been replaced or was never open.
proc statFile {} {
   file stat $::fname fstat
   set inode $fstat(ino)
   if { $inode != $::inode } {
      catch { close $::fid; unset ::fid }
   }
   if { ! [ info exists ::fid ] } {
      set ::fid [ open $::fname r ]
      set ::inode $inode
      puts "file \"$::fname\" opened as $::fid"
   }
}

;## drain all complete lines from the file; matching lines are
;## html-filtered, then buffered (server mode) or printed.
proc readWrite {} {
   while { [ gets $::fid line ] >= 0 } {
      if { [ regexp $::rx $line ] } {
         set line [ html $line ]
         if { $::server } {
            append ::buffer "$line\n"
         } else {
            puts stdout $line
         }
      }
   }
}

;## the hot-grepper! file does NOT need to exist
;## at startup. File can get nuked without a hiccup!
proc run {} {
   truncateBuffer
   if { [ file exists $::fname ] } {
      statFile
      readWrite
   } else {
      ;## maybe the file got nuked? Handle it!
      catch { close $::fid; unset ::fid }
   }
   ;## and loop
   after $::delay run
}

;## parse ::argv, set up globals and the optional server
;## socket, then kick off the polling loop.
proc init {} {
   if { [ info exists ::fname ] } { return }
   ;## default delay = 2 seconds
   set ::delay  2000
   set ::server 0
   set ::buffer {}
   set ::inode  {}
   set ::fname  {}
   set ::rx     {}
   ;## read the command line ::argv
   if { [ catch {
      set ::fname [ lindex $::argv 0 ]
      set ::rx    [ lindex $::argv 1 ]
      if { [ llength $::argv ] >= 3 } {
         if { [ catch {
            set ::delay [ expr { [ lindex $::argv 2 ] * 1000 } ]
         } err ] } {
            return -code error "\n$err\nDid you protect your regexp?\n"
         }
      }
      ;## if a port was specified, get the number.
      if { [ llength $::argv ] == 4 } {
         set server_port [ lindex $::argv 3 ]
      }
      ;## if a port was specified, hook it up!
      if { [ info exists server_port ] } {
         set ::server 1
         set cid [ socket -server cfg $server_port ]
         puts "socket \"$server_port\" opened as $cid"
      }
   } err ] } {
      ;## BUG FIX: this read "$msg", but msg is a global and this
      ;## proc never declared it, so the error path itself failed
      ;## with "can't read msg".  Use the fully qualified name.
      return -code error "$err\n$::msg"
   }
   run
}

;## MAIN
init
vwait enter-mainloop

;##~~~~~~snip~~~~~~
;## EXAMPLE CLIENT
proc client { host port } {
   set sid [ socket $host $port ]
   puts $sid {}
   flush $sid
   while { [ gets $sid line ] > 0 } {
      puts $line
   }
   close $sid
   after 3000 "client $host $port"
}
;## after 100 client $host $port
;## vwait enter-mainloop