Origin: http://wiki.tcl.tk/3211 Author: Reinhard Max.
This little script sends its command-line arguments as a query to the online dictionary at http://dict.leo.org and writes the parsed result to stdout. It uses Tcl's http package and the htmlparse and ncgi packages from Tcllib.
The scraper part (everything inside the ::dict.leo.org namespace) could also be included from other frontends. Its [query] proc takes a list of words to search for and returns a list of English/German pairs that matched the query; a small usage sketch follows the script below.
package require http
package require htmlparse
package require ncgi

namespace eval ::dict.leo.org {
    variable table ""

    # Callback for ::htmlparse::parse: collect the text of every table
    # cell (TD ... /TD) of the result page into the table variable.
    proc parse {tag close options body} {
        variable TD
        variable table
        switch -- $close$tag {
            TD      {set TD ""}
            /TD     {if {[llength $TD]} {lappend table [string trim $TD]}}
            default {append TD [string map {&nbsp; { }} $body]}
        }
    }

    # Send the query to dict.leo.org and return the matches as a flat
    # list of alternating English and German entries.
    proc query {query} {
        variable table
        set url "http://dict.leo.org/?search=[::ncgi::encode $query]"
        set tok [::http::geturl $url]
        # Only the line containing "search results" holds the hit table.
        foreach line [split [::http::data $tok] "\n"] {
            if {[string match "*search results*" $line]} break
        }
        ::http::cleanup $tok
        set table ""
        ::htmlparse::parse -cmd ::dict.leo.org::parse $line
        return $table
    }
}

proc max {a b} {expr {$a > $b ? $a : $b}}

proc main {argv} {
    set table [dict.leo.org::query [join $argv]]

    # Find the widest cell to size the two output columns.
    set max 0
    foreach c $table {set max [max $max [string length $c]]}
    set sep [string repeat = $max]
    set table [linsert $table 0 " English" " Deutsch" $sep $sep]
    foreach {c1 c2} $table {
        puts [format "%-*s %-*s" $max $c1 $max $c2]
    }
    puts ""
}

main $argv
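Reusing the scraper from another frontend amounts to loading the ::dict.leo.org namespace and calling its [query] proc. A minimal sketch, assuming the namespace above has been saved to a file called dictleo.tcl (the file name and the search word are illustrative, not part of the original script):

# Load the scraper; dictleo.tcl is assumed to contain the
# ::dict.leo.org namespace from the script above.
source dictleo.tcl

# [query] returns a flat list of alternating English/German entries.
foreach {english german} [::dict.leo.org::query "cat"] {
    puts "$english -> $german"
}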
Richard Suchenwirth comments: Proud owners of a firewall might have to add a line like
 http::config -proxyhost proxy -proxyport 80
at the very top of proc query. Helped in my case to really get out.
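For example, a sketch of the top of [query] with the proxy configured (the host name proxy and port 80 are taken from Richard's note and are site-specific assumptions):

proc query {query} {
    # Site-specific: replace "proxy" and 80 with your proxy's host and port.
    ::http::config -proxyhost proxy -proxyport 80
    variable table
    set url "http://dict.leo.org/?search=[::ncgi::encode $query]"
    # ... rest of the proc unchanged ...
}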