# List recipient addresses (to=<user@domain>) seen in the last 1000 lines of the
# mail log, then drop unwanted domains. Replace [DOMAINS YOU WANT TO EXCLUDE]
# with a pattern such as 'example\.com\|example\.org' before running.
tail -1000 /var/log/mail.log | grep -E -o "\bto=<[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,6}\b" | grep -v '[DOMAINS YOU WANT TO EXCLUDE]'
Saturday, August 13, 2016
Saturday, July 30, 2016
Geocode thousands of addresses with perl
I'm using this jquery store locator and needed a way to geocode 7K addresses. Google only lets you geocode 2,500 in a 24-hour period, so this will quit when the query limit is reached. When it runs again it will pick up where it left off. So it could theoretically be run on a cron until it's done.
#!/usr/bin/perl
# Geocode rows of sb_locations that have no latitude yet, via the Google
# Geocoding API. Sleeps briefly every 10 lookups, and stops cleanly when the
# daily OVER_QUERY_LIMIT is hit -- re-running resumes where it left off.
use strict;
use warnings;

use DBI;
use Google::GeoCoder::Smart;

my $geo = Google::GeoCoder::Smart->new();

# Database credentials -- fill these in before running.
my $host          = "";
my $database      = "";
my $user          = "";
my $mysqlpassword = "";

my $dbh = DBI->connect(
    "DBI:mysql:database=$database;host=$host",
    $user, $mysqlpassword,
    { 'RaiseError' => 1 },
);

my $ctr = 0;

# Only rows that have not been geocoded yet (empty lat column).
my $sth = db_query(q{SELECT * FROM sb_locations WHERE lat = ""});

while (my $ref = $sth->fetchrow_hashref) {
    # NOTE(review): @results slurps every remaining return value in this list
    # assignment, so $returntext is always undef; kept to match the module's
    # documented calling convention.
    my ($num, $error, @results, $returntext) = $geo->geocode(
        "address" => stripChars($ref->{'address'}),
        "city"    => stripChars($ref->{'city'}),
        "state"   => stripChars($ref->{'state'}),
        "zip"     => stripChars($ref->{'postal'}),
    );

    # Guard the dereference: an empty @results means no match came back.
    my $lat = $results[0] ? $results[0]{geometry}{location}{lat} : undef;
    my $lng = $results[0] ? $results[0]{geometry}{location}{lng} : undef;

    print "id: "           . d($ref->{'id'})
        . " returntext?: " . d($returntext)
        . " error?: "      . d($error)
        . " lat: "         . d($lat)
        . " lng: "         . d($lng) . "\n";

    # Daily quota exhausted -- stop; a later run picks up the remaining rows.
    last if defined $error && $error eq 'OVER_QUERY_LIMIT';

    # 'defined' (not truthiness) so legitimate 0.0 coordinates are stored too.
    if (defined $lat && defined $lng) {
        # Placeholders instead of string-built SQL: no quoting/injection issues.
        my $sth2 = db_query(
            "UPDATE sb_locations SET lat = ?, lng = ? WHERE id = ?",
            $lat, $lng, $ref->{'id'},
        );
        $sth2->finish();
    }

    $ctr++;
    if (($ctr % 10) == 0) {
        print "sleeping\n";
        sleep 5;    # brief pause to stay well under the per-second rate limit
    }
}

$sth->finish();
$dbh->disconnect();

# Stringify a possibly-undef value for logging without triggering warnings.
sub d {
    my ($v) = @_;
    return defined $v ? $v : '';
}

# Normalize free-form address text before sending it to the geocoder.
# Returns '' for undef input (e.g. a NULL column).
sub stripChars {
    my ($text) = @_;
    return '' unless defined $text;
    $text =~ s/^\s*//;            # leading whitespace
    $text =~ s/[\r\n]/ /g;        # newlines and carriage returns
    $text =~ s/\t/ /g;            # tabs
    $text =~ s/\a/ /g;            # alarm/bell characters
    $text =~ s/"/'/g;             # double quotes -> single quotes
    $text =~ s/\s+/ /g;           # collapse runs of whitespace
    $text =~ s/[^[:ascii:]]+//g;  # drop non-ASCII bytes
    return $text;
} # end sub stripChars

# Prepare and execute $query against the global $dbh, binding @binds to any
# placeholders. Prints (does not die on) DBI errors and returns the statement
# handle either way, matching the original error-reporting behavior.
# (RaiseError is on, so hard failures still die in prepare/execute.)
sub db_query {
    my ($query, @binds) = @_;
    my $sth = $dbh->prepare($query);
    $sth->execute(@binds);
    if (my $err = $dbh->err) {
        print $err . ": " . $dbh->errstr . " on query $query";
    }
    return $sth;
} # end sub db_query
Tuesday, April 26, 2016
Add transparent PNG to a JPG using ImageMagick Convert
# Overlay art.png, centered, onto shirt.jpg and write the result to output.jpg.
# -matte enables the image alpha/matte channel (deprecated in newer ImageMagick;
# presumably '-alpha set' is the modern equivalent -- verify on your version).
convert shirt.jpg art.png -gravity center -composite -matte output.jpg
Tuesday, March 22, 2016
recursively search and replace ampersands (or any character) in filenames
Replaces the ampersands with underscores in any file ending in .jpg (all subdirs)
find . -type f -iname "*.jpg" -exec rename 's/&/_/' {} \;

Same thing, but on systems that don't have 'rename' (like OS X):
# Rename .jpg files recursively, replacing the first '&' with '_' (same effect
# as rename 's/&/_/'), for systems without the perl 'rename' utility.
# Fix: the original expansion ${1/\/\&/_/} matched the two characters "/&" and
# inserted "_/", mangling paths; ${1/&/_} is the intended substitution.
find . -name '*.jpg' -type f -exec bash -c 'mv "$1" "${1/&/_}"' -- {} \;
Friday, March 11, 2016
wget a site for caching purposes (spider it)
Sometimes you want to crawl a site so that it gets cached on its server. This will "spider" a site and not download content
# Crawl mysite.com recursively (-r) up to 6 links deep (-l6) without saving
# anything (--spider), staying within the mysite.com domain (-D mysite.com).
# Useful for warming a server-side cache.
wget -r -l6 --spider -D mysite.com http://mysite.com/
Subscribe to:
Posts (Atom)