Use gzip to save a crap ton of disk space, closing #45

Author: Kai Hendry
Date:   2014-07-11 15:18:55 +08:00
parent 066e0bcf5c
commit db391ed349
3 changed files with 6 additions and 2 deletions
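
In short, each user's tweet archive is now kept gzip-compressed on disk and searched in place with zgrep, which decompresses on the fly. A minimal sketch of the pattern, using an illustrative file name user.txt rather than the repository's actual paths:

    gzip user.txt                    # replaces user.txt with user.txt.gz
    zgrep -hi "keyword" user.txt.gz  # grep the compressed archive without unpacking it on disk
    gunzip user.txt.gz               # unpack before appending newly fetched tweets, then re-gzip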

@@ -38,7 +38,7 @@ echo "<a href=\"http://$HTTP_HOST/u/$id\"><h1 class=\"alert alert-success\">Goto
echo "<a href=\"http://$id.$HTTP_HOST\"><h1 class=\"alert alert-success\">Goto http://$id.$HTTP_HOST to grep!</h1></a>";
}
symlink ("$id.txt", "tweets.txt");
symlink ("$id.txt.gz", "tweets.txt.gz");
echo `sed -e "s,TIMESTAMP,$(date)," ../../greptweet.appcache > greptweet.appcache`;
echo "<pre>";

@@ -26,6 +26,8 @@ fi
page=1
saved=0
+test -s "$1.txt.gz" && gunzip "$1.txt.gz"
if test -s "$1.txt"
then
saved=$(wc -l < "$1.txt")
@@ -61,3 +63,5 @@ saved=$(wc -l < "$1.txt")
done
echo $1 saved $saved tweets
+gzip "$1.txt"

@@ -10,7 +10,7 @@ $QUERY=urldecode(escapeshellarg(urlencode($_GET['q'])));
//fwrite($fp, $_GET['q'] . " : " . $QUERY . "\n");
//fclose($fp);
exec("grep -hi $QUERY tweets.txt", $array);
exec("zgrep -hi $QUERY tweets.txt", $array);
$data = json_encode($array);
echo $_GET['jsoncallback'] . '(' . $data . ');';
?>