Page Menu · Home · Musing Studio

No One · Temporary

diff --git a/bots/README.md b/bots/README.md
new file mode 100644
index 0000000..65394df
--- /dev/null
+++ b/bots/README.md
@@ -0,0 +1,8 @@
+bots
+====
+
+This package helps the backend determine which clients are bots or crawlers.
+
+## Write.as Usage
+
+This is used to prevent certain things when viewing posts, like incrementing the view count.
diff --git a/bots/bots.go b/bots/bots.go
index 6d82180..dd2141a 100644
--- a/bots/bots.go
+++ b/bots/bots.go
@@ -1,38 +1,43 @@
+// Package bots helps the backend determine which clients are bots or crawlers.
+// In Write.as, this is used to prevent certain things when viewing posts, like
+// incrementing the view count.
package bots
var bots = map[string]bool{
"bitlybot": true,
"crawlernutchtest/Nutch-1.9": true,
"ExactSeekCrawler/1.0": true,
"Googlebot-Image/1.0": true,
"LinkedInBot/1.0 (compatible; Mozilla/5.0; Jakarta Commons-HttpClient/3.1 +http://www.linkedin.com)": true,
"LivelapBot/0.2 (http://site.livelap.com/crawler)": true,
"Mozilla/5.0 (compatible; AhrefsBot/5.0; +http://ahrefs.com/robot/)": true,
"Mozilla/5.0 (compatible; DotBot/1.1; http://www.opensiteexplorer.org/dotbot, help@moz.com)": true,
"Mozilla/5.0 (compatible; Exabot/3.0; +http://www.exabot.com/go/robot)": true,
"Mozilla/5.0 (compatible; Findxbot/1.0; +http://www.findxbot.com)": true,
"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)": true,
"Mozilla/5.0 (compatible; Kraken/0.1; http://linkfluence.net/; bot@linkfluence.net)": true,
"Mozilla/5.0 (compatible; Linux x86_64; Mail.RU_Bot/2.0; +http://go.mail.ru/help/robots)": true,
"Mozilla/5.0 (compatible; MojeekBot/0.6; +https://www.mojeek.com/bot.html)": true,
"Mozilla/5.0 (compatible; OpenHoseBot/2.1; +http://www.openhose.org/bot.html)": true,
"Mozilla/5.0 (compatible; PaperLiBot/2.1; http://support.paper.li/entries/20023257-what-is-paper-li)": true,
"Mozilla/5.0 (compatible; redditbot/1.0; +http://www.reddit.com/feedback)": true,
"Mozilla/5.0 (compatible; SeznamBot/3.2; +http://fulltext.sblog.cz/)": true,
"Mozilla/5.0 (compatible; uMBot-LN/1.0; mailto: crawling@ubermetrics-technologies.com)": true,
"Mozilla/5.0+(compatible; UptimeRobot/2.0; http://www.uptimerobot.com/)": true,
"Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)": true,
"Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)": true,
"spiderbot": true,
"TelegramBot": true,
"Traackr.com Bot": true,
"Twitterbot/1.0": true,
"voltron": true,
}
+// IsBot reports whether the provided User-Agent string is a known bot
+// or crawler.
func IsBot(ua string) bool {
if _, ok := bots[ua]; ok {
return true
}
return false
}
diff --git a/bots/findBots.sh b/bots/findBots.sh
index 3e95392..d873224 100755
--- a/bots/findBots.sh
+++ b/bots/findBots.sh
@@ -1,32 +1,37 @@
#!/bin/bash
#
# Generates a Go map containing all bots that have accessed Write.as
#
cat /var/log/$1 | grep -i 'bot\|crawler\|voltron' | awk -F\" '{print $4}' | sort | uniq > bots.txt
rm bots.go
cat > bots.go << EOM
+// Package bots helps the backend determine which clients are bots or crawlers.
+// In Write.as, this is used to prevent certain things when viewing posts, like
+// incrementing the view count.
package bots
var bots = map[string]bool {
EOM
while read b; do
if [ -n "$b" ]; then
echo " \"$b\": true," >> bots.go
fi
done <bots.txt
cat >> bots.go << EOM
};
+// IsBot reports whether the provided User-Agent string is a known bot
+// or crawler.
func IsBot(ua string) bool {
if _, ok := bots[ua]; ok {
return true
}
return false
}
EOM

File Metadata

Mime Type
text/x-diff
Expires
Fri, Dec 26, 5:05 AM (16 h, 28 m)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
3561157

Event Timeline