Detect crawlers/bots/spiders in PHP (simple and fast)
<?php

/**
 * Check if the user agent string is one of a crawler, spider, or bot.
 *
 * @return bool
 *   TRUE if the user agent is a bot, FALSE if not.
 */
function is_crawler() {
  // Use a lowercase string for comparison; fall back to an empty string
  // so a missing User-Agent header does not trigger a warning.
  $user_agent = strtolower($_SERVER['HTTP_USER_AGENT'] ?? '');
  // A list of some common words used only by bots and crawlers.
  $bot_identifiers = array(
    'bot',
    'slurp',
    'crawler',
    'spider',
    'curl',
    'facebook',
    'fetch',
  );
  // See if one of the identifiers is in the UA string.
  foreach ($bot_identifiers as $identifier) {
    if (strpos($user_agent, $identifier) !== FALSE) {
      return TRUE;
    }
  }
  return FALSE;
}
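
A minimal usage sketch, assuming the function above is saved as is_crawler.php; record_page_view() is a hypothetical stand-in for your own tracking or logging code:

<?php
require_once 'is_crawler.php';

// Only count page views from what looks like a real browser,
// so bot traffic does not inflate the statistics.
if (!is_crawler()) {
  record_page_view($_SERVER['REQUEST_URI']);
}

Keep in mind the substring check is intentionally simple: it catches honest bots that identify themselves in the User-Agent header, but a crawler that spoofs a browser user agent will slip through.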