Last active
March 30, 2018 22:53
-
-
Save nickdaugherty/dcc2b3e89da63233c0d4 to your computer and use it in GitHub Desktop.
Example code for running a VIP site behind a reverse proxy with a subdirectory. This assumes the proxy server can't rewrite the html, such as on CloudFront. Props to 10up / Dropbox for much of the code.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<?php
/**
 * Reverse-proxy support for a VIP site served from a subdirectory
 * (e.g. behind CloudFront, which cannot rewrite the returned HTML).
 */

// Public-facing base URL, including the proxied subdirectory.
define( 'PROXY_URL', 'https://mysite.com/subdirectory' );
/**
 * Register rewrite rules so the home page (and its paginated views)
 * resolve under the proxy subdirectory.
 */
function mysite_map_home_page() {
	$subdir = get_mysite_proxy_subdirectory();

	// Home page: /subdir or /subdir/
	add_rewrite_rule( '^' . $subdir . '/?$', 'index.php', 'top' );

	// Paginated home page: /subdir/page/2, /subdir/page/3, ...
	add_rewrite_rule( '^' . $subdir . '/page/?([0-9]{1,})/?$', 'index.php?paged=$matches[1]', 'top' );
}
add_action( 'init', 'mysite_map_home_page' );
/**
 * Swap the site's own URL for the external proxy URL inside generated
 * links and content, so visitors stay on the proxied domain.
 *
 * @param string $content A URL or block of markup produced by WP.
 *
 * @return string Content with the site URL rewritten to PROXY_URL.
 */
function mysite_filter_proxy_url( $content ) {
	$site_base  = trailingslashit( get_site_url() );
	$proxy_base = trailingslashit( PROXY_URL );

	// Nothing to do when the proxy URL is already present in the content.
	if ( ! $proxy_base || false !== strpos( trailingslashit( $content ), $proxy_base ) ) {
		return $content;
	}

	// Depending on how WP built the URL it may or may not already carry
	// the subdirectory, so both variants must be replaced.
	list( $subdir )   = explode( '/', trim( parse_url( $proxy_base, PHP_URL_PATH ), '/' ) );
	$site_with_subdir = trailingslashit( $site_base . $subdir );

	return str_replace(
		array( $site_with_subdir, $site_base ),
		array( $proxy_base, $proxy_base ),
		$content
	);
}
// Attach the proxy-URL rewrite to every front-end link/feed filter so
// the proxied subdirectory shows up everywhere WP emits a permalink.
foreach ( array(
	'post_comments_feed_link',
	'category_feed_link',
	'category_link',
	'day_link',
	'feed_link',
	'month_link',
	'page_link',
	'post_link',
	'year_link',
	'the_permalink',
	'tag_link',
	'term_link',
	'comment_url',
	'the_content',
	'get_shortlink',
	'get_pagenum_link',
	'redirect_canonical',
) as $mysite_proxy_hook ) {
	add_filter( $mysite_proxy_hook, 'mysite_filter_proxy_url' );
}
unset( $mysite_proxy_hook );
/**
 * Home URL with the site host swapped for the proxy URL when one exists.
 *
 * @param string $path Optional path appended to the home URL.
 *
 * @return string
 */
function mysite_home_url( $path = '/' ) {
	// Fix: mysite_filter_proxy_url() takes a single argument; the stray
	// second argument (false) previously passed here was silently ignored.
	return mysite_filter_proxy_url( home_url( esc_attr( $path ) ) );
}
/**
 * Return the proxy subdirectory slug (no leading/trailing slashes).
 *
 * @return string
 */
function get_mysite_proxy_subdirectory() {
	// Fix: the original return statement was missing its terminating
	// semicolon, which is a PHP parse error.
	return 'mysubdirectory';
}
/**
 * Prefix every rewrite rule with the proxy subdirectory so requests
 * arriving under /subdirectory/... match the normal WP rules.
 *
 * @param array $rules Rewrite rules, pattern => query mapping.
 *
 * @return array Rules keyed by subdirectory-prefixed patterns.
 */
function mysite_proxy_rewrite( $rules ) {
	$new_rules          = array();
	$proxy_subdirectory = get_mysite_proxy_subdirectory();

	foreach ( $rules as $rule => $rewrite ) {
		// Only prefix rules that don't already mention the subdirectory.
		// Fix: the original used `! strpos( $rule, $proxy_subdirectory )`,
		// which treats a match at offset 0 as "not found" and would
		// double-prefix a rule that already starts with the subdirectory;
		// compare against false explicitly instead.
		if ( false === strpos( $rule, $proxy_subdirectory ) ) {
			if ( '^' === substr( $rule, 0, 1 ) ) {
				// Anchored pattern: splice the subdirectory in after '^'.
				$rule = substr_replace( $rule, '^' . $proxy_subdirectory . '/', 0, 1 );
			} else {
				$rule = $proxy_subdirectory . '/' . $rule;
			}
		}

		$new_rules[ $rule ] = $rewrite;
	}

	return $new_rules;
}
add_filter( 'rewrite_rules_array', 'mysite_proxy_rewrite' );
/**
 * Point the X-Pingback header at the proxied xmlrpc endpoint.
 *
 * @param array $headers HTTP headers WP is about to send.
 * @param WP    $wp      The WP environment instance (unused).
 *
 * @return array
 */
function mysite_filter_pingback_url( $headers, $wp ) {
	// Fix: the original declared this second parameter as `$this`, which
	// is a fatal error in PHP ($this cannot be used as a parameter name).
	$headers['X-Pingback'] = mysite_home_url( '/xmlrpc.php' );
	return $headers;
}
add_filter( 'wp_headers', 'mysite_filter_pingback_url', 10, 2 );
/**
 * Rewrite sitemap.xml URLs generated via home_url() to point at the proxy.
 *
 * @param string $url         The computed home URL.
 * @param string $path        Path appended to the home URL.
 * @param string $orig_scheme Scheme requested by the caller (unused).
 * @param int    $blog_id     Blog ID (unused).
 *
 * @return string
 */
function mysite_sitemap_proxy_urls( $url, $path, $orig_scheme, $blog_id ) {
	// Only sitemap.xml requests are rewritten; everything else passes through.
	if ( false === strpos( $path, 'sitemap.xml' ) ) {
		return $url;
	}

	$site_base  = trailingslashit( get_site_url() );
	$proxy_base = trailingslashit( PROXY_URL );

	if ( $proxy_base && false === strpos( $url, $proxy_base ) ) {
		// Replace both the bare site URL and the variant that already
		// carries the subdirectory, depending on how WP built the URL.
		list( $subdir )   = explode( '/', trim( parse_url( $proxy_base, PHP_URL_PATH ), '/' ) );
		$site_with_subdir = trailingslashit( $site_base . $subdir );

		$url = str_replace(
			array( $site_with_subdir, $site_base ),
			array( $proxy_base, $proxy_base ),
			$url
		);
	}

	return $url;
}
add_filter( 'home_url', 'mysite_sitemap_proxy_urls', 10, 4 );
/**
 * Previews must load from the origin (VIP) host rather than the proxy,
 * so swap the proxy URL back out of the preview link.
 *
 * @param string  $preview_link Preview URL as generated by WP.
 * @param WP_Post $post         Post being previewed.
 *
 * @return string
 */
function mysite_vip_preview_url( $preview_link, $post ) {
	$vip_base   = trailingslashit( get_site_url() );
	$proxy_base = trailingslashit( PROXY_URL );

	// Drafts preview at the origin root; everything else keeps the
	// subdirectory on the origin host.
	if ( 'draft' === $post->post_status || 'auto-draft' === $post->post_status ) {
		return str_replace( $proxy_base, $vip_base, $preview_link );
	}

	return str_replace(
		$proxy_base,
		$vip_base . get_mysite_proxy_subdirectory() . '/',
		$preview_link
	);
}
add_filter( 'preview_post_link', 'mysite_vip_preview_url', 10, 2 );
/**
 * Serve a deny-all robots.txt: crawlers should not index the origin
 * directly — the robots.txt on the main (proxied) site governs indexing.
 */
add_filter( 'robots_txt', function () {
	return "User-Agent: *\nDisallow: /";
} );
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment