do_robots

The timeline below shows how the WordPress function do_robots() has changed across versions. If a version is not listed, refer to the next listed version below it (the closest earlier release).

WordPress Version: 6.5

/**
 * Displays the default robots.txt file content.
 *
 * @since 2.1.0
 * @since 5.3.0 Remove the "Disallow: /" output if search engine visibility is
 *              discouraged in favor of robots meta HTML tag via wp_robots_no_robots()
 *              filter callback.
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    // Still fetched so it can be passed to the 'robots_txt' filter below;
    // since 5.3.0 it is no longer used here to emit "Disallow: /".
    $public = get_option('blog_public');
    // Honor a subdirectory install by prefixing rules with the site path, if any.
    $site_url = parse_url(site_url());
    $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
    $output .= "Disallow: {$path}/wp-admin/\n";
    // admin-ajax.php is explicitly allowed despite the wp-admin disallow above.
    $output .= "Allow: {$path}/wp-admin/admin-ajax.php\n";
    /**
     * Filters the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output The robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 5.7

/**
 * Displays the default robots.txt file content.
 *
 * @since 2.1.0
 * @since 5.3.0 Remove the "Disallow: /" output if search engine visibility is
 *              discouraged in favor of robots meta HTML tag via wp_robots_no_robots()
 *              filter callback.
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    // Still fetched so it can be passed to the 'robots_txt' filter below;
    // since 5.3.0 it is no longer used here to emit "Disallow: /".
    $public = get_option('blog_public');
    // Honor a subdirectory install by prefixing rules with the site path, if any.
    $site_url = parse_url(site_url());
    $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
    $output .= "Disallow: {$path}/wp-admin/\n";
    // admin-ajax.php is explicitly allowed despite the wp-admin disallow above.
    $output .= "Allow: {$path}/wp-admin/admin-ajax.php\n";
    /**
     * Filters the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output The robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 5.3

/**
 * Displays the default robots.txt file content.
 *
 * @since 2.1.0
 * @since 5.3.0 Remove the "Disallow: /" output if search engine visibility is
 *              discouraged in favor of robots meta HTML tag in wp_no_robots().
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    // Still fetched so it can be passed to the 'robots_txt' filter below;
    // as of this version it is no longer used here to emit "Disallow: /".
    $public = get_option('blog_public');
    // Honor a subdirectory install by prefixing rules with the site path, if any.
    $site_url = parse_url(site_url());
    $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
    $output .= "Disallow: {$path}/wp-admin/\n";
    // admin-ajax.php is explicitly allowed despite the wp-admin disallow above.
    $output .= "Allow: {$path}/wp-admin/admin-ajax.php\n";
    /**
     * Filters the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output The robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 4.6

/**
 * Display the robots.txt file content.
 *
 * The echo content should be with usage of the permalinks or for creating the
 * robots.txt file.
 *
 * @since 2.1.0
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    $public = get_option('blog_public');
    if ('0' == $public) {
        // Site is marked non-public ('blog_public' option is '0'): block all crawling.
        $output .= "Disallow: /\n";
    } else {
        // Honor a subdirectory install by prefixing rules with the site path, if any.
        $site_url = parse_url(site_url());
        $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
        $output .= "Disallow: {$path}/wp-admin/\n";
        // admin-ajax.php is explicitly allowed despite the wp-admin disallow above.
        $output .= "Allow: {$path}/wp-admin/admin-ajax.php\n";
    }
    /**
     * Filters the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output Robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 4.4

/**
 * Display the robots.txt file content.
 *
 * The echo content should be with usage of the permalinks or for creating the
 * robots.txt file.
 *
 * @since 2.1.0
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    $public = get_option('blog_public');
    if ('0' == $public) {
        // Site is marked non-public ('blog_public' option is '0'): block all crawling.
        $output .= "Disallow: /\n";
    } else {
        // Honor a subdirectory install by prefixing rules with the site path, if any.
        $site_url = parse_url(site_url());
        $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
        $output .= "Disallow: {$path}/wp-admin/\n";
        // admin-ajax.php is explicitly allowed despite the wp-admin disallow above.
        $output .= "Allow: {$path}/wp-admin/admin-ajax.php\n";
    }
    /**
     * Filter the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output Robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 4.0

/**
 * Display the robots.txt file content.
 *
 * The echo content should be with usage of the permalinks or for creating the
 * robots.txt file.
 *
 * @since 2.1.0
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    $public = get_option('blog_public');
    if ('0' == $public) {
        // Site is marked non-public ('blog_public' option is '0'): block all crawling.
        $output .= "Disallow: /\n";
    } else {
        // Honor a subdirectory install by prefixing rules with the site path, if any.
        // Note: this version has no "Allow: .../admin-ajax.php" rule yet.
        $site_url = parse_url(site_url());
        $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
        $output .= "Disallow: {$path}/wp-admin/\n";
    }
    /**
     * Filter the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output Robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 3.9

/**
 * Display the robots.txt file content.
 *
 * The echo content should be with usage of the permalinks or for creating the
 * robots.txt file.
 *
 * @since 2.1.0
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    /**
     * Fires when displaying the robots.txt file.
     *
     * @since 2.1.0
     */
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    $public = get_option('blog_public');
    if ('0' == $public) {
        // Site is marked non-public ('blog_public' option is '0'): block all crawling.
        $output .= "Disallow: /\n";
    } else {
        // Honor a subdirectory install by prefixing rules with the site path, if any.
        $site_url = parse_url(site_url());
        $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
        $output .= "Disallow: {$path}/wp-admin/\n";
        // This version still disallows wp-includes/ as well.
        $output .= "Disallow: {$path}/wp-includes/\n";
    }
    /**
     * Filter the robots.txt output.
     *
     * @since 3.0.0
     *
     * @param string $output Robots.txt output.
     * @param bool   $public Whether the site is considered "public".
     */
    echo apply_filters('robots_txt', $output, $public);
}

WordPress Version: 3.7

/**
 * Display the robots.txt file content.
 *
 * The echo content should be with usage of the permalinks or for creating the
 * robots.txt file.
 *
 * @since 2.1.0
 * @uses do_action() Calls 'do_robotstxt' hook for displaying robots.txt rules.
 */
function do_robots()
{
    header('Content-Type: text/plain; charset=utf-8');
    // Fires when displaying the robots.txt file (this version predates the
    // inline hook docblock that later releases carry here).
    do_action('do_robotstxt');
    $output = "User-agent: *\n";
    $public = get_option('blog_public');
    if ('0' == $public) {
        // Site is marked non-public ('blog_public' option is '0'): block all crawling.
        $output .= "Disallow: /\n";
    } else {
        // Honor a subdirectory install by prefixing rules with the site path, if any.
        $site_url = parse_url(site_url());
        $path = (!empty($site_url['path'])) ? $site_url['path'] : '';
        $output .= "Disallow: {$path}/wp-admin/\n";
        // This version still disallows wp-includes/ as well.
        $output .= "Disallow: {$path}/wp-includes/\n";
    }
    // Final output is passed through the 'robots_txt' filter with $output and $public.
    echo apply_filters('robots_txt', $output, $public);
}