* @ingroup SpecialPage
*/
+use MediaWiki\MediaWikiServices;
+
/**
* A special page that allows users to export pages in an XML file
*
$pages = array_keys( $pageSet );
- // Normalize titles to the same format and remove dupes, see bug 17374
+ // Normalize titles to the same format and remove dupes, see T19374
foreach ( $pages as $k => $v ) {
$pages[$k] = str_replace( " ", "_", $v );
}
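// e.g. "Main Page" and "Main_Page" both become "Main_Page", the
// canonical DB-key form, so duplicate titles collapse to one spelling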
/* Ok, let's get to it... */
if ( $history == WikiExporter::CURRENT ) {
$lb = false;
- $db = wfGetDB( DB_SLAVE );
+ $db = wfGetDB( DB_REPLICA );
$buffer = WikiExporter::BUFFER;
} else {
// Use an unbuffered query; histories may be very long!
- $lb = wfGetLBFactory()->newMainLB();
- $db = $lb->getConnection( DB_SLAVE );
+ $lb = MediaWikiServices::getInstance()->getDBLoadBalancerFactory()->newMainLB();
+ $db = $lb->getConnection( DB_REPLICA );
$buffer = WikiExporter::STREAM;
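// STREAM keeps the result set unbuffered so a full page history never
// sits in memory; the dedicated load balancer above keeps that
// long-running query off the shared replica connection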
// This might take a while... :D
$exporter->allPages();
} else {
foreach ( $pages as $page ) {
- # Bug 8824: Only export pages the user can read
+ # T10824: Only export pages the user can read
$title = Title::newFromText( $page );
if ( is_null( $title ) ) {
// @todo Perhaps output an <error> tag or something.
$name = $title->getDBkey();
- $dbr = wfGetDB( DB_SLAVE );
+ $dbr = wfGetDB( DB_REPLICA );
$res = $dbr->select(
[ 'page', 'categorylinks' ],
[ 'page_namespace', 'page_title' ],
$maxPages = $this->getConfig()->get( 'ExportPagelistLimit' );
- $dbr = wfGetDB( DB_SLAVE );
+ $dbr = wfGetDB( DB_REPLICA );
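// A short buffered read; $maxPages ( $wgExportPagelistLimit ) caps how
// many titles this lookup may return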
$res = $dbr->select(
'page',
[ 'page_namespace', 'page_title' ],
* @return array
*/
private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
- $dbr = wfGetDB( DB_SLAVE );
+ $dbr = wfGetDB( DB_REPLICA );
foreach ( $inputPages as $page ) {
$title = Title::newFromText( $page );
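
Note: for anyone porting the same cleanup elsewhere, the pattern these
hunks converge on looks roughly like the sketch below. This is an
illustrative sketch against the MediaWikiServices locator of this era,
not part of the patch; the variable names are placeholders.

use MediaWiki\MediaWikiServices;

// Short buffered read: the shared replica connection is enough.
$dbr = MediaWikiServices::getInstance()->getDBLoadBalancer()
	->getConnection( DB_REPLICA );

// Long streaming read (full histories): take a fresh main load
// balancer so the unbuffered query does not tie up the shared
// connection, then close it once the stream is done.
$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
$lb = $lbFactory->newMainLB();
$db = $lb->getConnection( DB_REPLICA );
// ... run the streaming export against $db ...
$lb->closeAll();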