* @ingroup SpecialPage
*/
+use MediaWiki\MediaWikiServices;
+
/**
* A special page that allows users to export pages in a XML file
*
$formDescriptor += [
'wpDownload' => [
'type' => 'check',
- 'name' =>'wpDownload',
+ 'name' => 'wpDownload',
'id' => 'wpDownload',
'default' => $request->wasPosted() ? $request->getCheck( 'wpDownload' ) : true,
'label-message' => 'export-download',
* @param bool $exportall Whether to export everything
*/
private function doExport( $page, $history, $list_authors, $exportall ) {
-
// If we are grabbing everything, enable full history and ignore the rest
if ( $exportall ) {
$history = WikiExporter::FULL;
$pages = array_keys( $pageSet );
- // Normalize titles to the same format and remove dupes, see bug 17374
+ // Normalize titles to the same format and remove dupes, see T19374
foreach ( $pages as $k => $v ) {
$pages[$k] = str_replace( " ", "_", $v );
}
$buffer = WikiExporter::BUFFER;
} else {
// Use an unbuffered query; histories may be very long!
- $lb = wfGetLBFactory()->newMainLB();
+ $lb = MediaWikiServices::getInstance()->getDBLoadBalancerFactory()->newMainLB();
$db = $lb->getConnection( DB_REPLICA );
$buffer = WikiExporter::STREAM;
$exporter->allPages();
} else {
foreach ( $pages as $page ) {
- # Bug 8824: Only export pages the user can read
+ # T10824: Only export pages the user can read
$title = Title::newFromText( $page );
if ( is_null( $title ) ) {
// @todo Perhaps output an <error> tag or something.