Some small doc tweaks to reduce Doxygen warnings, namely:
includes/Export.php
<?php
# Copyright (C) 2003, 2005, 2006 Brion Vibber <brion@pobox.com>
# http://www.mediawiki.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# http://www.gnu.org/copyleft/gpl.html


/**
 *
 * @addtogroup SpecialPage
 */
class WikiExporter {
	var $list_authors = false; # Return distinct author list (when not returning full history)
	var $author_list = "";

	const FULL = 0;
	const CURRENT = 1;

	const BUFFER = 0;
	const STREAM = 1;

	const TEXT = 0;
	const STUB = 1;

	/**
	 * If using WikiExporter::STREAM to stream a large amount of data,
	 * provide a database connection which is not managed by
	 * LoadBalancer to read from: some history blob types will
	 * make additional queries to pull source data while the
	 * main query is still running.
	 *
	 * @param Database $db
	 * @param mixed $history one of WikiExporter::FULL or WikiExporter::CURRENT, or an
	 *                       associative array:
	 *                         offset: non-inclusive offset at which to start the query
	 *                         limit: maximum number of rows to return
	 *                         dir: "asc" or "desc" timestamp order
	 * @param int $buffer one of WikiExporter::BUFFER or WikiExporter::STREAM
	 * @param int $text one of WikiExporter::TEXT or WikiExporter::STUB
	 */
	function __construct( &$db, $history = WikiExporter::CURRENT,
			$buffer = WikiExporter::BUFFER, $text = WikiExporter::TEXT ) {
		$this->db =& $db;
		$this->history = $history;
		$this->buffer = $buffer;
		$this->writer = new XmlDumpWriter();
		$this->sink = new DumpOutput();
		$this->text = $text;
	}
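
	/*
	 * Illustrative usage sketch (not part of the original API docs): the two
	 * common ways of constructing an exporter. The $db handle is assumed to
	 * come from elsewhere, e.g. wfGetDB( DB_SLAVE ).
	 *
	 *   // Current revisions only, buffered, with full text:
	 *   $exporter = new WikiExporter( $db, WikiExporter::CURRENT );
	 *
	 *   // A bounded slice of history, newest first, as stub (no text) records:
	 *   $exporter = new WikiExporter( $db,
	 *       array( 'dir' => 'desc', 'offset' => '20070101000000', 'limit' => 100 ),
	 *       WikiExporter::BUFFER, WikiExporter::STUB );
	 */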

	/**
	 * Set the DumpOutput or DumpFilter object which will receive
	 * various row objects and XML output for filtering. Filters
	 * can be chained or used as callbacks.
	 *
	 * @param mixed $sink
	 */
	function setOutputSink( &$sink ) {
		$this->sink =& $sink;
	}
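
	/*
	 * Illustrative usage sketch: routing output through a filter chain to a
	 * file. DumpNotalkFilter and DumpFileOutput are defined later in this
	 * file; the dump itself is driven by openStream()/allPages()/closeStream().
	 *
	 *   $sink = new DumpFileOutput( 'dump.xml' );
	 *   $filter = new DumpNotalkFilter( $sink );
	 *   $exporter->setOutputSink( $filter );
	 *   $exporter->openStream();
	 *   $exporter->allPages();
	 *   $exporter->closeStream();
	 */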

	function openStream() {
		$output = $this->writer->openStream();
		$this->sink->writeOpenStream( $output );
	}

	function closeStream() {
		$output = $this->writer->closeStream();
		$this->sink->writeCloseStream( $output );
	}

	/**
	 * Dumps a series of page and revision records for all pages
	 * in the database, either including complete history or only
	 * the most recent version.
	 */
	function allPages() {
		return $this->dumpFrom( '' );
	}

	/**
	 * Dumps a series of page and revision records for those pages
	 * in the database falling within the page_id range given.
	 * @param int $start Inclusive lower limit (this id is included)
	 * @param int $end   Exclusive upper limit (this id is not included)
	 *                   If 0, no upper limit.
	 */
	function pagesByRange( $start, $end ) {
		$condition = 'page_id >= ' . intval( $start );
		if( $end ) {
			$condition .= ' AND page_id < ' . intval( $end );
		}
		return $this->dumpFrom( $condition );
	}
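
	/*
	 * Illustrative usage sketch: dump page_id 100 up to (but not including)
	 * 200, or everything from 100 onwards when the upper limit is 0.
	 *
	 *   $exporter->pagesByRange( 100, 200 );
	 *   $exporter->pagesByRange( 100, 0 );
	 */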

	/**
	 * @param Title $title
	 */
	function pageByTitle( $title ) {
		return $this->dumpFrom(
			'page_namespace=' . $title->getNamespace() .
			' AND page_title=' . $this->db->addQuotes( $title->getDbKey() ) );
	}

	function pageByName( $name ) {
		$title = Title::newFromText( $name );
		if( is_null( $title ) ) {
			return new WikiError( "Can't export invalid title" );
		} else {
			return $this->pageByTitle( $title );
		}
	}

	function pagesByName( $names ) {
		foreach( $names as $name ) {
			$this->pageByName( $name );
		}
	}

	// -------------------- private implementation below --------------------

	# Generates the distinct list of authors of an article
	# Not called by default (depends on $this->list_authors)
	# Can be set by Special:Export when not exporting whole history
	function do_list_authors( $page, $revision, $cond ) {
		$fname = "do_list_authors";
		wfProfileIn( $fname );
		$this->author_list = "<contributors>";
		// rev_deleted
		$nothidden = '(rev_deleted & ' . Revision::DELETED_USER . ') = 0';

		$sql = "SELECT DISTINCT rev_user_text,rev_user FROM {$page},{$revision} " .
			"WHERE page_id=rev_page AND $nothidden AND " . $cond;
		$result = $this->db->query( $sql, $fname );
		$resultset = $this->db->resultObject( $result );
		while( $row = $resultset->fetchObject() ) {
			$this->author_list .= "<contributor>" .
				"<username>" .
				htmlentities( $row->rev_user_text ) .
				"</username>" .
				"<id>" .
				$row->rev_user .
				"</id>" .
				"</contributor>";
		}
		wfProfileOut( $fname );
		$this->author_list .= "</contributors>";
	}

	function dumpFrom( $cond = '' ) {
		$fname = 'WikiExporter::dumpFrom';
		wfProfileIn( $fname );

		$page = $this->db->tableName( 'page' );
		$revision = $this->db->tableName( 'revision' );
		$text = $this->db->tableName( 'text' );

		$order = 'ORDER BY page_id';
		$limit = '';

		if( $this->history == WikiExporter::FULL ) {
			$join = 'page_id=rev_page';
		} elseif( $this->history == WikiExporter::CURRENT ) {
			if ( $this->list_authors && $cond != '' ) { // List authors, if so desired
				$this->do_list_authors( $page, $revision, $cond );
			}
			$join = 'page_id=rev_page AND page_latest=rev_id';
		} elseif ( is_array( $this->history ) ) {
			$join = 'page_id=rev_page';
			if ( $this->history['dir'] == 'asc' ) {
				$op = '>';
				$order .= ', rev_timestamp';
			} else {
				$op = '<';
				$order .= ', rev_timestamp DESC';
			}
			if ( !empty( $this->history['offset'] ) ) {
				$join .= " AND rev_timestamp $op " . $this->db->addQuotes(
					$this->db->timestamp( $this->history['offset'] ) );
			}
			if ( !empty( $this->history['limit'] ) ) {
				$limitNum = intval( $this->history['limit'] );
				if ( $limitNum > 0 ) {
					$limit = "LIMIT $limitNum";
				}
			}
		} else {
			wfProfileOut( $fname );
			return new WikiError( "$fname given invalid history dump type." );
		}
		$where = ( $cond == '' ) ? '' : "$cond AND";

		if( $this->buffer == WikiExporter::STREAM ) {
			$prev = $this->db->bufferResults( false );
		}
		if( $cond == '' ) {
			// Optimization hack for full-database dump
			$revindex = $pageindex = $this->db->useIndexClause( "PRIMARY" );
			$straight = ' /*! STRAIGHT_JOIN */ ';
		} else {
			$pageindex = '';
			$revindex = '';
			$straight = '';
		}
		if( $this->text == WikiExporter::STUB ) {
			$sql = "SELECT $straight * FROM
					$page $pageindex,
					$revision $revindex
					WHERE $where $join
					$order $limit";
		} else {
			$sql = "SELECT $straight * FROM
					$page $pageindex,
					$revision $revindex,
					$text
					WHERE $where $join AND rev_text_id=old_id
					$order $limit";
		}
		$result = $this->db->query( $sql, $fname );
		$wrapper = $this->db->resultObject( $result );
		// outputStream() also appends $this->author_list (when set) before the
		// closing </page>, so a second pass over the already-freed wrapper is
		// not needed.
		$this->outputStream( $wrapper );

		if( $this->buffer == WikiExporter::STREAM ) {
			$this->db->bufferResults( $prev );
		}

		wfProfileOut( $fname );
	}

	/**
	 * Runs through a query result set dumping page and revision records.
	 * The result set should be sorted/grouped by page to avoid duplicate
	 * page records in the output.
	 *
	 * The result set will be freed once complete. Should be safe for
	 * streaming (non-buffered) queries, as long as it was made on a
	 * separate database connection not managed by LoadBalancer; some
	 * blob storage types will make queries to pull source data.
	 *
	 * @param ResultWrapper $resultset
	 * @access private
	 */
	function outputStream( $resultset ) {
		$last = null;
		while( $row = $resultset->fetchObject() ) {
			if( is_null( $last ) ||
				$last->page_namespace != $row->page_namespace ||
				$last->page_title != $row->page_title ) {
				if( isset( $last ) ) {
					$output = $this->writer->closePage();
					$this->sink->writeClosePage( $output );
				}
				$output = $this->writer->openPage( $row );
				$this->sink->writeOpenPage( $row, $output );
				$last = $row;
			}
			$output = $this->writer->writeRevision( $row );
			$this->sink->writeRevision( $row, $output );
		}
		if( isset( $last ) ) {
			$output = $this->author_list . $this->writer->closePage();
			$this->sink->writeClosePage( $output );
		}
		$resultset->free();
	}
}

class XmlDumpWriter {

	/**
	 * Returns the export schema version.
	 * @return string
	 */
	function schemaVersion() {
		return "0.3"; // FIXME: upgrade to 0.4 when updated XSD is ready, for the revision deletion bits
	}

	/**
	 * Opens the XML output stream's root <mediawiki> element.
	 * This does not include an XML declaration, so it is safe to include
	 * as a subelement in a larger XML stream. Namespace and XML Schema
	 * references are included.
	 *
	 * Output will be encoded in UTF-8.
	 *
	 * @return string
	 */
	function openStream() {
		global $wgContLanguageCode;
		$ver = $this->schemaVersion();
		return wfElement( 'mediawiki', array(
			'xmlns'              => "http://www.mediawiki.org/xml/export-$ver/",
			'xmlns:xsi'          => "http://www.w3.org/2001/XMLSchema-instance",
			'xsi:schemaLocation' => "http://www.mediawiki.org/xml/export-$ver/ " .
			                        "http://www.mediawiki.org/xml/export-$ver.xsd",
			'version'            => $ver,
			'xml:lang'           => $wgContLanguageCode ),
			null ) .
			"\n" .
			$this->siteInfo();
	}
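
	/*
	 * Illustrative sketch of what openStream() emits (wrapped here for
	 * readability; the real output keeps all attributes on one line, and the
	 * values depend on the schema version and wiki configuration):
	 *
	 *   <mediawiki xmlns="http://www.mediawiki.org/xml/export-0.3/"
	 *       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	 *       xsi:schemaLocation="http://www.mediawiki.org/xml/export-0.3/
	 *           http://www.mediawiki.org/xml/export-0.3.xsd"
	 *       version="0.3" xml:lang="en">
	 *     <siteinfo>
	 *       ...
	 *     </siteinfo>
	 */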

	function siteInfo() {
		$info = array(
			$this->sitename(),
			$this->homelink(),
			$this->generator(),
			$this->caseSetting(),
			$this->namespaces() );
		return "  <siteinfo>\n    " .
			implode( "\n    ", $info ) .
			"\n  </siteinfo>\n";
	}

	function sitename() {
		global $wgSitename;
		return wfElement( 'sitename', array(), $wgSitename );
	}

	function generator() {
		global $wgVersion;
		return wfElement( 'generator', array(), "MediaWiki $wgVersion" );
	}

	function homelink() {
		return wfElement( 'base', array(), Title::newMainPage()->getFullUrl() );
	}

	function caseSetting() {
		global $wgCapitalLinks;
		// "case-insensitive" option is reserved for future
		$sensitivity = $wgCapitalLinks ? 'first-letter' : 'case-sensitive';
		return wfElement( 'case', array(), $sensitivity );
	}

	function namespaces() {
		global $wgContLang;
		$spaces = "<namespaces>\n";
		foreach( $wgContLang->getFormattedNamespaces() as $ns => $title ) {
			$spaces .= '      ' . wfElement( 'namespace', array( 'key' => $ns ), $title ) . "\n";
		}
		$spaces .= "    </namespaces>";
		return $spaces;
	}

	/**
	 * Closes the output stream with the closing root element.
	 * Call when finished dumping things.
	 */
	function closeStream() {
		return "</mediawiki>\n";
	}


	/**
	 * Opens a <page> section on the output stream, with data
	 * from the given database row.
	 *
	 * @param object $row
	 * @return string
	 * @access private
	 */
	function openPage( $row ) {
		$out = "  <page>\n";
		$title = Title::makeTitle( $row->page_namespace, $row->page_title );
		$out .= '    ' . wfElementClean( 'title', array(), $title->getPrefixedText() ) . "\n";
		$out .= '    ' . wfElement( 'id', array(), strval( $row->page_id ) ) . "\n";
		if( '' != $row->page_restrictions ) {
			$out .= '    ' . wfElement( 'restrictions', array(),
				strval( $row->page_restrictions ) ) . "\n";
		}
		return $out;
	}

	/**
	 * Closes a <page> section on the output stream.
	 *
	 * @access private
	 */
	function closePage() {
		return "  </page>\n";
	}

	/**
	 * Dumps a <revision> section on the output stream, with
	 * data filled in from the given database row.
	 *
	 * @param object $row
	 * @return string
	 * @access private
	 */
	function writeRevision( $row ) {
		$fname = 'XmlDumpWriter::writeRevision';
		wfProfileIn( $fname );

		$out = "    <revision>\n";
		$out .= "      " . wfElement( 'id', null, strval( $row->rev_id ) ) . "\n";

		$ts = wfTimestamp( TS_ISO_8601, $row->rev_timestamp );
		$out .= "      " . wfElement( 'timestamp', null, $ts ) . "\n";

		if( $row->rev_deleted & Revision::DELETED_USER ) {
			$out .= "      " . wfElement( 'contributor', array( 'deleted' => 'deleted' ) ) . "\n";
		} else {
			$out .= "      <contributor>\n";
			if( $row->rev_user ) {
				$out .= "        " . wfElementClean( 'username', null, strval( $row->rev_user_text ) ) . "\n";
				$out .= "        " . wfElement( 'id', null, strval( $row->rev_user ) ) . "\n";
			} else {
				$out .= "        " . wfElementClean( 'ip', null, strval( $row->rev_user_text ) ) . "\n";
			}
			$out .= "      </contributor>\n";
		}

		if( $row->rev_minor_edit ) {
			$out .= "      <minor/>\n";
		}
		if( $row->rev_deleted & Revision::DELETED_COMMENT ) {
			$out .= "      " . wfElement( 'comment', array( 'deleted' => 'deleted' ) ) . "\n";
		} elseif( $row->rev_comment != '' ) {
			$out .= "      " . wfElementClean( 'comment', null, strval( $row->rev_comment ) ) . "\n";
		}

		if( $row->rev_deleted & Revision::DELETED_TEXT ) {
			$out .= "      " . wfElement( 'text', array( 'deleted' => 'deleted' ) ) . "\n";
		} elseif( isset( $row->old_text ) ) {
			// Raw text from the database may have invalid chars
			$text = strval( Revision::getRevisionText( $row ) );
			$out .= "      " . wfElementClean( 'text',
				array( 'xml:space' => 'preserve' ),
				strval( $text ) ) . "\n";
		} else {
			// Stub output
			$out .= "      " . wfElement( 'text',
				array( 'id' => $row->rev_text_id ),
				"" ) . "\n";
		}

		$out .= "    </revision>\n";

		wfProfileOut( $fname );
		return $out;
	}

}


/**
 * Base class for output stream; prints to stdout or buffer or wherever.
 */
class DumpOutput {
	function writeOpenStream( $string ) {
		$this->write( $string );
	}

	function writeCloseStream( $string ) {
		$this->write( $string );
	}

	function writeOpenPage( $page, $string ) {
		$this->write( $string );
	}

	function writeClosePage( $string ) {
		$this->write( $string );
	}

	function writeRevision( $rev, $string ) {
		$this->write( $string );
	}

	/**
	 * Override to write to a different stream type.
	 */
	function write( $string ) {
		print $string;
	}
}
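
/*
 * Illustrative sketch: a custom sink overriding write(), as the comment above
 * suggests. The class name is hypothetical; it simply redirects everything to
 * the PHP error log instead of stdout.
 *
 *   class DumpErrorLogOutput extends DumpOutput {
 *       function write( $string ) {
 *           error_log( $string );
 *       }
 *   }
 */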

/**
 * Stream outputter to send data to a file.
 */
class DumpFileOutput extends DumpOutput {
	var $handle;

	function DumpFileOutput( $file ) {
		$this->handle = fopen( $file, "wt" );
	}

	function write( $string ) {
		fputs( $this->handle, $string );
	}
}

/**
 * Stream outputter to send data to a file via some filter program.
 * Even if compression is available in a library, using a separate
 * program can allow us to make use of a multi-processor system.
 */
class DumpPipeOutput extends DumpFileOutput {
	function DumpPipeOutput( $command, $file = null ) {
		if( !is_null( $file ) ) {
			$command .= " > " . wfEscapeShellArg( $file );
		}
		$this->handle = popen( $command, "w" );
	}
}
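
/*
 * Illustrative usage sketch: piping through an arbitrary external filter
 * program. The command just needs to accept the dump on stdin; the optional
 * second argument redirects its stdout to a file.
 *
 *   $sink = new DumpPipeOutput( 'gzip -9', 'dump.xml.gz' );
 */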

/**
 * Sends dump output via the gzip compressor.
 */
class DumpGZipOutput extends DumpPipeOutput {
	function DumpGZipOutput( $file ) {
		parent::DumpPipeOutput( "gzip", $file );
	}
}

/**
 * Sends dump output via the bzip2 compressor.
 */
class DumpBZip2Output extends DumpPipeOutput {
	function DumpBZip2Output( $file ) {
		parent::DumpPipeOutput( "bzip2", $file );
	}
}

/**
 * Sends dump output via the p7zip compressor.
 */
class Dump7ZipOutput extends DumpPipeOutput {
	function Dump7ZipOutput( $file ) {
		$command = "7za a -bd -si " . wfEscapeShellArg( $file );
		// Suppress annoying useless crap from p7zip
		// Unfortunately this could suppress real error messages too
		$command .= " >/dev/null 2>&1";
		parent::DumpPipeOutput( $command );
	}
}


/**
 * Dump output filter class.
 * This just does output filtering and streaming; XML formatting is done
 * higher up, so be careful in what you do.
 */
class DumpFilter {
	function DumpFilter( &$sink ) {
		$this->sink =& $sink;
	}

	function writeOpenStream( $string ) {
		$this->sink->writeOpenStream( $string );
	}

	function writeCloseStream( $string ) {
		$this->sink->writeCloseStream( $string );
	}

	function writeOpenPage( $page, $string ) {
		$this->sendingThisPage = $this->pass( $page, $string );
		if( $this->sendingThisPage ) {
			$this->sink->writeOpenPage( $page, $string );
		}
	}

	function writeClosePage( $string ) {
		if( $this->sendingThisPage ) {
			$this->sink->writeClosePage( $string );
			$this->sendingThisPage = false;
		}
	}

	function writeRevision( $rev, $string ) {
		if( $this->sendingThisPage ) {
			$this->sink->writeRevision( $rev, $string );
		}
	}

	/**
	 * Override for page-based filter types.
	 * @return bool
	 */
	function pass( $page ) {
		return true;
	}
}
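
/*
 * Illustrative sketch: a page-based filter in the style of the subclasses
 * below. The class name is hypothetical; it passes only main-namespace pages.
 *
 *   class DumpMainOnlyFilter extends DumpFilter {
 *       function pass( $page ) {
 *           return $page->page_namespace == NS_MAIN;
 *       }
 *   }
 */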

/**
 * Simple dump output filter to exclude all talk pages.
 */
class DumpNotalkFilter extends DumpFilter {
	function pass( $page ) {
		return !Namespace::isTalk( $page->page_namespace );
	}
}

/**
 * Dump output filter to include or exclude pages in a given set of namespaces.
 */
class DumpNamespaceFilter extends DumpFilter {
	var $invert = false;
	var $namespaces = array();

	function DumpNamespaceFilter( &$sink, $param ) {
		parent::DumpFilter( $sink );

		$constants = array(
			"NS_MAIN"           => NS_MAIN,
			"NS_TALK"           => NS_TALK,
			"NS_USER"           => NS_USER,
			"NS_USER_TALK"      => NS_USER_TALK,
			"NS_PROJECT"        => NS_PROJECT,
			"NS_PROJECT_TALK"   => NS_PROJECT_TALK,
			"NS_IMAGE"          => NS_IMAGE,
			"NS_IMAGE_TALK"     => NS_IMAGE_TALK,
			"NS_MEDIAWIKI"      => NS_MEDIAWIKI,
			"NS_MEDIAWIKI_TALK" => NS_MEDIAWIKI_TALK,
			"NS_TEMPLATE"       => NS_TEMPLATE,
			"NS_TEMPLATE_TALK"  => NS_TEMPLATE_TALK,
			"NS_HELP"           => NS_HELP,
			"NS_HELP_TALK"      => NS_HELP_TALK,
			"NS_CATEGORY"       => NS_CATEGORY,
			"NS_CATEGORY_TALK"  => NS_CATEGORY_TALK );

		if( $param{0} == '!' ) {
			$this->invert = true;
			$param = substr( $param, 1 );
		}

		foreach( explode( ',', $param ) as $key ) {
			$key = trim( $key );
			if( isset( $constants[$key] ) ) {
				$ns = $constants[$key];
				$this->namespaces[$ns] = true;
			} elseif( is_numeric( $key ) ) {
				$ns = intval( $key );
				$this->namespaces[$ns] = true;
			} else {
				throw new MWException( "Unrecognized namespace key '$key'\n" );
			}
		}
	}

	function pass( $page ) {
		$match = isset( $this->namespaces[$page->page_namespace] );
		return $this->invert xor $match;
	}
}
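
/*
 * Illustrative usage sketch of the $param syntax accepted above: symbolic
 * constants and/or numeric keys, comma-separated, with a leading "!"
 * inverting the selection.
 *
 *   $only = new DumpNamespaceFilter( $sink, "NS_MAIN,NS_CATEGORY" );   // include only these
 *   $skip = new DumpNamespaceFilter( $sink, "!NS_USER,NS_USER_TALK" ); // exclude these
 *   $byId = new DumpNamespaceFilter( $sink, "0,14" );                  // numeric keys work too
 */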


/**
 * Dump output filter to include only the last revision in each page sequence.
 */
class DumpLatestFilter extends DumpFilter {
	var $page, $pageString, $rev, $revString;

	function writeOpenPage( $page, $string ) {
		$this->page = $page;
		$this->pageString = $string;
	}

	function writeClosePage( $string ) {
		if( $this->rev ) {
			$this->sink->writeOpenPage( $this->page, $this->pageString );
			$this->sink->writeRevision( $this->rev, $this->revString );
			$this->sink->writeClosePage( $string );
		}
		$this->rev = null;
		$this->revString = null;
		$this->page = null;
		$this->pageString = null;
	}

	function writeRevision( $rev, $string ) {
		if( $rev->rev_id == $this->page->page_latest ) {
			$this->rev = $rev;
			$this->revString = $string;
		}
	}
}

/**
 * Writes the same dump output to an arbitrary number of sinks at once.
 */
class DumpMultiWriter {
	function DumpMultiWriter( $sinks ) {
		$this->sinks = $sinks;
		$this->count = count( $sinks );
	}

	function writeOpenStream( $string ) {
		for( $i = 0; $i < $this->count; $i++ ) {
			$this->sinks[$i]->writeOpenStream( $string );
		}
	}

	function writeCloseStream( $string ) {
		for( $i = 0; $i < $this->count; $i++ ) {
			$this->sinks[$i]->writeCloseStream( $string );
		}
	}

	function writeOpenPage( $page, $string ) {
		for( $i = 0; $i < $this->count; $i++ ) {
			$this->sinks[$i]->writeOpenPage( $page, $string );
		}
	}

	function writeClosePage( $string ) {
		for( $i = 0; $i < $this->count; $i++ ) {
			$this->sinks[$i]->writeClosePage( $string );
		}
	}

	function writeRevision( $rev, $string ) {
		for( $i = 0; $i < $this->count; $i++ ) {
			$this->sinks[$i]->writeRevision( $rev, $string );
		}
	}
}
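
/*
 * Illustrative usage sketch: fanning the same dump out to gzip- and
 * bzip2-compressed files at once.
 *
 *   $writer = new DumpMultiWriter( array(
 *       new DumpGZipOutput( 'dump.xml.gz' ),
 *       new DumpBZip2Output( 'dump.xml.bz2' ) ) );
 *   $exporter->setOutputSink( $writer );
 */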

function xmlsafe( $string ) {
	$fname = 'xmlsafe';
	wfProfileIn( $fname );

	/**
	 * The page may contain old data which has not been properly normalized.
	 * Invalid UTF-8 sequences or forbidden control characters will make our
	 * XML output invalid, so be sure to strip them out.
	 */
	$string = UtfNormal::cleanUp( $string );

	$string = htmlspecialchars( $string );
	wfProfileOut( $fname );
	return $string;
}

?>