Removed a debug output when setting up the drop-down box for language variants
[lhc/web/wiklou.git] / includes / SearchUpdate.php
1 <?php
2 # $Id$
3 /**
4 * See deferred.doc
5 * @package MediaWiki
6 */
7
8 /**
9 *
10 * @package MediaWiki
11 */
class SearchUpdate {

	# mId: page ID to reindex; stays 0 when the title was invalid, which
	#      turns doUpdate() into a no-op.
	# mNamespace / mTitle: namespace index and namespace-stripped title text.
	# mText: new page text, or boolean false when only the title changed.
	/* private */ var $mId = 0, $mNamespace, $mTitle, $mText;
	/* private */ var $mTitleWords;

	/**
	 * PHP4-style constructor.
	 *
	 * @param int $id Page ID of the page being updated
	 * @param string $title Title text, possibly carrying a namespace prefix
	 * @param string|bool $text New page text, or false to update the title only
	 */
	function SearchUpdate( $id, $title, $text = false ) {
		$nt = Title::newFromText( $title );
		if( $nt ) {
			$this->mId = $id;
			$this->mText = $text;

			$this->mNamespace = $nt->getNamespace();
			$this->mTitle = $nt->getText(); # Discard namespace

			$this->mTitleWords = $this->mTextWords = array();
		} else {
			# mId remains 0, so doUpdate() will bail out early.
			wfDebug( "SearchUpdate object created with invalid title '$title'\n" );
		}
	}

	/**
	 * Write the new title (and, when text was supplied, the stripped and
	 * normalized text) into the searchindex table on the master database.
	 *
	 * Returns false without touching the database when search updates are
	 * disabled via $wgDisableSearchUpdate or when the title was invalid.
	 */
	function doUpdate() {
		global $wgDBminWordLen, $wgLang, $wgDisableSearchUpdate;

		if( $wgDisableSearchUpdate || !$this->mId ) {
			return false;
		}
		$fname = 'SearchUpdate::doUpdate';
		wfProfileIn( $fname );

		require_once( 'SearchEngine.php' );
		$lc = SearchEngine::legalSearchChars() . '&#;';
		$db =& wfGetDB( DB_MASTER );
		$searchindex = $db->tableName( 'searchindex' );

		# Strict comparison: the constructor's "title only" sentinel is
		# boolean false. With the old loose (==) check, a page whose new
		# text was '' or the literal string '0' took this branch and its
		# stale si_text was never replaced.
		if( $this->mText === false ) {
			# Just update the title
			$lowpri = $db->lowPriorityOption();
			$sql = "UPDATE $lowpri $searchindex SET si_title='" .
			  $db->strencode( Title::indexTitle( $this->mNamespace, $this->mTitle ) ) .
			  "' WHERE si_page={$this->mId}";
			$db->query( $sql, "SearchUpdate::doUpdate" );
			wfProfileOut( $fname );
			return;
		}

		# Language-specific strip/conversion
		$text = $wgLang->stripForSearch( $this->mText );

		wfProfileIn( $fname.'-regexps' );
		$text = preg_replace( "/<\\/?\\s*[A-Za-z][A-Za-z0-9]*\\s*([^>]*?)>/",
		  ' ', strtolower( " " . $text /*$this->mText*/ . " " ) ); # Strip HTML markup
		$text = preg_replace( "/(^|\\n)\\s*==\\s+([^\\n]+)\\s+==\\s/sD",
		  "\\2 \\2 \\2 ", $text ); # Emphasize headings

		# Strip external URLs
		$uc = "A-Za-z0-9_\\/:.,~%\\-+&;#?!=()@\\xA0-\\xFF";
		$protos = "http|https|ftp|mailto|news|gopher";
		$pat = "/(^|[^\\[])({$protos}):[{$uc}]+([^{$uc}]|$)/";
		$text = preg_replace( $pat, "\\1 \\3", $text );

		# Keep the link text of bracketed external links, drop the URL
		$p1 = "/([^\\[])\\[({$protos}):[{$uc}]+]/";
		$p2 = "/([^\\[])\\[({$protos}):[{$uc}]+\\s+([^\\]]+)]/";
		$text = preg_replace( $p1, "\\1 ", $text );
		$text = preg_replace( $p2, "\\1 \\3 ", $text );

		# Internal image links
		$pat2 = "/\\[\\[image:([{$uc}]+)\\.(gif|png|jpg|jpeg)([^{$uc}])/i";
		$text = preg_replace( $pat2, " \\1 \\3", $text );

		$text = preg_replace( "/([^{$lc}])([{$lc}]+)]]([a-z]+)/",
		  "\\1\\2 \\2\\3", $text ); # Handle [[game]]s

		# Strip all remaining non-search characters
		$text = preg_replace( "/[^{$lc}]+/", " ", $text );

		# Handle 's, s'
		#
		# $text = preg_replace( "/([{$lc}]+)'s /", "\\1 \\1's ", $text );
		# $text = preg_replace( "/([{$lc}]+)s' /", "\\1s ", $text );
		#
		# These tail-anchored regexps are insanely slow. The worst case comes
		# when Japanese or Chinese text (ie, no word spacing) is written on
		# a wiki configured for Western UTF-8 mode. The Unicode characters are
		# expanded to hex codes and the "words" are very long paragraph-length
		# monstrosities. On a large page the above regexps may take over 20
		# seconds *each* on a 1GHz-level processor.
		#
		# Following are reversed versions which are consistently fast
		# (about 3 milliseconds on 1GHz-level processor).
		#
		$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
		$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );

		# Strip wiki '' and '''
		$text = preg_replace( "/''[']*/", " ", $text );
		wfProfileOut( "$fname-regexps" );

		# Upsert the row keyed on si_page; strencode guards against the
		# title/text breaking out of the SQL string literal.
		$db->replace( $searchindex, array(array('si_page')),
		  array(
			'si_page' => $this->mId,
			'si_title' => $db->strencode( Title::indexTitle( $this->mNamespace, $this->mTitle ) ),
			'si_text' => $db->strencode( $text )
		  ), 'SearchUpdate::doUpdate' );
		wfProfileOut( $fname );
	}
}
117
118 ?>