* Added a test for a link with multiple pipes
[lhc/web/wiklou.git] / includes / SearchUpdate.php
1 <?php
2 /**
3 * See deferred.txt
4 * @package MediaWiki
5 */
6
/**
 * Deferred update that rebuilds the search-index entry (title and text)
 * for a single page.
 * @package MediaWiki
 */
class SearchUpdate {

	/* private */ var $mId = 0, $mNamespace, $mTitle, $mText;
	/* private */ var $mTitleWords;

	/**
	 * @param int $id Page ID whose search-index entry should be refreshed.
	 * @param string $title Page title text (may include a namespace prefix);
	 *   parsed with Title::newFromText. If it fails to parse, the update is
	 *   disabled ($mId stays 0) and a debug message is logged.
	 * @param string|bool $text New page text, or literal false (the default)
	 *   to update only the title portion of the index.
	 */
	function SearchUpdate( $id, $title, $text = false ) {
		$nt = Title::newFromText( $title );
		if( $nt ) {
			$this->mId = $id;
			$this->mText = $text;

			$this->mNamespace = $nt->getNamespace();
			$this->mTitle = $nt->getText(); # Discard namespace

			$this->mTitleWords = $this->mTextWords = array();
		} else {
			wfDebug( "SearchUpdate object created with invalid title '$title'\n" );
		}
	}

	/**
	 * Perform the deferred index update: normalize the page text into a
	 * plain word stream (markup, URLs and wiki syntax stripped) and hand it
	 * to the configured SearchEngine backend.
	 *
	 * @return bool|void false if updates are disabled or the title was
	 *   invalid; otherwise no return value.
	 */
	function doUpdate() {
		global $wgDBminWordLen, $wgContLang, $wgDisableSearchUpdate;

		# $mId == 0 means the constructor got an unparseable title.
		if( $wgDisableSearchUpdate || !$this->mId ) {
			return false;
		}
		$fname = 'SearchUpdate::doUpdate';
		wfProfileIn( $fname );

		require_once( 'SearchEngine.php' );
		$search =& SearchEngine::create();
		$lc = $search->legalSearchChars() . '&#;';

		# BUG FIX: must be a strict check against the literal false sentinel.
		# With loose comparison (==), a page whose new text is '' or '0'
		# compared equal to false and its text was silently never indexed.
		if( $this->mText === false ) {
			$search->updateTitle($this->mId,
				Title::indexTitle( $this->mNamespace, $this->mTitle ));
			wfProfileOut( $fname );
			return;
		}

		# Language-specific strip/conversion
		$text = $wgContLang->stripForSearch( $this->mText );

		wfProfileIn( $fname.'-regexps' );
		$text = preg_replace( "/<\\/?\\s*[A-Za-z][A-Za-z0-9]*\\s*([^>]*?)>/",
			' ', strtolower( " " . $text /*$this->mText*/ . " " ) ); # Strip HTML markup
		$text = preg_replace( "/(^|\\n)\\s*==\\s+([^\\n]+)\\s+==\\s/sD",
			"\\2 \\2 \\2 ", $text ); # Emphasize headings by tripling their words

		# Strip external URLs: a bare URL is dropped entirely, a bracketed
		# URL keeps its (optional) link description text.
		$uc = "A-Za-z0-9_\\/:.,~%\\-+&;#?!=()@\\xA0-\\xFF";
		$protos = "http|https|ftp|mailto|news|gopher";
		$pat = "/(^|[^\\[])({$protos}):[{$uc}]+([^{$uc}]|$)/";
		$text = preg_replace( $pat, "\\1 \\3", $text );

		$p1 = "/([^\\[])\\[({$protos}):[{$uc}]+]/";
		$p2 = "/([^\\[])\\[({$protos}):[{$uc}]+\\s+([^\\]]+)]/";
		$text = preg_replace( $p1, "\\1 ", $text );
		$text = preg_replace( $p2, "\\1 \\3 ", $text );

		# Internal image links: keep the file name (minus extension) as a word
		$pat2 = "/\\[\\[image:([{$uc}]+)\\.(gif|png|jpg|jpeg)([^{$uc}])/i";
		$text = preg_replace( $pat2, " \\1 \\3", $text );

		$text = preg_replace( "/([^{$lc}])([{$lc}]+)]]([a-z]+)/",
			"\\1\\2 \\2\\3", $text ); # Handle [[game]]s: index both "game" and "games"

		# Strip all remaining non-search characters
		$text = preg_replace( "/[^{$lc}]+/", " ", $text );

		# Handle 's, s'
		#
		# $text = preg_replace( "/([{$lc}]+)'s /", "\\1 \\1's ", $text );
		# $text = preg_replace( "/([{$lc}]+)s' /", "\\1s ", $text );
		#
		# These tail-anchored regexps are insanely slow. The worst case comes
		# when Japanese or Chinese text (ie, no word spacing) is written on
		# a wiki configured for Western UTF-8 mode. The Unicode characters are
		# expanded to hex codes and the "words" are very long paragraph-length
		# monstrosities. On a large page the above regexps may take over 20
		# seconds *each* on a 1GHz-level processor.
		#
		# Following are reversed versions which are consistently fast
		# (about 3 milliseconds on 1GHz-level processor).
		#
		$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
		$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );

		# Strip wiki '' and '''
		$text = preg_replace( "/''[']*/", " ", $text );
		wfProfileOut( "$fname-regexps" );
		$search->update($this->mId, Title::indexTitle( $this->mNamespace, $this->mTitle ),
			$text);
		wfProfileOut( $fname );
	}
}
107
/**
 * Placeholder subclass retained for backwards compatibility; adds no
 * behaviour of its own and defers entirely to SearchUpdate.
 * @package MediaWiki
 */
class SearchUpdateMyISAM extends SearchUpdate {
	# Inherits everything
}
115
116 ?>