* Allow wiki links in "protect-robotspolicy", I imagine people are likely to want...
diff --git a/includes/User.php b/includes/User.php
index 4ecd49d..9b62e2d 100644
--- a/includes/User.php
+++ b/includes/User.php
@@ -531,6 +531,12 @@ class User {
                global $wgContLang;
                $name = $wgContLang->ucfirst( $name );
 
+               # Reject names containing '#'; title normalisation would
+               # strip the fragment, and by then it is too late for other
+               # checks to catch it
+               if( strpos( $name, '#' ) !== false )
+                       return false;
+
                # Clean up name according to title rules
                $t = Title::newFromText( $name );
                if( is_null( $t ) ) {
@@ -2132,6 +2138,8 @@ class User {
                // extra options or other effects on the parser cache.
                wfRunHooks( 'PageRenderingHash', array( &$confstr ) );
 
+               // Make it a valid memcached key fragment: memcached keys may not contain spaces
+               $confstr = str_replace( ' ', '_', $confstr );
                $this->mHash = $confstr;
                return $confstr;
        }
@@ -2141,6 +2149,17 @@ class User {
                return $this->mBlock && $this->mBlock->mCreateAccount;
        }
 
+       /**
+        * Determine if the user is blocked from using Special:Emailuser.
+        *
+        * @public
+        * @return boolean
+        */
+       function isBlockedFromEmailuser() {
+               $this->getBlockedStatus();
+               return $this->mBlock && $this->mBlock->mBlockEmail;
+       }
+
        function isAllowedToCreateAccount() {
                return $this->isAllowed( 'createaccount' ) && !$this->isBlockedFromCreateAccount();
        }