*/
class MediaWikiPageNameNormalizer {
+	/**
+	 * Http helper used to perform the external API request;
+	 * injectable so tests can substitute a stub.
+	 *
+	 * @var Http
+	 */
+	private $http;
+
+	/**
+	 * @param Http|null $http Http helper to use, or null to construct a default one.
+	 */
+	public function __construct( Http $http = null ) {
+		// An Http object is always truthy, so comparing against null is
+		// equivalent to the previous !$http guard for an Http|null value.
+		$this->http = $http === null ? new Http() : $http;
+	}
+
/**
* Returns the normalized form of the given page title, using the
* normalization rules of the given site. If the given title is a redirect,
$pageName = Validator::cleanUp( $pageName );
// Build the args for the specific call
- $args = array(
+ $args = [
'action' => 'query',
'prop' => 'info',
'redirects' => true,
// but that maxage can avoid the extreme delay. On the other hand
// maxage could be nice to use anyhow as it stops unnecessary requests.
// Also consider smaxage if maxage is used.
- );
+ ];
$url = wfAppendQuery( $apiUrl, $args );
// Go on call the external site
// @todo we need a good way to specify a timeout here.
- $ret = Http::get( $url, array(), __METHOD__ );
+ $ret = $this->http->get( $url, [], __METHOD__ );
if ( $ret === false ) {
wfDebugLog( "MediaWikiSite", "call to external site failed: $url" );
return false;
}
// Loop over the tree different named structures, that otherwise are similar
- $structs = array(
+ $structs = [
'normalized' => 'from',
'converted' => 'from',
'redirects' => 'from',
'pages' => 'title'
- );
+ ];
foreach ( $structs as $listId => $fieldId ) {
// Check if the substructure exist at all.
if ( !isset( $externalData['query'][$listId] ) ) {