/**
 * Scores a post by its number of links relative to its text length.
 *
 * @param ISblamPost $p post under test
 * @return array|NULL array(score, certainty, reason), or NULL when the
 *                    post can't be judged (no link info, or empty post
 *                    with no links).
 */
function testPost(ISblamPost $p)
{
    $links = $p->getLinks();
    if ($links === NULL) {
        return NULL;
    }

    $linkcount = count($links);
    // The author URI is tallied separately: it may be unrelated to the
    // post's contents, so it shouldn't skew the link/words ratio below.
    $authorlink = $p->getAuthorURI() ? 1 : 0;
    $total = $linkcount + $authorlink;

    if ($total == 0) {
        if (strlen($p->getText()) > 20) {
            return array(-0.5, self::CERTAINITY_NORMAL, "No links");
        }
        // Don't give the no-links bonus to posts with no content — no
        // content is abnormal and may be another way to spam.
        return NULL;
    }
    if ($total == 1) {
        return array(0.1, self::CERTAINITY_LOW, "Single link");
    }
    if ($total == 2) {
        return array(0.2, self::CERTAINITY_LOW, "Two links");
    }

    // Word count: split on non-word runs and on URLs (URLs don't count as
    // words). Capped at 500 chunks via preg_split's limit.
    $numwords = count(preg_split('![^a-z0-9\\x7F-\\xFF-]+|https?://[^\\]\\[\\s\'"<>]+!i', $p->getText(), 500, PREG_SPLIT_NO_EMPTY));

    // Long posts may legitimately have more links; no hard limits are set,
    // because wiki pages may contain lots of links. The +10 damps the ratio
    // for very short texts.
    $ratio = round($linkcount * 100 / (10 + $numwords));

    if ($ratio > 22) {
        return array(0.45, self::CERTAINITY_NORMAL, "Flooded with links (A{$ratio}: {$linkcount} per {$numwords} words)");
    }
    if ($ratio > 17) {
        return array(0.35, self::CERTAINITY_NORMAL, "Flooded with links (B{$ratio}: {$linkcount} per {$numwords} words)");
    }
    if ($ratio > 12) {
        return array(0.25, self::CERTAINITY_NORMAL, "Flooded with links (C{$ratio}: {$linkcount} per {$numwords} words)");
    }
    if ($ratio > 6) {
        return array(0.25, self::CERTAINITY_NORMAL, "Lots of links (D{$ratio}: {$linkcount} per {$numwords} words)");
    }
    return array(0.25, self::CERTAINITY_LOW, "Some links (E{$ratio}: {$linkcount} per {$numwords} words)");
}
/**
 * Scores a post by counting banned (blocklisted) keywords in its text,
 * its link labels, and all URLs it carries.
 *
 * Fix: getLinks() may return NULL for a post with no links (the link-ratio
 * test checks for exactly that) — the original iterated it unguarded,
 * causing a foreach-over-null warning. Now guarded.
 *
 * @param ISblamPost $p post under test
 * @return array|NULL list of array(score, certainty, reason) entries,
 *                    or NULL when no banned keywords were found.
 */
function testPost(ISblamPost $p)
{
    // Lazily load the keyword blocklist on first use.
    if ($this->keywords === NULL) {
        $this->importBlocklist2($this->blocklist);
    }
    if (!count($this->keywords)) {
        return NULL;
    }

    // Keyword hits in the post body + author name.
    $res1 = $this->testText($p->getText() . ' ' . $p->getAuthorName());

    $res2 = 0;      // hits in link labels
    $res3 = 0;      // hits in URLs (filled by str_replace below)
    $alluris = '';  // all URLs/emails concatenated, lowercased

    if ($uri = $p->getAuthorURI()) {
        $alluris .= strtolower($uri);
    }
    if ($uri = $p->getAuthorEmail()) {
        $alluris .= ' ' . strtolower($uri);
    }

    // Guard: getLinks() returns NULL when the post has no links.
    $links = $p->getLinks();
    if ($links !== NULL) {
        foreach ($links as $link) {
            if ($label = $link->getLabel()) {
                $res2 += count(array_intersect($this->getKeywordsFromText($label), $this->keywords));
            }
            if ($uri = $link->getURI()) {
                $alluris .= ' ' . strtolower($uri);
            }
        }
    }

    // Replace each keyword with itself purely to get str_replace's
    // occurrence count (4th arg) — counts keyword occurrences in URLs.
    str_replace($this->keywords, $this->keywords, $alluris, $res3);

    $sum = $res1 + $res2 + $res3;
    if (!$sum) {
        return NULL;
    }

    // 1.2 - 1/n asymptotically approaches 1.2 as the hit count grows;
    // multiple hits across categories raise certainty.
    $out = array();
    if ($res1) {
        $out[] = array(1.2 - 1 / $res1, $sum > 2 ? self::CERTAINITY_HIGH : self::CERTAINITY_NORMAL, "Banned keywords in text ({$res1})");
    }
    if ($res2) {
        $out[] = array(1.2 - 1 / ($res2 + 1), self::CERTAINITY_HIGH, "Banned keywords in link labels ({$res2})");
    }
    if ($res3) {
        $out[] = array(1.2 - 1 / $res3, $sum > 2 ? self::CERTAINITY_HIGH : self::CERTAINITY_NORMAL, "Banned keywords in URLs ({$res3})");
    }
    if (count($out)) {
        return $out;
    }
}
/**
 * Gathers every textual field of a post into one string and extracts
 * words/phrases from it.
 *
 * Both the raw content and the URL-decoded stripped text are included, to
 * find more phrases (the resulting word count doesn't matter here).
 *
 * @param ISblamPost $p post to extract words from
 * @return mixed whatever self::extractWords() yields for the combined text
 */
protected function extractWordsFromPost(ISblamPost $p)
{
    $pieces = array(
        $p->getRawContent(),
        rawurldecode($p->getText()),
        $p->getAuthorName(),
        $p->getAuthorEmail(),
        $p->getAuthorURI(),
    );
    return self::extractWords(implode(' ', $pieces), $this->db->ignore);
}