diff --git a/phpunit.xml b/phpunit.xml
index 3e9126b..fd879dc 100644
--- a/phpunit.xml
+++ b/phpunit.xml
@@ -22,6 +22,7 @@
         <file>tests/TyposDirectiveTest.php</file>
         <file>tests/t1gorTest.php</file>
         <file>tests/MultipleUserAgentsRulesTest.php</file>
+        <file>tests/Issue13Test.php</file>
diff --git a/tests/Issue13Test.php b/tests/Issue13Test.php
new file mode 100644
index 0000000..a0f1be4
--- /dev/null
+++ b/tests/Issue13Test.php
@@ -0,0 +1,60 @@
+<?php
+
+use PHPUnit\Framework\TestCase;
+
+/**
+ * Regression test for issue #13: when an Allow and a Disallow rule both match
+ * a URL, the rule with the longer (more specific) pattern should win.
+ */
+class Issue13Test extends TestCase
+{
+    public function testIsUrlAllow()
+    {
+        $robotsTxtContentIssue = "
+User-agent: *
+Allow: /anyfolder #length 10
+Disallow: /*.html #length 2 or 7?
+";
+        $parserRobotsTxt = new RobotsTxtParser($robotsTxtContentIssue);
+        $rulesRobotsTxt = $parserRobotsTxt->getRules();
+        $robotsTxtValidator = new RobotsTxtValidator($rulesRobotsTxt);
+
+        # Allow length 10 is more significant, so the URL is allowed.
+        $this->assertTrue($robotsTxtValidator->isUrlAllow('/anyfolder.html'));
+    }
+
+    public function testIsUrlAllow2()
+    {
+        $robotsTxtContentIssue = "
+User-agent: *
+Allow: /any #length 4 exactly
+Disallow: /*.html #length 2 or 7?
+";
+        $parserRobotsTxt = new RobotsTxtParser($robotsTxtContentIssue);
+        $rulesRobotsTxt = $parserRobotsTxt->getRules();
+        $robotsTxtValidator = new RobotsTxtValidator($rulesRobotsTxt);
+
+        # Not allowed according to Google, so the Disallow pattern counts as length 7 here.
+        $this->assertFalse($robotsTxtValidator->isUrlAllow('/anyfolder.html'));
+    }
+
+    public function testIsUrlAllow3()
+    {
+        $robotsTxtContentIssue = "
+User-agent: *
+Allow: /any #length 4
+Disallow: /any* #length 5
+";
+        $parserRobotsTxt = new RobotsTxtParser($robotsTxtContentIssue);
+        $rulesRobotsTxt = $parserRobotsTxt->getRules();
+        $robotsTxtValidator = new RobotsTxtValidator($rulesRobotsTxt);
+
+        # Disallowed because the Disallow rule is longer (5 vs 4).
+        $this->assertFalse($robotsTxtValidator->isUrlAllow('/any'));
+    }
+}