mirror of https://github.com/dkam/probot.git (synced 2025-12-28 09:14:53 +00:00)
Gemspec adds the version, but tests also need it.
@@ -2,5 +2,6 @@
 $LOAD_PATH.unshift File.expand_path("../lib", __dir__)
 require "probot"
+require "probot/version" # for testing the version number - otherwise the gemspec does it.
 
 require "minitest/autorun"
 
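The added require exists so the suite can assert against Probot::VERSION directly. A minimal sketch of the kind of test this enables, assuming the conventional `Probot::VERSION` constant in `lib/probot/version.rb`; the test name and assertions are illustrative, not copied from the repo:

```ruby
require "probot"
require "probot/version" # not pulled in by "probot" itself, per the commit above

require "minitest/autorun"

class TestProbotVersion < Minitest::Test
  def test_version_is_defined
    # Hypothetical check: the constant exists and looks like a release number.
    refute_nil Probot::VERSION
    assert_match(/\A\d+(\.\d+)*\z/, Probot::VERSION)
  end
end
```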
@@ -100,13 +100,13 @@ class TestProbot < Minitest::Test
 
   def test_some_tests
     TEST_CASES.each_with_index do |test_case, ind|
-      r = Robots.new(test_case[:txt])
+      r = Probot.new(test_case[:txt])
 
       assert_equal test_case[:found_agents], r.found_agents, "found_agents for test #{ind}"
       assert_equal test_case[:sitemap], r.sitemap, "sitemap for test #{ind}"
 
       test_case[:tests].each do |tst|
-        r = Robots.new(test_case[:txt], agent: tst[:agent])
+        r = Probot.new(test_case[:txt], agent: tst[:agent])
 
         tst[:allowed].each do |url|
           assert r.allowed?(url), "expected #{url} to be allowed, for agent #{tst[:agent]} | test #{ind}"
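For readers following along, the shape of a TEST_CASES entry can be inferred from the keys the loop reads (:txt, :found_agents, :sitemap, and nested :tests with :agent and :allowed). A hypothetical entry with illustrative values only:

```ruby
# Illustrative only: the key names come from the loop above; the values and
# the exact type of :sitemap (string vs. array) are assumptions, not repo data.
TEST_CASES = [
  {
    txt: %(User-agent: *\nDisallow: /private/\nSitemap: https://example.com/map.xml),
    found_agents: ["*"],
    sitemap: ["https://example.com/map.xml"],
    tests: [
      {agent: "Curl", allowed: ["/public/page"]}
    ]
  }
]
```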
@@ -121,24 +121,24 @@ class TestProbot < Minitest::Test
 
   # https://developers.google.com/search/docs/crawling-indexing/robots/robots_txt#url-matching-based-on-path-values
   def test_googles_tests
-    assert Robots.new(%(allow: /p\ndisallow: /)).matching_rule("https://example.com/page") == {allow: /\/p/}
-    assert Robots.new(%(allow: /folder\ndisallow: /folder)).matching_rule("https://example.com/folder/page") == {allow: /\/folder/}
-    assert Robots.new(%(allow: /page\ndisallow: /*.htm)).matching_rule("https://example.com/page.htm") == {disallow: /\/.*\.htm/}
-    assert Robots.new(%(allow: /page\ndisallow: /*.ph)).matching_rule("https://example.com/page.php5") == {disallow: /\/.*\.ph/} # FAIL
-    assert Robots.new(%(allow: /$\ndisallow: /)).matching_rule("https://example.com/") == {allow: /\/$/}
-    assert Robots.new(%(allow: /$\ndisallow: /)).matching_rule("https://example.com/page.htm") == {disallow: /\//}
+    assert Probot.new(%(allow: /p\ndisallow: /)).matching_rule("https://example.com/page") == {allow: /\/p/}
+    assert Probot.new(%(allow: /folder\ndisallow: /folder)).matching_rule("https://example.com/folder/page") == {allow: /\/folder/}
+    assert Probot.new(%(allow: /page\ndisallow: /*.htm)).matching_rule("https://example.com/page.htm") == {disallow: /\/.*\.htm/}
+    assert Probot.new(%(allow: /page\ndisallow: /*.ph)).matching_rule("https://example.com/page.php5") == {disallow: /\/.*\.ph/} # FAIL
+    assert Probot.new(%(allow: /$\ndisallow: /)).matching_rule("https://example.com/") == {allow: /\/$/}
+    assert Probot.new(%(allow: /$\ndisallow: /)).matching_rule("https://example.com/page.htm") == {disallow: /\//}
   end
 
   def test_empty_allow_disallow
-    assert Robots.new(%(User-agent: *\nAllow:)).rules.dig("*", "allow").empty?
-    assert Robots.new(%(User-agent: *\nDisallow:)).rules.dig("*", "disallow").empty?
+    assert Probot.new(%(User-agent: *\nAllow:)).rules.dig("*", "allow").empty?
+    assert Probot.new(%(User-agent: *\nDisallow:)).rules.dig("*", "disallow").empty?
   end
 
   def test_consecutive_user_agents
     txt = %(User-agent: Curl
 User-agent: Wget
 Disallow: /url)
-    r = Robots.new(txt)
+    r = Probot.new(txt)
     assert r.allowed?("/url") == true
 
     r.agent = "Curl"
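The Google-spec tests above hinge on longest-match precedence: when several patterns match a URL, the most specific (longest) rule wins, and an allow beats a disallow of equal length. A standalone sketch of the same behaviour, using only the `matching_rule` and `allowed?` calls exercised in the test:

```ruby
require "probot"

r = Probot.new(%(allow: /folder\ndisallow: /folder))
r.matching_rule("https://example.com/folder/page") # => {allow: /\/folder/}
r.allowed?("https://example.com/folder/page")      # => true, allow wins the equal-length tie
```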
@@ -152,7 +152,7 @@ class TestProbot < Minitest::Test
   end
 
   def test_unfound_robots
-    r = Robots.new("")
+    r = Probot.new("")
     assert r.allowed?("/url") == true
     r.agent = "Curl"
     assert r.allowed?("/url") == true
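test_unfound_robots pins down the default-allow behaviour: an empty robots.txt yields no rules, so every URL is allowed regardless of which agent is active. Standalone, using only calls that appear in the diff:

```ruby
require "probot"

r = Probot.new("")  # empty robots.txt: no rules at all
r.allowed?("/url")  # => true
r.agent = "Curl"    # switch the active user-agent
r.allowed?("/url")  # => true, still nothing to disallow
```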
@@ -161,7 +161,7 @@ class TestProbot < Minitest::Test
   def test_more_other_tests
     txt = %(User-agent: rubytest\nDisallow: /no-dir/\nDisallow: /no-page.php\nDisallow: /*-no-dir/\nDisallow: /dir/*.php\nDisallow: *?var\nDisallow: /dir/*?var\n\n# this is a test\nuseragent: *\ndisalow: /test/\n\nsitemap: /sitemapxml.xml\n\n )
 
-    r = Robots.new(txt, agent: "rubytest")
+    r = Probot.new(txt, agent: "rubytest")
     assert r.allowed?("/dir/page.php") == false
     assert r.allowed?("/dir/home.php") == false
     assert r.allowed?("/dir/page") == true
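The final hunk exercises wildcard patterns. Consistent with the earlier assertions (where Disallow: /*.htm compiled to /\/.*\.htm/), a `*` in a rule matches any run of characters, so /dir/*.php blocks any .php path under /dir/ for the rubytest agent. A standalone sketch:

```ruby
require "probot"

txt = %(User-agent: rubytest\nDisallow: /dir/*.php)
r = Probot.new(txt, agent: "rubytest")
r.allowed?("/dir/page.php") # => false, "*" matches "page"
r.allowed?("/dir/page")     # => true, no ".php" suffix
```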