@@ -21,7 +21,7 @@ class Crawler:
         self.urls = set([url])
         self.visited = set([url])
         self.exts = ['htm', 'php']
-        self.allowed_regex = '(\w+)\.((?!htm)(?!rar)\w+)$'
+        self.allowed_regex = '\.((?!htm)(?!php)\w+)$'
 
     def set_exts(self, exts):
         self.exts = exts
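The new default pattern drops the leading `(\w+)` group and excludes `php` instead of `rar`, so it now lines up with the `['htm', 'php']` extension list just above it. A quick sketch of what the pattern does; the hunk doesn't show how `self.regex` is applied, so treating a match as "URL to filter out" is an assumption:

    import re

    # Updated default: matches a trailing extension only when it does
    # NOT start with 'htm' or 'php' (negative lookaheads).
    allowed_regex = re.compile(r'\.((?!htm)(?!php)\w+)$')

    print(bool(allowed_regex.search('http://example.com/page.html')))  # False
    print(bool(allowed_regex.search('http://example.com/index.php')))  # False
    print(bool(allowed_regex.search('http://example.com/photo.jpg')))  # True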
@@ -33,7 +33,7 @@ class Crawler:
         allowed_regex = ''
         for ext in self.exts:
             allowed_regex += '(!{})'.format(ext)
-        self.allowed_regex = '(\w+)\.({}\w+)$'.format(allowed_regex)
+        self.allowed_regex = '\.({}\w+)$'.format(allowed_regex)
 
     def crawl(self):
         self.regex = re.compile(self.allowed_regex)
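One thing worth flagging in the unchanged context lines: `'(!{})'.format(ext)` emits literal groups like `(!htm)(!php)`, which match the characters `!htm`, not negative lookaheads, so calling `set_exts()` and then rebuilding the pattern this way yields a regex that essentially never matches. To reproduce the `(?!htm)(?!php)` form used by the new default above, the loop would need `'(?!{})'`. A hypothetical corrected builder (`build_allowed_regex` is my name for illustration, not the project's):

    import re

    def build_allowed_regex(exts):
        # One negative lookahead per allowed extension, e.g.
        # ['htm', 'php'] -> r'\.((?!htm)(?!php)\w+)$'
        lookaheads = ''.join('(?!{})'.format(ext) for ext in exts)
        return r'\.({}\w+)$'.format(lookaheads)

    pattern = re.compile(build_allowed_regex(['htm', 'php']))
    print(bool(pattern.search('page.html')))    # False: 'html' is blocked by (?!htm)
    print(bool(pattern.search('archive.rar')))  # True: 'rar' passes both lookaheads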
@@ -70,7 +70,7 @@ class Crawler:
 
     def write_xml(self):
         of = open(self.outputfile, 'w')
-        of.write('<?xml version="1.0" encoding="utf-8"?><!--Generated by Screaming Frog SEO Spider 2,55-->\n')
+        of.write('<?xml version="1.0" encoding="utf-8"?>\n')
         of.write('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">\n')
         url_str = '<url><loc>{}</loc></url>\n'
         while self.visited:
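The hunk cuts off at the writing loop, so what follows is a guess: presumably each pass pops a visited URL into `url_str` and a closing `</urlset>` comes after the loop. A minimal standalone sketch under those assumptions (`visited` and the output filename are stand-ins):

    # Minimal sketch of the sitemap writer; the loop body and the closing
    # tag are assumptions, since the hunk ends at 'while self.visited:'.
    visited = {'http://example.com/', 'http://example.com/about.html'}
    url_str = '<url><loc>{}</loc></url>\n'

    with open('sitemap.xml', 'w') as of:
        of.write('<?xml version="1.0" encoding="utf-8"?>\n')
        of.write('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n')
        while visited:
            of.write(url_str.format(visited.pop()))
        of.write('</urlset>')

The `with` block also closes the file if an error is raised mid-write, which the bare `open(self.outputfile, 'w')` in the hunk does not.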