author     neodarz <neodarz@neodarz.net>  2019-05-26 15:49:38 +0200
committer  neodarz <neodarz@neodarz.net>  2019-05-26 15:49:38 +0200
commit     c777eefdd0a9caceebd46237cd4170d9e07c8d2a (patch)
tree       bc27f5170e27c84867ab0b69e91ccf7621bb1de7 /pyblog
parent     7e3060cc02aacecd13ba220cf69d1b8ae2517e09 (diff)
download   my_new_personal_website-c777eefdd0a9caceebd46237cd4170d9e07c8d2a.tar.xz
           my_new_personal_website-c777eefdd0a9caceebd46237cd4170d9e07c8d2a.zip
Move generate_sitemap to external file
Diffstat (limited to 'pyblog')
-rwxr-xr-x  pyblog  33
1 file changed, 1 insertion(+), 32 deletions(-)
diff --git a/pyblog b/pyblog
index c398b932..97a88470 100755
--- a/pyblog
+++ b/pyblog
@@ -51,37 +51,6 @@ from config.config import *
from generators import generators
-def generate_sitemap(feed):
- """Generate sitemap.xml."""
- sitemap = ET.Element("urlset", xmlns="http://www.sitemaps.org/schemas/sitemap/0.9")
- # index
- sitemap.append(utils.make_sitemap_url_element(BLOG_HOME, feed.updated, "daily", 1.0))
- # other top level pages
- for name in os.listdir(BUILDDIR):
- if (not name.endswith(".html") or name == "index.html" or
- re.match("google[a-z0-9]+\.html", name)): # exclude Google's site ownership verification file
- continue
- link = urllib.parse.urljoin(BLOG_HOME, name)
- fullpath = os.path.join(BUILDDIR, name)
- # try to extract updated time
- updated = None
- with open(fullpath, encoding="utf-8") as htmlobj:
- soup = bs4.BeautifulSoup(htmlobj.read(), "lxml")
- if soup.footer is not None:
- updated_tag = soup.footer.find(attrs={"class": "updated"})
- if updated_tag is not None:
- updated = dateutil.parser.parse(updated_tag.text)
- sitemap.append(utils.make_sitemap_url_element(link, updated, "monthly", 0.9))
-
- # blog entries
- for entry in feed.entries:
- sitemap.append(utils.make_sitemap_url_element(entry.link, entry.updated, "monthly", 0.9))
- sitemappath = os.path.join(BUILDDIR, "sitemap.xml")
- with open(sitemappath, "w", encoding="utf-8") as sitemapfile:
- sitemapfile.write('<?xml version="1.0" encoding="UTF-8"?>\n%s\n' %
- ET.tostring(sitemap).decode('utf-8'))
- sys.stderr.write("wrote sitemap.xml\n")
-
def rewrite_title():
"""Override the title of some page for a better render"""
sys.stderr.write("Overriding some titles\n")
@@ -378,7 +347,7 @@ def generate_index_and_feed():
rssxml.write("%s\n" % rss.dump_rss(FEED_MAX_ENTRIES))
sys.stderr.write("wrote rss.xml\n")
- generate_sitemap(feed)
+ generators.generate_sitemap(feed)
# exclude_list is only initialized once to avoid constant disk IO
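
For reference, the relocated function presumably ends up in generators/generators.py, the module layout implied by "from generators import generators". Below is a minimal sketch of that module, reusing the removed code above; the explicit BLOG_HOME/BUILDDIR imports from config.config and the "from utils import utils" helper import are assumptions not taken from this diff, and the "utils" name is only inferred from the make_sitemap_url_element calls in pyblog.

# generators/generators.py -- sketch of the relocated function (module path assumed)
import os
import re
import sys
import urllib.parse
import xml.etree.ElementTree as ET

import bs4
import dateutil.parser

from config.config import BLOG_HOME, BUILDDIR  # assumed: pyblog does "from config.config import *"
from utils import utils  # assumed: project helper providing make_sitemap_url_element


def generate_sitemap(feed):
    """Generate sitemap.xml in BUILDDIR from the generated Atom feed."""
    sitemap = ET.Element("urlset", xmlns="http://www.sitemaps.org/schemas/sitemap/0.9")
    # index page: crawled daily, highest priority
    sitemap.append(utils.make_sitemap_url_element(BLOG_HOME, feed.updated, "daily", 1.0))
    # other top-level pages
    for name in os.listdir(BUILDDIR):
        if (not name.endswith(".html") or name == "index.html" or
                re.match(r"google[a-z0-9]+\.html", name)):  # exclude Google's site ownership verification file
            continue
        link = urllib.parse.urljoin(BLOG_HOME, name)
        fullpath = os.path.join(BUILDDIR, name)
        # try to extract the last-updated time from the page footer
        updated = None
        with open(fullpath, encoding="utf-8") as htmlobj:
            soup = bs4.BeautifulSoup(htmlobj.read(), "lxml")
        if soup.footer is not None:
            updated_tag = soup.footer.find(attrs={"class": "updated"})
            if updated_tag is not None:
                updated = dateutil.parser.parse(updated_tag.text)
        sitemap.append(utils.make_sitemap_url_element(link, updated, "monthly", 0.9))
    # blog entries listed in the feed
    for entry in feed.entries:
        sitemap.append(utils.make_sitemap_url_element(entry.link, entry.updated, "monthly", 0.9))
    sitemappath = os.path.join(BUILDDIR, "sitemap.xml")
    with open(sitemappath, "w", encoding="utf-8") as sitemapfile:
        sitemapfile.write('<?xml version="1.0" encoding="UTF-8"?>\n%s\n' %
                          ET.tostring(sitemap).decode('utf-8'))
    sys.stderr.write("wrote sitemap.xml\n")

With the function living in that module, the caller in pyblog only needs the one-line change shown in the second hunk: generators.generate_sitemap(feed).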