Installing lxml
First, install the lxml library:

pip install lxml
If you run into the following error on Ubuntu:
#include "libxml/xmlversion.h"
compilation terminated.
error: command 'x86_64-linux-gnu-gcc' failed with exit status 1
----------------------------------------
Cleaning up...
Removing temporary dir /tmp/pip_build_root...
Command /usr/bin/python -c "import setuptools, tokenize;__file__='/tmp/pip_build_root/lxml/setup.py';exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\r\n', '\n'), __file__, 'exec'))" install --record /tmp/pip-O4cIn6-record/install-record.txt --single-version-externally-managed --compile failed with error code 1 in /tmp/pip_build_root/lxml
Exception information:
Traceback (most recent call last):
  File "/usr/lib/python2.7/dist-packages/pip/basecommand.py", line 122, in main
    status = self.run(options, args)
  File "/usr/lib/python2.7/dist-packages/pip/commands/install.py", line 283, in run
    requirement_set.install(install_options, global_options, root=options.root_path)
  File "/usr/lib/python2.7/dist-packages/pip/req.py", line 1435, in install
    requirement.install(install_options, global_options, *args, **kwargs)
  File "/usr/lib/python2.7/dist-packages/pip/req.py", line 706, in install
    cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
  File "/usr/lib/python2.7/dist-packages/pip/util.py", line 697, in call_subprocess
    % (command_desc, proc.returncode, cwd))
InstallationError: Command /usr/bin/python -c "import setuptools, tokenize;__file__='/tmp/pip_build_root/lxml/setup.py';exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\r\n', '\n'), __file__, 'exec'))" install --record /tmp/pip-O4cIn6-record/install-record.txt --single-version-externally-managed --compile failed with error code 1 in /tmp/pip_build_root/lxml
install the following dependencies first:
sudo apt-get install libxml2-dev libxslt1-dev
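After installing these packages, re-run pip install lxml. A quick way to confirm that the library built and imports correctly is to print its version information (a minimal sketch; the exact version numbers on your machine will differ):

# Sanity check: import lxml and print the versions it was built against.
from lxml import etree

print(etree.LXML_VERSION)    # lxml's own version, as a tuple
print(etree.LIBXML_VERSION)  # the libxml2 version it is linked against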
Python code
Below is the code for generating the sitemap and sitemapindex files. You can pass in whatever parameters you need, or add extra fields as required:
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import io
import re

from lxml import etree


def generate_xml(filename, url_list):
    """Generate a new sitemap xml file from url_list."""
    root = etree.Element('urlset',
                         xmlns="http://www.sitemaps.org/schemas/sitemap/0.9")
    for each in url_list:
        url = etree.Element('url')
        loc = etree.Element('loc')
        loc.text = each
        url.append(loc)
        root.append(url)
    header = u'<?xml version="1.0" encoding="UTF-8"?>\n'
    # tostring() returns UTF-8 bytes here, so decode before writing text.
    s = etree.tostring(root, encoding='utf-8', pretty_print=True)
    with io.open(filename, 'w', encoding='utf-8') as f:
        f.write(header + s.decode('utf-8'))


def update_xml(filename, url_list):
    """Add new url_list to the original xml file."""
    with open(filename, 'r') as f:
        lines = [i.strip() for i in f.readlines()]
    # Collect the <loc> entries already present, then rewrite the file
    # with the old and new URLs combined.
    old_url_list = []
    for each_line in lines:
        old_url_list += re.findall(r'<loc>(http://.+)</loc>', each_line)
    url_list += old_url_list
    generate_xml(filename, url_list)


def generate_xml_index(filename, sitemap_list, lastmod_list):
    """Generate a sitemap index xml file."""
    root = etree.Element('sitemapindex',
                         xmlns="http://www.sitemaps.org/schemas/sitemap/0.9")
    for each_sitemap, each_lastmod in zip(sitemap_list, lastmod_list):
        sitemap = etree.Element('sitemap')
        loc = etree.Element('loc')
        loc.text = each_sitemap
        lastmod = etree.Element('lastmod')
        lastmod.text = each_lastmod
        sitemap.append(loc)
        sitemap.append(lastmod)
        root.append(sitemap)
    header = u'<?xml version="1.0" encoding="UTF-8"?>\n'
    s = etree.tostring(root, encoding='utf-8', pretty_print=True)
    with io.open(filename, 'w', encoding='utf-8') as f:
        f.write(header + s.decode('utf-8'))


if __name__ == '__main__':
    urls = ['http://www.baidu.com'] * 10
    mods = ['2004-10-01T18:23:17+00:00'] * 10
    generate_xml_index('index.xml', urls, mods)
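The __main__ block above only exercises the index generator. As a further illustration, here is one way the other two helpers might be called; the file name sitemap1.xml and the example.com URLs are placeholders, not part of the original code:

# Hypothetical usage of generate_xml/update_xml; file name and URLs are placeholders.
page_urls = ['http://www.example.com/page/%d' % i for i in range(1, 6)]

# Write a fresh sitemap containing the five URLs.
generate_xml('sitemap1.xml', page_urls)

# Later, merge newly discovered URLs into the same file: update_xml re-reads
# the existing <loc> entries and rewrites the file with old and new combined.
update_xml('sitemap1.xml', ['http://www.example.com/new-page'])

The resulting sitemap1.xml contains a urlset root with one url/loc pair per URL, while index.xml from the __main__ block contains a sitemapindex with a loc and lastmod element per sitemap.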