[website] [fix] fixes in sitemap and rss generators
parent 6241012f08 · commit 488a821534
@@ -1,5 +1,5 @@
 import webnotes, conf, os
-from webnotes.utils import cint, cstr
+from webnotes.utils import cint, cstr, encode
 
 def get_templates_path():
     return os.path.join(os.path.dirname(conf.__file__), "app", "website", "templates")
@@ -72,7 +72,7 @@ def update_template_args(page_name, args):
         args[k] = cint(args.get(k) or 0)
 
     args.url = quote(str(get_request_site_address(full_address=True)), str(""))
-    args.encoded_title = quote(str(args.title or ""), str(""))
+    args.encoded_title = quote(encode(args.title or ""), str(""))
 
     return args
 
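Why the `encode` change matters: in Python 2, `str()` on a unicode title raises UnicodeEncodeError as soon as the title contains a non-ASCII character, so `quote(str(args.title or ""))` breaks for such pages. Below is a minimal Python 2 sketch of the before/after behaviour; the title value is made up, and `.encode("utf-8")` stands in for webnotes' `encode` helper on the assumption that it returns a UTF-8 bytestring.

# -*- coding: utf-8 -*-
from urllib import quote

title = u"Caffè latte"  # hypothetical unicode page title

# old behaviour: str() only handles ASCII, so this raises UnicodeEncodeError
try:
    quote(str(title or ""), str(""))
except UnicodeEncodeError as exc:
    print "str() fails:", exc

# new behaviour: convert to UTF-8 bytes first, then percent-encode
encoded_title = quote(title.encode("utf-8"), str(""))
print encoded_title  # Caff%C3%A8%20latte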
@@ -49,9 +49,10 @@ rss_item = u"""
 
 def generate():
     """generate rss feed"""
-    import webnotes, os
+    import os, urllib
+    import webnotes
     from webnotes.model.doc import Document
     from website.helpers.blog import get_blog_content
     from webnotes.utils import escape_html
 
     host = (os.environ.get('HTTPS') and 'https://' or 'http://') + os.environ.get('HTTP_HOST')
@@ -62,7 +63,8 @@ def generate():
         order by published_on desc limit 20""", as_dict=1)
 
     for blog in blog_list:
-        blog.link = host + '/' + blog.name + '.html'
+        blog.link = urllib.quote(host + '/' + blog.name + '.html')
+        blog.content = escape_html(blog.content or "")
 
         items += rss_item % blog
 
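The same two concerns drive the RSS change: blog slugs may contain characters that are not URL-safe, and blog bodies are HTML that must be escaped before being interpolated into the feed XML. A rough Python 2 sketch of that step, with a made-up blog record and item template; `xml.sax.saxutils.escape` stands in for webnotes' `escape_html`, and `safe=":/"` is passed so that quoting the absolute URL does not also escape the colon after `http`.

# -*- coding: utf-8 -*-
import urllib
from xml.sax.saxutils import escape  # stand-in for webnotes' escape_html

# hypothetical item template and data
rss_item = u"""
<item>
    <title>%(title)s</title>
    <link>%(link)s</link>
    <description>%(content)s</description>
</item>"""

host = "http://example.com"
blog = {"name": u"caffè-latte", "title": u"Caffè latte",
        "content": u"<p>Some <b>HTML</b> body</p>"}

# percent-encode the link so non-ASCII slugs stay valid in the feed
blog["link"] = urllib.quote((host + "/" + blog["name"] + ".html").encode("utf-8"),
                            safe=":/")
# escape &, < and > so the HTML does not break the surrounding XML
blog["content"] = escape(blog["content"] or "")

print rss_item % blog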
@@ -2,6 +2,7 @@
 # License: GNU General Public License (v3). For more information see license.txt
 
+from __future__ import unicode_literals
 
 frame_xml = """<?xml version="1.0" encoding="UTF-8"?>
 <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">%s
 </urlset>"""
@@ -32,10 +33,11 @@ def generate(domain):
 
     for p in pages:
-        if count >= max_items: break
-        page_url = os.path.join(domain, urllib.quote(p[0]))
-        modified = p[1].strftime('%Y-%m-%d')
-        site_map += link_xml % (page_url, modified)
-        count += 1
+        if p[0]:
+            page_url = os.path.join(domain, urllib.quote(p[0]))
+            modified = p[1].strftime('%Y-%m-%d')
+            site_map += link_xml % (page_url, modified)
+            count += 1
 
+        if count >= max_items: break
 
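The reordered sitemap loop first checks that the row actually has a page name (an empty name would otherwise emit a bogus <url> entry) and only then applies the max_items cap. A self-contained Python 2 sketch of that loop over a made-up pages result set, with a simplified stand-in for the module's link_xml template:

# -*- coding: utf-8 -*-
import os, urllib
from datetime import datetime

# simplified stand-in for the per-page template defined next to frame_xml
link_xml = """
<url>
    <loc>%s</loc>
    <lastmod>%s</lastmod>
</url>"""

domain = "http://example.com"  # hypothetical domain
max_items = 1000

# rows of (page_name, modified) as the database query would return them
pages = [
    ("about-us", datetime(2013, 1, 15)),
    ("", datetime(2013, 1, 16)),        # skipped: empty page name
    (u"caffè", datetime(2013, 1, 17)),
]

site_map, count = "", 0
for p in pages:
    if p[0]:
        page_url = os.path.join(domain, urllib.quote(p[0].encode("utf-8")))
        modified = p[1].strftime("%Y-%m-%d")
        site_map += link_xml % (page_url, modified)
        count += 1

    if count >= max_items:
        break

print site_map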