diff --git a/controllers/accounts_controller.py b/controllers/accounts_controller.py
index 73d7608240..576921a530 100644
--- a/controllers/accounts_controller.py
+++ b/controllers/accounts_controller.py
@@ -23,6 +23,9 @@ from utilities.transaction_base import TransactionBase
 class AccountsController(TransactionBase):
 	def get_gl_dict(self, args, cancel=None):
 		"""this method populates the common properties of a gl entry record"""
+		if cancel is None:
+			cancel = (self.doc.docstatus == 2)
+
 		gl_dict = {
 			'company': self.doc.company,
 			'posting_date': self.doc.posting_date,
@@ -30,7 +33,7 @@ class AccountsController(TransactionBase):
 			'voucher_no': self.doc.name,
 			'aging_date': self.doc.fields.get("aging_date") or self.doc.posting_date,
 			'remarks': self.doc.remarks,
-			'is_cancelled': self.doc.docstatus == 2 and "Yes" or "No",
+			'is_cancelled': cancel and "Yes" or "No",
 			'fiscal_year': self.doc.fiscal_year,
 			'debit': 0,
 			'credit': 0,
diff --git a/patches/march_2013/p05_payment_reconciliation.py b/patches/march_2013/p05_payment_reconciliation.py
new file mode 100644
index 0000000000..7b6306b385
--- /dev/null
+++ b/patches/march_2013/p05_payment_reconciliation.py
@@ -0,0 +1,29 @@
+import webnotes
+
+def execute():
+	# delete wrong gle entries created due to a bug in make_gl_entries of Account Controller
+	# when using payment reconciliation
+	res = webnotes.conn.sql_list("""select distinct gl1.voucher_no
+		from `tabGL Entry` gl1, `tabGL Entry` gl2
+		where
+			date(gl1.modified) >= "2013-03-11"
+			and date(gl1.modified) = date(gl2.modified)
+			and gl1.voucher_no = gl2.voucher_no
+			and gl1.voucher_type = "Journal Voucher"
+			and gl1.voucher_type = gl2.voucher_type
+			and gl1.posting_date = gl2.posting_date
+			and gl1.account = gl2.account
+			and ifnull(gl1.is_cancelled, 'No') = 'No' and ifnull(gl2.is_cancelled, 'No') = 'No'
+			and ifnull(gl1.against_voucher, '') = ifnull(gl2.against_voucher, '')
+			and ifnull(gl1.against_voucher_type, '') = ifnull(gl2.against_voucher_type, '')
+			and gl1.remarks = gl2.remarks
+			and ifnull(gl1.debit, 0) = ifnull(gl2.credit, 0)
+			and ifnull(gl1.credit, 0) = ifnull(gl2.debit, 0)
+			and gl1.name > gl2.name""")
+
+	for r in res:
+		webnotes.conn.sql("""update `tabGL Entry` set `is_cancelled`='Yes'
+			where voucher_type='Journal Voucher' and voucher_no=%s""", r)
+		jv = webnotes.bean("Journal Voucher", r)
+		jv.run_method("make_gl_entries")
+	
\ No newline at end of file
diff --git a/patches/patch_list.py b/patches/patch_list.py
index 65fb44e857..1120848535 100644
--- a/patches/patch_list.py
+++ b/patches/patch_list.py
@@ -212,4 +212,5 @@ patch_list = [
 	"patches.march_2013.p03_rename_blog_to_blog_post",
 	"execute:webnotes.reload_doc('hr', 'search_criteria', 'monthly_attendance_details')",
 	"patches.march_2013.p04_pos_update_stock_check",
+	"patches.march_2013.p05_payment_reconciliation",
 ]
\ No newline at end of file
diff --git a/selling/doctype/lead/lead.py b/selling/doctype/lead/lead.py
index c336fe2132..571cdfd516 100644
--- a/selling/doctype/lead/lead.py
+++ b/selling/doctype/lead/lead.py
@@ -97,7 +97,6 @@ class DocType(SellingController):
 		return webnotes.conn.get_value('Sales Email Settings',None,'email_id')
 
 	def on_trash(self):
-		webnotes.conn.sql("""delete from tabCommunication where lead=%s""",
-			self.doc.name)
+		webnotes.conn.sql("""update tabCommunication set lead=null where lead=%s""", self.doc.name)
 		webnotes.conn.sql("""update `tabSupport Ticket` set lead='' where lead=%s""",
 			self.doc.name)
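Note (illustrative, not part of the patch): the get_gl_dict change above lets a caller state explicitly whether a GL row is a reversal instead of inferring it from docstatus == 2, which is what the p05 patch relies on when it re-posts Journal Voucher entries. A minimal sketch of a caller; the method name make_cancellation_entries and the surrounding flow are assumptions, only get_gl_dict(args, cancel=...) comes from this diff:

    # hypothetical controller method, not in this patch
    def make_cancellation_entries(self, account, amount):
        # cancel=True marks the row is_cancelled = "Yes" even while docstatus != 2
        return [self.get_gl_dict({"account": account, "credit": amount}, cancel=True)]

diff --git a/setup/doctype/backup_manager/backup_dropbox.py b/setup/doctype/backup_manager/backup_dropbox.py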
index 3b0857fdd9..2c7fda6ca9 100644
--- a/setup/doctype/backup_manager/backup_dropbox.py
+++ b/setup/doctype/backup_manager/backup_dropbox.py
@@ -1,6 +1,17 @@
+# SETUP:
+# install pip install --upgrade dropbox
+#
+# Create new Dropbox App
+#
+# in conf.py, set oauth2 settings
+# dropbox_access_key
+# dropbox_access_secret
+
+
 import os
 import webnotes
-from webnotes.utils import get_request_site_address
+from webnotes.utils import get_request_site_address, get_base_path
+from webnotes import _
 
 @webnotes.whitelist()
 def get_dropbox_authorize_url():
@@ -10,7 +21,7 @@ def get_dropbox_authorize_url():
 		+ "?cmd=setup.doctype.backup_manager.backup_dropbox.dropbox_callback"
 	
 	url = sess.build_authorize_url(request_token, return_address)
-	
+
 	return {
 		"url": url,
 		"key": request_token.key,
@@ -19,43 +30,43 @@ def get_dropbox_authorize_url():
 
 @webnotes.whitelist(allow_guest=True)
 def dropbox_callback(oauth_token=None, not_approved=False):
+	from dropbox import client
 	if not not_approved:
 		if webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key")==oauth_token:
-			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 1)
+			allowed = 1
 			message = "Dropbox access allowed."
 			
 			sess = get_dropbox_session()
 			sess.set_request_token(webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
 				webnotes.conn.get_value("Backup Manager", None, "dropbox_access_secret"))
 			
 			access_token = sess.obtain_access_token()
+			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_key", access_token.key)
+			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_secret", access_token.secret)
+			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", allowed)
+			dropbox_client = client.DropboxClient(sess)
+			dropbox_client.file_create_folder("files")
-			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_key",
-				access_token.key)
-			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_secret",
-				access_token.secret)
-			
 		else:
-			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 0)
+			allowed = 0
 			message = "Illegal Access Token Please try again."
 	else:
-		webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 0)
+		allowed = 0
 		message = "Dropbox Access not approved."
-	
+
 	webnotes.message_title = "Dropbox Approval"
 	webnotes.message = "<h3>%s</h3><p>Please close this window.</p>" % message
-	
+
 	webnotes.conn.commit()
 	webnotes.response['type'] = 'page'
 	webnotes.response['page_name'] = 'message.html'
 
 def backup_to_dropbox():
-	from dropbox import client, session
+	from dropbox import client, session, rest
 	from conf import dropbox_access_key, dropbox_secret_key
 	from webnotes.utils.backups import new_backup
 	if not webnotes.conn:
 		webnotes.connect()
-		
 	sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
 	
 	sess.set_token(webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
@@ -65,24 +76,23 @@ def backup_to_dropbox():
 	
 	# upload database
 	backup = new_backup()
-	filename = backup.backup_path_db
+	filename = os.path.join(get_base_path(), "public", "backups",
+		os.path.basename(backup.backup_path_db))
 	upload_file_to_dropbox(filename, "database", dropbox_client)
-	
-	# upload files
-	response = dropbox_client.metadata("files")
+	response = dropbox_client.metadata("/files")
-	
-	# add missing files
-	for filename in os.listdir(os.path.join("public", "files")):
+	# upload files to files folder
+	filename = os.path.join(get_base_path(), "public", "files")
+	for filename in os.listdir(filename):
 		found = False
 		for file_metadata in response["contents"]:
 			if filename==os.path.basename(file_metadata["path"]):
-				if os.stat(os.path.join("public", "files", filename)).st_size==file_metadata["bytes"]:
+				if os.stat(filename).st_size==file_metadata["bytes"]:
 					found=True
-		
+
 		if not found:
-			upload_file_to_dropbox(os.path.join("public", "files", filename), "files", dropbox_client)
-	
+			upload_file_to_dropbox(os.path.join(get_base_path(),"public", "files", filename), "files", dropbox_client)
 
 def get_dropbox_session():
 	from dropbox import session
@@ -91,21 +101,18 @@ def get_dropbox_session():
 	except ImportError, e:
 		webnotes.msgprint(_("Please set Dropbox access keys in") + " conf.py",
 			raise_exception=True)
-	
 	sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
 	return sess
 
 def upload_file_to_dropbox(filename, folder, dropbox_client):
-	if __name__=="__main__":
-		print "Uploading " + filename
 	size = os.stat(filename).st_size
 	f = open(filename,'r')
-	
 	if size > 4194304:
 		uploader = dropbox_client.get_chunked_uploader(f, size)
 		while uploader.offset < size:
 			try:
 				uploader.upload_chunked()
+				uploader.finish(folder + '/' + os.path.basename(filename), overwrite='True')
 			except rest.ErrorResponse, e:
 				pass
 	else:
diff --git a/setup/doctype/backup_manager/backup_googledrive.py b/setup/doctype/backup_manager/backup_googledrive.py
new file mode 100644
index 0000000000..c794672de5
--- /dev/null
+++ b/setup/doctype/backup_manager/backup_googledrive.py
@@ -0,0 +1,161 @@
+# SETUP:
+# install pip install --upgrade google-api-python-client
+#
+# In Google API
+# - create new API project
+# - create new oauth2 client (create installed app type as google \
+#	does not support subdomains)
+#
+# in conf.py, set oauth2 settings
+# gdrive_client_id
+# gdrive_client_secret
+
+import httplib2
+import sys
+import os
+import mimetypes
+import webnotes
+import oauth2client.client
+from webnotes.utils import get_request_site_address, get_base_path
+from webnotes import _, msgprint
+from apiclient.discovery import build
+from apiclient.http import MediaFileUpload
+
+@webnotes.whitelist()
+def get_gdrive_authorize_url():
+	flow = get_gdrive_flow()
+	authorize_url = flow.step1_get_authorize_url()
+	return {
+		"authorize_url": authorize_url,
+	}
+
+@webnotes.whitelist()
+def upload_files(name, mimetype, service, folder_id):
+	if not webnotes.conn:
+		webnotes.connect()
+	file_name = os.path.basename(name)
+	media_body = MediaFileUpload(name, mimetype=mimetype, resumable=True)
+	body = {
+		'title': file_name,
+		'description': 'Backup File',
+		'mimetype': mimetype,
+		'parents': [{
+			'kind': 'drive#filelink',
+			'id': folder_id
+		}]
+	}
+	request = service.files().insert(body=body, media_body=media_body)
+	response = None
+	while response is None:
+		status, response = request.next_chunk()
+
+def backup_to_gdrive():
+	from webnotes.utils.backups import new_backup
+	found_database = False
+	found_files = False
+	if not webnotes.conn:
+		webnotes.connect()
+	flow = get_gdrive_flow()
+	credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials")
+	credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
+	http = httplib2.Http()
+	http = credentials.authorize(http)
+	drive_service = build('drive', 'v2', http=http)
+
+	# upload database
+	backup = new_backup()
+	path = os.path.join(get_base_path(), "public", "backups")
+	filename = os.path.join(path, os.path.basename(backup.backup_path_db))
+
+	# upload files to database folder
+	upload_files(filename, 'application/x-gzip', drive_service,
+		webnotes.conn.get_value("Backup Manager", None, "database_folder_id"))
+
+	# upload files to files folder
+	path = os.path.join(get_base_path(), "public", "files")
+	for files in os.listdir(path):
+		filename = path + "/" + files
+		ext = filename.split('.')[-1]
+		size = os.path.getsize(filename)
+		if ext == 'gz' or ext == 'gzip':
+			mimetype = 'application/x-gzip'
+		else:
+			mimetype = mimetypes.types_map["." + ext]
+
+		# Compare Local File with Server File
+		param = {}
+		children = drive_service.children().list(
+			folderId=webnotes.conn.get_value("Backup Manager", None, "files_folder_id"),
+			**param).execute()
+		for child in children.get('items', []):
+			file = drive_service.files().get(fileId=child['id']).execute()
+			if files == file['title'] and size == int(file['fileSize']):
+				found_files = True
+				break
+		if not found_files:
+			upload_files(filename, mimetype, drive_service, webnotes.conn.get_value("Backup Manager", None, "files_folder_id"))
+
+def get_gdrive_flow():
+	from oauth2client.client import OAuth2WebServerFlow
+	import conf
+
+	if not hasattr(conf, "gdrive_client_id"):
+		webnotes.msgprint(_("Please set Google Drive access keys in") + " conf.py",
+			raise_exception=True)
+
+	#callback_url = get_request_site_address(True) \
+	#	+ "?cmd=setup.doctype.backup_manager.backup_googledrive.googledrive_callback"
+
+	# for installed apps since google does not support subdomains
+	redirect_uri = "urn:ietf:wg:oauth:2.0:oob"
+
+	flow = OAuth2WebServerFlow(conf.gdrive_client_id, conf.gdrive_client_secret,
+		"https://www.googleapis.com/auth/drive", redirect_uri)
+	return flow
+
+@webnotes.whitelist()
+def gdrive_callback(verification_code = None):
+	flow = get_gdrive_flow()
+	if verification_code:
+		credentials = flow.step2_exchange(verification_code)
+		allowed = 1
+
+		# make folders to save id
+		http = httplib2.Http()
+		http = credentials.authorize(http)
+		drive_service = build('drive', 'v2', http=http)
+		erpnext_folder_id = create_erpnext_folder(drive_service)
+		database_folder_id = create_folder('database', drive_service, erpnext_folder_id)
+		files_folder_id = create_folder('files', drive_service, erpnext_folder_id)
+
+		webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_access_allowed", allowed)
+		webnotes.conn.set_value("Backup Manager", "Backup Manager", "database_folder_id", database_folder_id)
+		webnotes.conn.set_value("Backup Manager", "Backup Manager", "files_folder_id", files_folder_id)
+		final_credentials = credentials.to_json()
+		webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_credentials", final_credentials)
+
+	webnotes.msgprint("Updated")
+
+def create_erpnext_folder(service):
+	if not webnotes.conn:
+		webnotes.connect()
+	erpnext = {
+		'title': 'erpnext',
+		'mimeType': 'application/vnd.google-apps.folder'
+	}
+	erpnext = service.files().insert(body=erpnext).execute()
+	return erpnext['id']
+
+def create_folder(name, service, folder_id):
+	database = {
+		'title': name,
+		'mimeType': 'application/vnd.google-apps.folder',
+		'parents': [{
+			'kind': 'drive#fileLink',
+			'id': folder_id
+		}]
+	}
+	database = service.files().insert(body=database).execute()
+	return database['id']
+
+if __name__=="__main__":
+	backup_to_gdrive()
\ No newline at end of file
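Note (illustrative, not part of the patch): both upload targets read their OAuth settings from conf.py. The Dropbox SETUP comment mentions dropbox_access_key and dropbox_access_secret, but backup_to_dropbox and get_dropbox_session actually import dropbox_access_key and dropbox_secret_key, and get_gdrive_flow checks for gdrive_client_id and gdrive_client_secret. A conf.py stub matching what the patched code reads, with placeholder values:

    # conf.py (sketch); names follow the imports in the two modules above
    dropbox_access_key = "your-dropbox-app-key"
    dropbox_secret_key = "your-dropbox-app-secret"

    gdrive_client_id = "1234567890.apps.googleusercontent.com"
    gdrive_client_secret = "your-google-oauth-client-secret"

diff --git a/setup/doctype/backup_manager/backup_manager.js b/setup/doctype/backup_manager/backup_manager.js
index 154c72ec0e..28315c5bc5 100644
--- a/setup/doctype/backup_manager/backup_manager.js
+++ b/setup/doctype/backup_manager/backup_manager.js
@@ -1,24 +1,65 @@
+cur_frm.cscript.refresh = function(doc) {
+	cur_frm.disable_save();
+}
+
+//dropbox
 cur_frm.cscript.allow_dropbox_access = function(doc) {
-	wn.call({
-		method: "setup.doctype.backup_manager.backup_dropbox.get_dropbox_authorize_url",
-		callback: function(r) {
-			if(!r.exc) {
-				cur_frm.set_value("dropbox_access_secret", r.message.secret);
-				cur_frm.set_value("dropbox_access_key", r.message.key);
-				cur_frm.save(null, function() {
-					window.open(r.message.url);
-				});
+	if (doc.send_notifications_to == '') {
+		msgprint("Please enter email address.")
+	}
+	else {
+		wn.call({
+			method: "setup.doctype.backup_manager.backup_dropbox.get_dropbox_authorize_url",
+			callback: function(r) {
+				if(!r.exc) {
+					cur_frm.set_value("dropbox_access_secret", r.message.secret);
+					cur_frm.set_value("dropbox_access_key", r.message.key);
+					cur_frm.save(null, function() {
+						window.open(r.message.url);
+					});
+				}
 			}
-		}
-	})
+		})
+	}
 }
 
 cur_frm.cscript.backup_right_now = function(doc) {
 	msgprint("Backing up and uploading. This may take a few minutes.")
 	wn.call({
-		method: "setup.doctype.backup_manager.backup_manager.take_backups",
+		method: "setup.doctype.backup_manager.backup_manager.take_backups_dropbox",
 		callback: function(r) {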
 			msgprint("Backups taken. Please check your email for the response.")
 		}
 	})
-}
\ No newline at end of file
+}
+//gdrive
+cur_frm.cscript.allow_gdrive_access = function(doc) {
+	if (doc.send_notifications_to == '') {
+		msgprint("Please enter email address.")
+	}
+	else {
+		wn.call({
+			method: "setup.doctype.backup_manager.backup_googledrive.get_gdrive_authorize_url",
+			callback: function(r) {
+				window.open(r.message.authorize_url);
+			}
+		})
+	}
+}
+
+cur_frm.cscript.validate_gdrive = function(doc) {
+	wn.call({
+		method: "setup.doctype.backup_manager.backup_manager.gdrive_callback",
+		args: {
+			verification_code: doc.verification_code
+		},
+	});
+}
+
+cur_frm.cscript.upload_backups_to_dropbox = function(doc) {
+	cur_frm.save()
+}
+
+cur_frm.cscript.upload_backups_to_gdrive = function(doc) {
+	cur_frm.save()
+}
diff --git a/setup/doctype/backup_manager/backup_manager.py b/setup/doctype/backup_manager/backup_manager.py
index 48d48e817c..213aa85500 100644
--- a/setup/doctype/backup_manager/backup_manager.py
+++ b/setup/doctype/backup_manager/backup_manager.py
@@ -3,6 +3,8 @@ from __future__ import unicode_literals
 import webnotes
 from webnotes import _
 
+from backup_dropbox import dropbox_callback, get_dropbox_session, get_dropbox_authorize_url
+from backup_googledrive import gdrive_callback, get_gdrive_flow, get_gdrive_authorize_url
 
 class DocType:
 	def __init__(self, d, dl):
@@ -16,10 +18,13 @@ def take_backups_weekly():
 
 def take_backups_if(freq):
 	if webnotes.conn.get_value("Backup Manager", None, "upload_backups_to_dropbox")==freq:
-		take_backups()
-	
+		take_backups_dropbox()
+
+	if webnotes.conn.get_value("Backup Manager", None, "upload_backups_to_gdrive")==freq:
+		take_backups_gdrive()
+
 @webnotes.whitelist()
-def take_backups():
+def take_backups_dropbox():
 	try:
 		from setup.doctype.backup_manager.backup_dropbox import backup_to_dropbox
 		backup_to_dropbox()
@@ -27,6 +32,16 @@ def take_backups():
 	except Exception, e:
 		send_email(False, "Dropbox", e)
 
+#backup to gdrive
+@webnotes.whitelist()
+def take_backups_gdrive():
+	try:
+		from setup.doctype.backup_manager.backup_googledrive import backup_to_gdrive
+		backup_to_gdrive()
+		send_email(True, "Google Drive")
+	except Exception, e:
+		send_email(False, "Google Drive", e)
+
 def send_email(success, service_name, error_status=None):
 	if success:
 		subject = "Backup Upload Successful"
@@ -44,5 +59,5 @@ def send_email(success, service_name, error_status=None):
 
 	# email system managers
 	from webnotes.utils.email_lib import sendmail
-	sendmail(webnotes.conn.get_value("Backup Manager", None, "send_notifications_to").split(","),
-		subject=subject, msg=message)
+	sendmail(webnotes.conn.get_value("Backup Manager", None, "send_notifications_to").split(","),
+		subject=subject, msg=message)
\ No newline at end of file
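Note (illustrative, not part of the patch): take_backups_if compares the "Upload Backups to Dropbox/Google Drive" Select value (Never, Daily or Weekly in the DocType definition below) against a frequency string. Only take_backups_weekly is visible as hunk context above; the daily and weekly entry points presumably just forward that string, but their bodies and the scheduler wiring are assumptions here, not part of this diff:

    # sketch; function bodies are assumed, not taken from this patch
    def take_backups_daily():
        take_backups_if("Daily")

    def take_backups_weekly():
        take_backups_if("Weekly")
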
diff --git a/setup/doctype/backup_manager/backup_manager.txt b/setup/doctype/backup_manager/backup_manager.txt
index a994e7da53..9a43f34746 100644
--- a/setup/doctype/backup_manager/backup_manager.txt
+++ b/setup/doctype/backup_manager/backup_manager.txt
@@ -1,8 +1,8 @@
 [
  {
-  "creation": "2013-03-05 16:35:50", 
+  "creation": "2013-03-15 11:06:59", 
   "docstatus": 0, 
-  "modified": "2013-03-07 12:18:07", 
+  "modified": "2013-03-15 17:27:33", 
   "modified_by": "Administrator", 
   "owner": "Administrator"
  }, 
@@ -56,7 +56,9 @@
   "doctype": "DocField", 
   "fieldname": "backup_right_now", 
   "fieldtype": "Button", 
-  "label": "Backup Right Now"
+  "hidden": 1, 
+  "label": "Backup Right Now", 
+  "read_only": 1
  }, 
  {
   "description": "Note: Backups and files are not deleted from Dropbox, you will have to delete them manually.", 
@@ -102,6 +104,70 @@
   "fieldtype": "Button", 
   "label": "Allow Dropbox Access"
  }, 
+ {
+  "description": "Note: Backups and files are not deleted from Google Drive, you will have to delete them manually.", 
+  "doctype": "DocField", 
+  "fieldname": "sync_with_gdrive", 
+  "fieldtype": "Section Break", 
+  "label": "Sync with Google Drive"
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "upload_backups_to_gdrive", 
+  "fieldtype": "Select", 
+  "label": "Upload Backups to Google Drive", 
+  "options": "Never\nDaily\nWeekly"
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "allow_gdrive_access", 
+  "fieldtype": "Button", 
+  "label": "Allow Google Drive Access"
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "verification_code", 
+  "fieldtype": "Data", 
+  "label": "Enter Verification Code"
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "validate_gdrive", 
+  "fieldtype": "Button", 
+  "label": "Validate"
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "gdrive_access_allowed", 
+  "fieldtype": "Check", 
+  "hidden": 1, 
+  "label": "Google Drive Access Allowed", 
+  "read_only": 1
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "gdrive_credentials", 
+  "fieldtype": "Text", 
+  "hidden": 1, 
+  "label": "Credentials", 
+  "read_only": 1
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "database_folder_id", 
+  "fieldtype": "Data", 
+  "hidden": 1, 
+  "label": "Database Folder ID", 
+  "read_only": 1
+ }, 
+ {
+  "doctype": "DocField", 
+  "fieldname": "files_folder_id", 
+  "fieldtype": "Data", 
+  "hidden": 1, 
+  "label": "Files Folder ID", 
+  "read_only": 1
+ }, 
  {
   "doctype": "DocPerm"
  }
diff --git a/stock/doctype/item/item.py b/stock/doctype/item/item.py
index 74bf3f308c..931b7762d3 100644
--- a/stock/doctype/item/item.py
+++ b/stock/doctype/item/item.py
@@ -238,8 +238,7 @@ class DocType(DocListController):
 		from website.helpers.product import get_parent_item_groups, url_for_website
 		self.parent_groups = get_parent_item_groups(self.doc.item_group) + [{"name":self.doc.name}]
 		self.doc.website_image = url_for_website(self.doc.website_image)
-		self.doc.title = self.doc.item_name == self.doc.name and self.doc.item_name or \
-			(self.doc.item_name + " [" + self.doc.name + "]")
+		self.doc.title = self.doc.item_name
 		
 		if self.doc.slideshow:
 			from website.helpers.slideshow import get_slideshow
diff --git a/website/doctype/style_settings/custom_template.css b/website/doctype/style_settings/custom_template.css
index f4038fbdcd..712c748cb8 100644
--- a/website/doctype/style_settings/custom_template.css
+++ b/website/doctype/style_settings/custom_template.css
@@ -47,10 +47,9 @@ div.outer {
 {% if doc.page_border %}
 /* Page Border*/
 div.outer {
-	-moz-box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
-	-webkit-box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
-	box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
-	border-radius: 5px;
+	box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
+	-moz-box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
+	-webkit-box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
 }
 {% else %}
 {% if doc.background_color == doc.page_background %}
@@ -61,6 +60,11 @@ div.web-footer {
 {% endif %}
 {% endif %}
 
+div.web-footer, div.web-footer a {
+	font-size: 90%;
+	color: #{{ get_hex_shade(doc.background_color or "ffffff", 70) }};
+}
+
 /* Bootstrap Navbar */
 .navbar-inverse .navbar-inner {
 	background-color: #{{ doc.top_bar_background or "444444"}};
diff --git a/website/doctype/style_settings/style_settings.txt b/website/doctype/style_settings/style_settings.txt
index 3f898e72f2..df266a5992 100644
--- a/website/doctype/style_settings/style_settings.txt
+++ b/website/doctype/style_settings/style_settings.txt
@@ -2,7 +2,7 @@
 {
  "creation": "2013-03-08 11:36:53", 
  "docstatus": 0, 
- "modified": "2013-03-12 13:35:14", 
+ "modified": "2013-03-14 11:57:20", 
  "modified_by": "Administrator", 
  "owner": "Administrator"
 }, 
@@ -150,14 +150,14 @@
   "fieldtype": "Column Break"
  }, 
  {
-  "description": "Add the name of Google Web Font e.g. \"Open Sans\"", 
+  "description": "Add the name of Google Web Font e.g. \"Open Sans\"", 
   "doctype": "DocField", 
   "fieldname": "google_web_font_for_heading", 
   "fieldtype": "Data", 
   "label": "Google Web Font (Heading)"
  }, 
  {
-  "description": "Add the name of Google Web Font e.g. \"Open Sans\"", 
+  "description": "Add the name of Google Web Font e.g. \"Open Sans\"", 
   "doctype": "DocField", 
   "fieldname": "google_web_font_for_text", 
   "fieldtype": "Data", 
diff --git a/website/doctype/website_settings/website_settings.js b/website/doctype/website_settings/website_settings.js
index a02c3b2fcc..67e494154f 100644
--- a/website/doctype/website_settings/website_settings.js
+++ b/website/doctype/website_settings/website_settings.js
@@ -50,5 +50,5 @@ cur_frm.cscript.set_banner_from_image = function(doc) {
 	var src = doc.banner_image;
 	if(src.indexOf("/")==-1) src = "files/" + src;
-	cur_frm.set_value("banner_html", "<img src='"+ src +"'>");
+	cur_frm.set_value("banner_html", "<img src='"+ src +"' style='max-width: 200px;'>");
 }
\ No newline at end of file
diff --git a/website/helpers/blog_feed.py b/website/helpers/blog_feed.py
index c59a419fc6..41c203e0ad 100644
--- a/website/helpers/blog_feed.py
+++ b/website/helpers/blog_feed.py
@@ -44,7 +44,7 @@ rss_item = u"""
 		<description>%(content)s</description>
 		<link>%(link)s</link>
 		<guid>%(name)s</guid>
-		<pubDate>%(creation)s</pubDate>
+		<pubDate>%(published_on)s</pubDate>
 	</item>
 """
 
 def generate():
@@ -57,13 +57,12 @@ def generate():
 	items = ''
 	
 	blog_list = webnotes.conn.sql("""\
-		select page_name as name, modified, creation, title from `tabBlog Post`
+		select page_name as name, published_on, modified, title, content from `tabBlog Post`
 		where ifnull(published,0)=1
-		order by creation desc, modified desc, name asc limit 20""", as_dict=1)
+		order by published_on desc limit 20""", as_dict=1)
 	
 	for blog in blog_list:
 		blog.link = host + '/' + blog.name + '.html'
-		blog.content = get_blog_content(blog.name)
 		
 		items += rss_item % blog
 
diff --git a/website/templates/css/login.css b/website/templates/css/login.css
index 710f88944b..c2a7af2521 100644
--- a/website/templates/css/login.css
+++ b/website/templates/css/login.css
@@ -6,6 +6,7 @@
 
 .layout-wrapper {
 	background-color: #fff;
+	color: #333;
 	padding: 10px;
 	box-shadow: 1px 1px 3px 3px #ccc;
 	font-size: 12px;
diff --git a/website/templates/css/product_page.css b/website/templates/css/product_page.css
index 5780ee4fd3..566b6b57aa 100644
--- a/website/templates/css/product_page.css
+++ b/website/templates/css/product_page.css
@@ -1,6 +1,6 @@