Merge branch 'master' of github.com:webnotes/erpnext into sales_purchase_return
Conflicts: patches/patch_list.py
Commit: f11e2f8f6f
@@ -23,6 +23,9 @@ from utilities.transaction_base import TransactionBase
class AccountsController(TransactionBase):
	def get_gl_dict(self, args, cancel=None):
		"""this method populates the common properties of a gl entry record"""
		if cancel is None:
			cancel = (self.doc.docstatus == 2)

		gl_dict = {
			'company': self.doc.company,
			'posting_date': self.doc.posting_date,
@@ -30,7 +33,7 @@ class AccountsController(TransactionBase):
			'voucher_no': self.doc.name,
			'aging_date': self.doc.fields.get("aging_date") or self.doc.posting_date,
			'remarks': self.doc.remarks,
			'is_cancelled': self.doc.docstatus == 2 and "Yes" or "No",
			'is_cancelled': cancel and "Yes" or "No",
			'fiscal_year': self.doc.fiscal_year,
			'debit': 0,
			'credit': 0,
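The new `cancel` argument lets a caller force is_cancelled when building a GL entry, instead of relying on self.doc.docstatus == 2. A minimal, hypothetical caller sketch, not part of this commit; the account head and amount are placeholders:

	# hypothetical usage inside a controller that extends AccountsController
	entry = self.get_gl_dict({
		"account": "Debtors - Co",  # placeholder account head
		"credit": 100.0,
	}, cancel=True)  # entry["is_cancelled"] will be "Yes"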
patches/march_2013/p05_payment_reconciliation.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import webnotes

def execute():
	# delete wrong gle entries created due to a bug in make_gl_entries of Account Controller
	# when using payment reconciliation
	res = webnotes.conn.sql_list("""select distinct gl1.voucher_no
		from `tabGL Entry` gl1, `tabGL Entry` gl2
		where
			date(gl1.modified) >= "2013-03-11"
			and date(gl1.modified) = date(gl2.modified)
			and gl1.voucher_no = gl2.voucher_no
			and gl1.voucher_type = "Journal Voucher"
			and gl1.voucher_type = gl2.voucher_type
			and gl1.posting_date = gl2.posting_date
			and gl1.account = gl2.account
			and ifnull(gl1.is_cancelled, 'No') = 'No' and ifnull(gl2.is_cancelled, 'No') = 'No'
			and ifnull(gl1.against_voucher, '') = ifnull(gl2.against_voucher, '')
			and ifnull(gl1.against_voucher_type, '') = ifnull(gl2.against_voucher_type, '')
			and gl1.remarks = gl2.remarks
			and ifnull(gl1.debit, 0) = ifnull(gl2.credit, 0)
			and ifnull(gl1.credit, 0) = ifnull(gl2.debit, 0)
			and gl1.name > gl2.name""")

	for r in res:
		webnotes.conn.sql("""update `tabGL Entry` set `is_cancelled`='Yes'
			where voucher_type='Journal Voucher' and voucher_no=%s""", r)
		jv = webnotes.bean("Journal Voucher", r)
		jv.run_method("make_gl_entries")
@@ -212,4 +212,5 @@ patch_list = [
	"patches.march_2013.p03_rename_blog_to_blog_post",
	"execute:webnotes.reload_doc('hr', 'search_criteria', 'monthly_attendance_details')",
	"patches.march_2013.p04_pos_update_stock_check",
	"patches.march_2013.p05_payment_reconciliation",
]
@@ -97,7 +97,6 @@ class DocType(SellingController):
		return webnotes.conn.get_value('Sales Email Settings',None,'email_id')

	def on_trash(self):
		webnotes.conn.sql("""delete from tabCommunication where lead=%s""",
			self.doc.name)
		webnotes.conn.sql("""update tabCommunication set lead=null where lead=%s""", self.doc.name)
		webnotes.conn.sql("""update `tabSupport Ticket` set lead='' where lead=%s""",
			self.doc.name)
@@ -1,6 +1,17 @@
# SETUP:
# install pip install --upgrade dropbox
#
# Create new Dropbox App
#
# in conf.py, set oauth2 settings
# dropbox_access_key
# dropbox_access_secret

import os
import webnotes
from webnotes.utils import get_request_site_address
from webnotes.utils import get_request_site_address, get_base_path
from webnotes import _

@webnotes.whitelist()
def get_dropbox_authorize_url():
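The setup comment above points at conf.py keys; note that the code later in this file imports them as dropbox_access_key and dropbox_secret_key. A minimal sketch of the expected conf.py entries (values are placeholders from the Dropbox App console, not part of this commit):

	# conf.py (sketch)
	dropbox_access_key = "APP_KEY_PLACEHOLDER"
	dropbox_secret_key = "APP_SECRET_PLACEHOLDER"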
@@ -10,7 +21,7 @@ def get_dropbox_authorize_url():
		+ "?cmd=setup.doctype.backup_manager.backup_dropbox.dropbox_callback"

	url = sess.build_authorize_url(request_token, return_address)

	return {
		"url": url,
		"key": request_token.key,
@@ -19,43 +30,43 @@ def get_dropbox_authorize_url():

@webnotes.whitelist(allow_guest=True)
def dropbox_callback(oauth_token=None, not_approved=False):
	from dropbox import client
	if not not_approved:
		if webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key")==oauth_token:
			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 1)
			allowed = 1
			message = "Dropbox access allowed."

			sess = get_dropbox_session()
			sess.set_request_token(webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
				webnotes.conn.get_value("Backup Manager", None, "dropbox_access_secret"))
			access_token = sess.obtain_access_token()
			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_key", access_token.key)
			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_secret", access_token.secret)
			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", allowed)
			dropbox_client = client.DropboxClient(sess)
			dropbox_client.file_create_folder("files")

			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_key",
				access_token.key)
			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_secret",
				access_token.secret)

		else:
			webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 0)
			allowed = 0
			message = "Illegal Access Token Please try again."
	else:
		webnotes.conn.set_value("Backup Manager", "Backup Manager", "dropbox_access_allowed", 0)
		allowed = 0
		message = "Dropbox Access not approved."

	webnotes.message_title = "Dropbox Approval"
	webnotes.message = "<h3>%s</h3><p>Please close this window.</p>" % message

	webnotes.conn.commit()
	webnotes.response['type'] = 'page'
	webnotes.response['page_name'] = 'message.html'

def backup_to_dropbox():
	from dropbox import client, session
	from dropbox import client, session, rest
	from conf import dropbox_access_key, dropbox_secret_key
	from webnotes.utils.backups import new_backup
	if not webnotes.conn:
		webnotes.connect()

	sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")

	sess.set_token(webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
@@ -65,24 +76,23 @@ def backup_to_dropbox():

	# upload database
	backup = new_backup()
	filename = backup.backup_path_db
	filename = os.path.join(get_base_path(), "public", "backups",
		os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "database", dropbox_client)

	# upload files
	response = dropbox_client.metadata("files")
	response = dropbox_client.metadata("/files")

	# add missing files
	for filename in os.listdir(os.path.join("public", "files")):
	# upload files to files folder
	filename = os.path.join(get_base_path(), "public", "files")
	for filename in os.listdir(filename):
		found = False
		for file_metadata in response["contents"]:
			if filename==os.path.basename(file_metadata["path"]):
				if os.stat(os.path.join("public", "files", filename)).st_size==file_metadata["bytes"]:
				if os.stat(filename).st_size==file_metadata["bytes"]:
					found=True

		if not found:
			upload_file_to_dropbox(os.path.join("public", "files", filename), "files", dropbox_client)
			upload_file_to_dropbox(os.path.join(get_base_path(),"public", "files", filename), "files", dropbox_client)

def get_dropbox_session():
	from dropbox import session
@@ -91,21 +101,18 @@ def get_dropbox_session():
	except ImportError, e:
		webnotes.msgprint(_("Please set Dropbox access keys in") + " conf.py",
			raise_exception=True)

	sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
	return sess

def upload_file_to_dropbox(filename, folder, dropbox_client):
	if __name__=="__main__":
		print "Uploading " + filename
	size = os.stat(filename).st_size
	f = open(filename,'r')

	if size > 4194304:
		uploader = dropbox_client.get_chunked_uploader(f, size)
		while uploader.offset < size:
			try:
				uploader.upload_chunked()
				finish(folder + '/' + os.path.basename(filename), overwrite='True')
			except rest.ErrorResponse, e:
				pass
	else:
setup/doctype/backup_manager/backup_googledrive.py (new file, 161 lines)
@@ -0,0 +1,161 @@
# SETUP:
# install pip install --upgrade google-api-python-client
#
# In Google API
# - create new API project
# - create new oauth2 client (create installed app type as google \
# does not support subdomains)
#
# in conf.py, set oauth2 settings
# gdrive_client_id
# gdrive_client_secret

import httplib2
import sys
import os
import mimetypes
import webnotes
import oauth2client.client
from webnotes.utils import get_request_site_address, get_base_path
from webnotes import _, msgprint
from apiclient.discovery import build
from apiclient.http import MediaFileUpload

@webnotes.whitelist()
def get_gdrive_authorize_url():
	flow = get_gdrive_flow()
	authorize_url = flow.step1_get_authorize_url()
	return {
		"authorize_url": authorize_url,
	}

@webnotes.whitelist()
def upload_files(name, mimetype, service, folder_id):
	if not webnotes.conn:
		webnotes.connect()
	file_name = os.path.basename(name)
	media_body = MediaFileUpload(name, mimetype=mimetype, resumable=True)
	body = {
		'title': file_name,
		'description': 'Backup File',
		'mimetype': mimetype,
		'parents': [{
			'kind': 'drive#filelink',
			'id': folder_id
		}]
	}
	request = service.files().insert(body=body, media_body=media_body)
	response = None
	while response is None:
		status, response = request.next_chunk()

def backup_to_gdrive():
	from webnotes.utils.backups import new_backup
	found_database = False
	found_files = False
	if not webnotes.conn:
		webnotes.connect()
	flow = get_gdrive_flow()
	credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials")
	credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
	http = httplib2.Http()
	http = credentials.authorize(http)
	drive_service = build('drive', 'v2', http=http)

	# upload database
	backup = new_backup()
	path = os.path.join(get_base_path(), "public", "backups")
	filename = os.path.join(path, os.path.basename(backup.backup_path_db))

	# upload files to database folder
	upload_files(filename, 'application/x-gzip', drive_service,
		webnotes.conn.get_value("Backup Manager", None, "database_folder_id"))

	# upload files to files folder
	path = os.path.join(get_base_path(), "public", "files")
	for files in os.listdir(path):
		filename = path + "/" + files
		ext = filename.split('.')[-1]
		size = os.path.getsize(filename)
		if ext == 'gz' or ext == 'gzip':
			mimetype = 'application/x-gzip'
		else:
			mimetype = mimetypes.types_map["." + ext]
		#Compare Local File with Server File
		param = {}
		children = drive_service.children().list(
			folderId=webnotes.conn.get_value("Backup Manager", None, "files_folder_id"),
			**param).execute()
		for child in children.get('items', []):
			file = drive_service.files().get(fileId=child['id']).execute()
			if files == file['title'] and size == int(file['fileSize']):
				found_files = True
				break
		if not found_files:
			upload_files(filename, mimetype, drive_service, webnotes.conn.get_value("Backup Manager", None, "files_folder_id"))

def get_gdrive_flow():
	from oauth2client.client import OAuth2WebServerFlow
	import conf

	if not hasattr(conf, "gdrive_client_id"):
		webnotes.msgprint(_("Please set Google Drive access keys in") + " conf.py",
			raise_exception=True)

	#callback_url = get_request_site_address(True) \
	# + "?cmd=setup.doctype.backup_manager.backup_googledrive.googledrive_callback"

	# for installed apps since google does not support subdomains
	redirect_uri = "urn:ietf:wg:oauth:2.0:oob"

	flow = OAuth2WebServerFlow(conf.gdrive_client_id, conf.gdrive_client_secret,
		"https://www.googleapis.com/auth/drive", redirect_uri)
	return flow

@webnotes.whitelist()
def gdrive_callback(verification_code = None):
	flow = get_gdrive_flow()
	if verification_code:
		credentials = flow.step2_exchange(verification_code)
		allowed = 1

	# make folders to save id
	http = httplib2.Http()
	http = credentials.authorize(http)
	drive_service = build('drive', 'v2', http=http)
	erpnext_folder_id = create_erpnext_folder(drive_service)
	database_folder_id = create_folder('database', drive_service, erpnext_folder_id)
	files_folder_id = create_folder('files', drive_service, erpnext_folder_id)

	webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_access_allowed", allowed)
	webnotes.conn.set_value("Backup Manager", "Backup Manager", "database_folder_id", database_folder_id)
	webnotes.conn.set_value("Backup Manager", "Backup Manager", "files_folder_id", files_folder_id)
	final_credentials = credentials.to_json()
	webnotes.conn.set_value("Backup Manager", "Backup Manager", "gdrive_credentials", final_credentials)

	webnotes.msgprint("Updated")

def create_erpnext_folder(service):
	if not webnotes.conn:
		webnotes.connect()
	erpnext = {
		'title': 'erpnext',
		'mimeType': 'application/vnd.google-apps.folder'
	}
	erpnext = service.files().insert(body=erpnext).execute()
	return erpnext['id']

def create_folder(name, service, folder_id):
	database = {
		'title': name,
		'mimeType': 'application/vnd.google-apps.folder',
		'parents': [{
			'kind': 'drive#fileLink',
			'id': folder_id
		}]
	}
	database = service.files().insert(body=database).execute()
	return database['id']

if __name__=="__main__":
	backup_to_gdrive()
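The SETUP comment at the top of this new file refers to two conf.py settings that get_gdrive_flow() checks with hasattr and then reads as conf.gdrive_client_id and conf.gdrive_client_secret. A minimal sketch of those entries (values are placeholders from the Google API console, not part of this commit):

	# conf.py (sketch)
	gdrive_client_id = "CLIENT_ID_PLACEHOLDER.apps.googleusercontent.com"
	gdrive_client_secret = "CLIENT_SECRET_PLACEHOLDER"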
@@ -1,24 +1,65 @@
cur_frm.cscript.refresh = function(doc) {
	cur_frm.disable_save();
}

//dropbox
cur_frm.cscript.allow_dropbox_access = function(doc) {
	wn.call({
		method: "setup.doctype.backup_manager.backup_dropbox.get_dropbox_authorize_url",
		callback: function(r) {
			if(!r.exc) {
				cur_frm.set_value("dropbox_access_secret", r.message.secret);
				cur_frm.set_value("dropbox_access_key", r.message.key);
				cur_frm.save(null, function() {
					window.open(r.message.url);
				});
	if (doc.send_notifications_to == '') {
		msgprint("Please enter email address.")
	}
	else {
		wn.call({
			method: "setup.doctype.backup_manager.backup_dropbox.get_dropbox_authorize_url",
			callback: function(r) {
				if(!r.exc) {
					cur_frm.set_value("dropbox_access_secret", r.message.secret);
					cur_frm.set_value("dropbox_access_key", r.message.key);
					cur_frm.save(null, function() {
						window.open(r.message.url);
					});
				}
			}
			}
		})
		})
	}
}

cur_frm.cscript.backup_right_now = function(doc) {
	msgprint("Backing up and uploading. This may take a few minutes.")
	wn.call({
		method: "setup.doctype.backup_manager.backup_manager.take_backups",
		method: "setup.doctype.backup_manager.backup_manager.take_backups_dropbox",
		callback: function(r) {
			msgprint("Backups taken. Please check your email for the response.")
		}
	})
}
}
//gdrive
cur_frm.cscript.allow_gdrive_access = function(doc) {
	if (doc.send_notifications_to == '') {
		msgprint("Please enter email address.")
	}
	else {
		wn.call({
			method: "setup.doctype.backup_manager.backup_googledrive.get_gdrive_authorize_url",
			callback: function(r) {
				window.open(r.message.authorize_url);
			}
		})
	}
}

cur_frm.cscript.validate_gdrive = function(doc) {
	wn.call({
		method: "setup.doctype.backup_manager.backup_manager.gdrive_callback",
		args: {
			verification_code: doc.verification_code
		},
	});
}

cur_frm.cscript.upload_backups_to_dropbox = function(doc) {
	cur_frm.save()
}

cur_frm.cscript.upload_backups_to_gdrive = function(doc) {
	cur_frm.save()
}
@@ -3,6 +3,8 @@
from __future__ import unicode_literals
import webnotes
from webnotes import _
from backup_dropbox import dropbox_callback, get_dropbox_session, get_dropbox_authorize_url
from backup_googledrive import gdrive_callback, get_gdrive_flow, get_gdrive_authorize_url

class DocType:
	def __init__(self, d, dl):
@@ -16,10 +18,13 @@ def take_backups_weekly():

def take_backups_if(freq):
	if webnotes.conn.get_value("Backup Manager", None, "upload_backups_to_dropbox")==freq:
		take_backups()
		take_backups_dropbox()

	if webnotes.conn.get_value("Backup Manager", None, "upload_backups_to_gdrive")==freq:
		take_backups_gdrive()

@webnotes.whitelist()
def take_backups():
def take_backups_dropbox():
	try:
		from setup.doctype.backup_manager.backup_dropbox import backup_to_dropbox
		backup_to_dropbox()
@@ -27,6 +32,16 @@ def take_backups():
	except Exception, e:
		send_email(False, "Dropbox", e)

#backup to gdrive
@webnotes.whitelist()
def take_backups_gdrive():
	try:
		from setup.doctype.backup_manager.backup_googledrive import backup_to_gdrive
		backup_to_gdrive()
		send_email(True, "Google Drive")
	except Exception, e:
		send_email(False, "Google Drive", e)

def send_email(success, service_name, error_status=None):
	if success:
		subject = "Backup Upload Successful"
@@ -44,5 +59,5 @@ def send_email(success, service_name, error_status=None):

	# email system managers
	from webnotes.utils.email_lib import sendmail
	sendmail(webnotes.conn.get_value("Backup Manager", None, "send_notifications_to").split(","),
		subject=subject, msg=message)
	sendmail(webnotes.conn.get_value("Backup Manager", None, "send_notifications_to").split(","),
		subject=subject, msg=message)
@@ -1,8 +1,8 @@
[
 {
  "creation": "2013-03-05 16:35:50",
  "creation": "2013-03-15 11:06:59",
  "docstatus": 0,
  "modified": "2013-03-07 12:18:07",
  "modified": "2013-03-15 17:27:33",
  "modified_by": "Administrator",
  "owner": "Administrator"
 },
@@ -56,7 +56,9 @@
  "doctype": "DocField",
  "fieldname": "backup_right_now",
  "fieldtype": "Button",
  "label": "Backup Right Now"
  "hidden": 1,
  "label": "Backup Right Now",
  "read_only": 1
 },
 {
  "description": "Note: Backups and files are not deleted from Dropbox, you will have to delete them manually.",
@@ -102,6 +104,70 @@
  "fieldtype": "Button",
  "label": "Allow Dropbox Access"
 },
 {
  "description": "Note: Backups and files are not deleted from Google Drive, you will have to delete them manually.",
  "doctype": "DocField",
  "fieldname": "sync_with_gdrive",
  "fieldtype": "Section Break",
  "label": "Sync with Google Drive"
 },
 {
  "doctype": "DocField",
  "fieldname": "upload_backups_to_gdrive",
  "fieldtype": "Select",
  "label": "Upload Backups to Google Drive",
  "options": "Never\nDaily\nWeekly"
 },
 {
  "doctype": "DocField",
  "fieldname": "allow_gdrive_access",
  "fieldtype": "Button",
  "label": "Allow Google Drive Access"
 },
 {
  "doctype": "DocField",
  "fieldname": "verification_code",
  "fieldtype": "Data",
  "label": "Enter Verification Code"
 },
 {
  "doctype": "DocField",
  "fieldname": "validate_gdrive",
  "fieldtype": "Button",
  "label": "Validate"
 },
 {
  "doctype": "DocField",
  "fieldname": "gdrive_access_allowed",
  "fieldtype": "Check",
  "hidden": 1,
  "label": "Google Drive Access Allowed",
  "read_only": 1
 },
 {
  "doctype": "DocField",
  "fieldname": "gdrive_credentials",
  "fieldtype": "Text",
  "hidden": 1,
  "label": "Credentials",
  "read_only": 1
 },
 {
  "doctype": "DocField",
  "fieldname": "database_folder_id",
  "fieldtype": "Data",
  "hidden": 1,
  "label": "Database Folder ID",
  "read_only": 1
 },
 {
  "doctype": "DocField",
  "fieldname": "files_folder_id",
  "fieldtype": "Data",
  "hidden": 1,
  "label": "Files Folder ID",
  "read_only": 1
 },
 {
  "doctype": "DocPerm"
 }
@@ -238,8 +238,7 @@ class DocType(DocListController):
		from website.helpers.product import get_parent_item_groups, url_for_website
		self.parent_groups = get_parent_item_groups(self.doc.item_group) + [{"name":self.doc.name}]
		self.doc.website_image = url_for_website(self.doc.website_image)
		self.doc.title = self.doc.item_name == self.doc.name and self.doc.item_name or \
			(self.doc.item_name + " [" + self.doc.name + "]")
		self.doc.title = self.doc.item_name

		if self.doc.slideshow:
			from website.helpers.slideshow import get_slideshow
@@ -47,10 +47,9 @@ div.outer {
{% if doc.page_border %}
/* Page Border*/
div.outer {
	-moz-box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
	-webkit-box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
	box-shadow: 0px 0px 3px rgba(0,0,0,0.9);
	border-radius: 5px;
	box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
	-moz-box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
	-webkit-box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
}
{% else %}
{% if doc.background_color == doc.page_background %}
@@ -61,6 +60,11 @@ div.web-footer {
{% endif %}
{% endif %}

div.web-footer, div.web-footer a {
	font-size: 90%;
	color: #{{ get_hex_shade(doc.background_color or "ffffff", 70) }};
}

/* Bootstrap Navbar */
.navbar-inverse .navbar-inner {
	background-color: #{{ doc.top_bar_background or "444444"}};
@@ -2,7 +2,7 @@
 {
  "creation": "2013-03-08 11:36:53",
  "docstatus": 0,
  "modified": "2013-03-12 13:35:14",
  "modified": "2013-03-14 11:57:20",
  "modified_by": "Administrator",
  "owner": "Administrator"
 },
@@ -150,14 +150,14 @@
  "fieldtype": "Column Break"
 },
 {
  "description": "Add the name of Google Web Font e.g. \"Open Sans\"",
  "description": "Add the name of <a href=\"http://google.com/webfonts\" target=\"_blank\">Google Web Font</a> e.g. \"Open Sans\"",
  "doctype": "DocField",
  "fieldname": "google_web_font_for_heading",
  "fieldtype": "Data",
  "label": "Google Web Font (Heading)"
 },
 {
  "description": "Add the name of Google Web Font e.g. \"Open Sans\"",
  "description": "Add the name of <a href=\"http://google.com/webfonts\" target=\"_blank\">Google Web Font</a> e.g. \"Open Sans\"",
  "doctype": "DocField",
  "fieldname": "google_web_font_for_text",
  "fieldtype": "Data",
@@ -50,5 +50,5 @@ cur_frm.cscript.set_banner_from_image = function(doc) {
	var src = doc.banner_image;
	if(src.indexOf("/")==-1) src = "files/" + src;
	cur_frm.set_value("banner_html", "<a href='/'><img src='"+ src
		+"'></a>");
		+"' style='max-width: 200px;'></a>");
}
@@ -44,7 +44,7 @@ rss_item = u"""
	<description>%(content)s</description>
	<link>%(link)s</link>
	<guid>%(name)s</guid>
	<pubDate>%(creation)s</pubDate>
	<pubDate>%(published_on)s</pubDate>
</item>"""

def generate():
@@ -57,13 +57,12 @@ def generate():

	items = ''
	blog_list = webnotes.conn.sql("""\
		select page_name as name, modified, creation, title from `tabBlog Post`
		select page_name as name, published_on, modified, title, content from `tabBlog Post`
		where ifnull(published,0)=1
		order by creation desc, modified desc, name asc limit 20""", as_dict=1)
		order by published_on desc limit 20""", as_dict=1)

	for blog in blog_list:
		blog.link = host + '/' + blog.name + '.html'
		blog.content = get_blog_content(blog.name)

		items += rss_item % blog
@@ -6,6 +6,7 @@

.layout-wrapper {
	background-color: #fff;
	color: #333;
	padding: 10px;
	box-shadow: 1px 1px 3px 3px #ccc;
	font-size: 12px;
@@ -1,6 +1,6 @@
<style>
.item-main-image {
	max-width: 400px;
	max-width: 100%;
	margin: auto;
}
.web-long-description {
@@ -11,46 +11,42 @@
{% block content %}
{% include 'html/product_search_box.html' %}
{% include 'html/product_breadcrumbs.html' %}
<div class="span12">
	<h3 itemprop="name">{{ item_name }}</h3>
	<p class="help">Item Code: {{ name }}</p>
</div>
<div class="span12 product-page-content" itemscope itemtype="http://schema.org/Product">
	{% if slideshow %}
		{% include "html/slideshow.html" %}
	{% else %}
		{% if website_image %}
			<image itemprop="image" class="item-main-image"
				src="{{ website_image }}" />
		{% else %}
			<div class="img-area">
				{% include 'html/product_missing_image.html' %}
			</div>
		{% endif %}
	{% endif %}
	<br><br>
	<div class="row">
		<div class="span9">
			<h3>Product Description</h3>
		<div class="span6">
			{% if slideshow %}
				{% include "html/slideshow.html" %}
			{% else %}
				{% if website_image %}
					<image itemprop="image" class="item-main-image"
						src="{{ website_image }}" />
				{% else %}
					<div class="img-area">
						{% include 'html/product_missing_image.html' %}
					</div>
				{% endif %}
			{% endif %}
		</div>
		<div class="span6">
			<h3 itemprop="name" style="margin-top: 0px;">{{ item_name }}</h3>
			<p class="help">Item Code: {{ name }}</p>
			<h4>Product Description</h4>
			<div itemprop="description">
				{{ web_long_description or web_short_description or
					"[No description given]" }}
			</div>
			<hr>
			{% if obj.doclist.get({"doctype":"Item Website Specification"}) %}
			<h3>Specifications</h3>
			<table class="table table-bordered" style="width: 100%">
				{% for d in obj.doclist.get(
					{"doctype":"Item Website Specification"}) %}
				<tr>
					<td style="width: 30%;">{{ d.label }}</td>
					<td>{{ d.description }}</td>
				</tr>
				{% endfor %}
			</table>
			<h4>Specifications</h4>
			<table class="table table-bordered" style="width: 100%">
				{% for d in obj.doclist.get(
					{"doctype":"Item Website Specification"}) %}
				<tr>
					<td style="width: 30%;">{{ d.label }}</td>
					<td>{{ d.description }}</td>
				</tr>
				{% endfor %}
			</table>
			{% endif %}
		</div>
		<div class="span3">
			<div class="item-price hide">
				<p>Price:</p>
			</div>
@@ -309,13 +309,6 @@ def url_for_website(url):
		return url

def get_hex_shade(color, percent):
	# switch dark and light shades
	if int(color, 16) > int("808080", 16):
		percent = -percent

	# stronger diff for darker shades
	if int(color, 16) < int("333333", 16):
		percent = percent * 2

	def p(c):
		v = int(c, 16) + int(int('ff', 16) * (float(percent)/100))
@@ -329,4 +322,16 @@ def get_hex_shade(color, percent):
		return h

	r, g, b = color[0:2], color[2:4], color[4:6]
	return p(r) + p(g) + p(b)

	avg = (float(int(r, 16) + int(g, 16) + int(b, 16)) / 3)
	# switch dark and light shades
	if avg > 128:
		percent = -percent

	# stronger diff for darker shades
	if percent < 25 and avg < 64:
		percent = percent * 2

	return p(r) + p(g) + p(b)