
Python: deciding whether to UPDATE or INSERT a record in the database

#-*-coding:utf8-*-
# Debug/test script: check whether today's shop record exists, then UPDATE or INSERT.

import sys
import os
import time
import datetime

import MySQLdb

# Make the crawler package importable from its sibling directory.
path1 = (os.getcwd()).split(r'\test')[0]
sys.path.append(path1 + '\Shop_site_crawler')
from zsl_spider_lib import get_u_sql, get_s_sql, get_i_sql

# Connect to the local MySQL instance first; fall back to the remote RDS host.
try:
    conn = MySQLdb.connect(host='localhost', user='root', passwd='root', charset='utf8')
    conn.select_db('scrapy')
except MySQLdb.Error:
    conn = MySQLdb.connect(host='rds.com', user='scrapy', passwd='passwd', charset='utf8')
    conn.select_db('scrapy')

print 'task build start :', datetime.datetime.now()

#---------------- divider ----------------
table = 'shops'
curDate = time.strftime("%Y-%m-%d")

# Fields to write for this shop record.
i = {}
i['uid'] = 1314520
i['shop_uri'] = 'url'
i['shop_name'] = 'name'
i['nick'] = 'nick'
i['brand'] = 'brand'
i['platform'] = 0
i['addtime'] = time.strftime("%Y-%m-%d %H:%M:%S")
i['the_date'] = time.strftime("%Y-%m-%d")

db = conn.cursor()  # create a cursor

# Conditions that identify today's record for this shop.
conds = {}
conds['shop_name'] = i['shop_name']
conds['nick'] = i['nick']
conds['the_date'] = curDate

keys = ['count(1)']

insert_sql = get_i_sql(table, i)
select_sql = get_s_sql(table, keys, conds)
update_sql = get_u_sql(table, i, conds)

db.execute(select_sql)  # check whether a matching row already exists
res = db.fetchone()
if res[0]:
    db.execute(update_sql)
    print '----update ready!'
else:
    db.execute(insert_sql)
    print '----insert ready!'

conn.commit()
db.close()
conn.close()
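The SQL builders get_u_sql, get_s_sql and get_i_sql come from the author's own zsl_spider_lib module, which the post does not include. A minimal sketch of what such helpers might look like, purely as an assumption so the listing above reads on its own (the real module may differ):

# Hypothetical sketches of the zsl_spider_lib helpers; not the original implementations.
def get_i_sql(table, data):
    # INSERT INTO table (col1, col2, ...) VALUES ('v1', 'v2', ...)
    cols = ', '.join(data.keys())
    vals = ', '.join("'%s'" % v for v in data.values())
    return "INSERT INTO %s (%s) VALUES (%s)" % (table, cols, vals)

def get_s_sql(table, keys, conds):
    # SELECT col1, col2 FROM table WHERE c1 = 'v1' AND c2 = 'v2'
    where = ' AND '.join("%s = '%s'" % (k, v) for k, v in conds.items())
    return "SELECT %s FROM %s WHERE %s" % (', '.join(keys), table, where)

def get_u_sql(table, data, conds):
    # UPDATE table SET col1 = 'v1', ... WHERE c1 = 'v1' AND c2 = 'v2'
    sets = ', '.join("%s = '%s'" % (k, v) for k, v in data.items())
    where = ' AND '.join("%s = '%s'" % (k, v) for k, v in conds.items())
    return "UPDATE %s SET %s WHERE %s" % (table, sets, where)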

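Helpers like these interpolate raw values into the SQL string, so a quote character in the data would break the statement. MySQLdb can escape values itself when they are passed separately from the query; a sketch of the same existence check with placeholders, using the table and columns from the script above:

# Parameterized version of the existence check; the driver escapes the values.
check_sql = ("SELECT count(1) FROM shops "
             "WHERE shop_name = %s AND nick = %s AND the_date = %s")
db.execute(check_sql, (i['shop_name'], i['nick'], curDate))
exists = db.fetchone()[0]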

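If the shops table carries a unique index on the lookup columns (shop_name, nick, the_date), which the post does not confirm, the select-then-update-or-insert round trip can be collapsed into a single MySQL statement. A hedged sketch under that assumption:

# Assumes a UNIQUE index on (shop_name, nick, the_date); not stated in the original post.
upsert_sql = (
    "INSERT INTO shops (uid, shop_uri, shop_name, nick, brand, platform, addtime, the_date) "
    "VALUES (%s, %s, %s, %s, %s, %s, %s, %s) "
    "ON DUPLICATE KEY UPDATE uid = VALUES(uid), shop_uri = VALUES(shop_uri), "
    "brand = VALUES(brand), platform = VALUES(platform), addtime = VALUES(addtime)"
)
db.execute(upsert_sql, (i['uid'], i['shop_uri'], i['shop_name'], i['nick'],
                        i['brand'], i['platform'], i['addtime'], i['the_date']))
conn.commit()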


Original source: http://www.cnblogs.com/zsl-3/p/6014649.html