本人原創,轉載請註明出處,合法轉載
1. Python 多程序 + gevent 實作並發
#!/bin/env python
#-*- coding: UTF-8 -*-
from __future__ import print_function
from multiprocessing import Pool
import threading
import os
import sys
import pdb
import time
import pprint
import gevent
from gevent.queue import Queue
g_progress_num = 3
g_read_gevent_num = 1
g_set__gevent_num = 5
def eachFile(filepath):
    """Seed the Redis list "file_path_list" with every entry of *filepath*.

    Joins *filepath* with each directory entry, decodes the result from GBK,
    and pushes it onto the shared Redis queue that the worker processes read.
    Exits the process with status 1 on any failure.

    :param filepath: directory whose entries are enqueued; expected to end
        with a path separator, since the code concatenates with '%s%s'.
    """
    try:
        # BUG FIX: the original file never imports `redis` anywhere, so this
        # function raised NameError. Import locally to keep the dependency
        # explicit (requires the third-party `redis` package).
        import redis
        redis_client = redis.StrictRedis(host='192.168.1.1', port=6390, db=0)
        pathDir = os.listdir(filepath)
        # Start from a clean queue so stale paths from a previous run vanish.
        redis_client.delete("file_path_list")
        for allDir in pathDir:
            child = os.path.join('%s%s' % (filepath, allDir))
            # decode('gbk') assumes Python 2 byte strings from os.listdir
            # (GBK-encoded filenames) -- TODO confirm before porting to Py3,
            # where str has no .decode().
            redis_client.rpush("file_path_list", str(child.decode('gbk')))
    except Exception as e:
        print("[ERROR eachFile]: {0} ".format(e))
        sys.exit(1)
def gevent_set_job(n):
    """Greenlet worker for the "set" side of the pipeline (placeholder).

    :param n: worker index assigned by the spawner; unused by the stub.
    """
    try:
        # do set job
        # BUG FIX: a bare comment leaves the try block empty, which is a
        # SyntaxError; `pass` keeps the stub valid until real work is added.
        pass
    except Exception as e:
        print("[ERROR]:{0} {1}".format(time.ctime(), e), file=sys.stderr)
        # NOTE(review): sys.exit inside a greenlet raises SystemExit in that
        # greenlet only; gevent.joinall will still collect it.
        sys.exit(1)
def gevent_read_job(n):
    """Greenlet worker for the "read" side of the pipeline (placeholder).

    :param n: worker index assigned by the spawner; unused by the stub.
    """
    try:
        # do read job
        # BUG FIX: a bare comment leaves the try block empty (SyntaxError),
        # and the original line carried a stray HTML <span> artifact from the
        # blog paste. `pass` keeps the stub valid until real work is added.
        pass
    except Exception as e:
        print("[ERROR]:{0} {1}".format(time.ctime(), e), file=sys.stderr)
def work_progress():
    """Run one worker process: spawn reader and setter greenlets, then wait.

    Spawns g_read_gevent_num readers and g_set__gevent_num setters and joins
    each group in turn.
    """
    # BUG FIX: the original referenced g_read_line_gevent_num and
    # g_set_tag_to_cache_gevent_num, which are defined nowhere (NameError);
    # the module-level constants are g_read_gevent_num / g_set__gevent_num.
    # range replaces Py2-only xrange; identical behavior for these loops.
    reads = [gevent.spawn(gevent_read_job, i) for i in range(g_read_gevent_num)]
    sets = [gevent.spawn(gevent_set_job, i) for i in range(g_set__gevent_num)]
    gevent.joinall(reads)
    gevent.joinall(sets)
#####Start from here!###########
if __name__=='__main__':
    # Seed the Redis work queue from the directory given on the command line.
    eachFile(sys.argv[1])
    print("Start time: {0} ".format(time.ctime()))
    p = Pool(g_progress_num)
    # range (not Py2-only xrange) keeps this runnable on Python 2 and 3.
    # NOTE(review): apply_async results are discarded, so exceptions raised
    # inside work_progress are silently swallowed by the pool.
    for i in range(g_progress_num):
        p.apply_async(work_progress)
    p.close()
    p.join()
    #work_progress()   # serial fallback, useful when debugging
    print("End time: {0} ".format(time.ctime()))
2. Python 多程序 + 多線程實作並發
#!/bin/env python
#-*- coding: UTF-8 -*-
from __future__ import print_function
from multiprocessing import Pool
import threading
import os
import sys
import pdb
import time
import pprint
g_count = 0
g_thread_num = 3
g_progress_num = 12
lock = threading.Lock()
def eachFile(filepath):
    """Seed the Redis list "file_path_list" with every entry of *filepath*.

    Joins *filepath* with each directory entry, decodes the result from GBK,
    and pushes it onto the shared Redis queue that the worker processes read.
    Exits the process with status 1 on any failure.

    :param filepath: directory whose entries are enqueued; expected to end
        with a path separator, since the code concatenates with '%s%s'.
    """
    try:
        # BUG FIX: the original line carried a stray HTML <span> artifact
        # (SyntaxError), and the file never imports `redis` (NameError).
        # Import locally to keep the third-party dependency explicit.
        import redis
        redis_client = redis.StrictRedis(host='192.168.1.1', port=6300, db=0)
        pathDir = os.listdir(filepath)
        # Start from a clean queue so stale paths from a previous run vanish.
        redis_client.delete("file_path_list")
        for allDir in pathDir:
            child = os.path.join('%s%s' % (filepath, allDir))
            # decode('gbk') assumes Python 2 byte strings from os.listdir
            # (GBK-encoded filenames) -- TODO confirm before porting to Py3.
            redis_client.rpush("file_path_list", str(child.decode('gbk')))
    except Exception as e:
        print("[ERROR eachFile]: {0} ".format(e))
        sys.exit(1)
def work_thread():
    """Thread worker: do the job, then bump the shared completion counter.

    On failure it forces g_count up to g_thread_num so the parent's wait
    loop in work_progress unblocks instead of hanging forever.
    """
    global g_count, lock, g_thread_num
    try:
        # do something
        # BUG FIX: the original acquire line carried a stray HTML <span>
        # artifact (SyntaxError). `with lock` also releases on exception,
        # unlike the manual acquire()/release() pair.
        with lock:
            g_count += 1
    except Exception as e:
        print("[ERROR]:{0} {1}".format(time.ctime(), e), file=sys.stderr)
        with lock:
            # Jump the counter to the target so the parent stops waiting.
            g_count = g_thread_num
        # NOTE(review): sys.exit in a worker thread raises SystemExit in
        # this thread only -- the process keeps running.
        sys.exit(1)
def work_progress():
    """Run one worker process: start g_thread_num threads and wait for them.

    Each thread runs work_thread; this function returns once every thread
    has finished.
    """
    threads = []
    # range replaces Py2-only xrange; identical behavior for this loop.
    for _ in range(g_thread_num):
        t = threading.Thread(target=work_thread)
        t.start()
        threads.append(t)
    # BUG FIX: the original busy-waited on the shared g_count counter with
    # time.sleep(3). Joining the threads waits for the same completion
    # condition without polling, without the shared-counter race, and
    # without up to 3 seconds of extra latency.
    for t in threads:
        t.join()
#####Start from here!###########
if __name__=='__main__':
    # Seed the Redis work queue from the directory given on the command line.
    eachFile(sys.argv[1])
    print("Start time: {0} ".format(time.ctime()))
    p = Pool(g_progress_num)
    # range (not Py2-only xrange) keeps this runnable on Python 2 and 3.
    # NOTE(review): apply_async results are discarded, so exceptions raised
    # inside work_progress are silently swallowed by the pool.
    for i in range(g_progress_num):
        p.apply_async(work_progress)
    p.close()
    p.join()
    print("End time: {0} ".format(time.ctime()))