1. 程式人生 > 資料庫 >python 監聽salt job狀態,並任務資料推送到redis中的方法

python 監聽salt job狀態,並將任務資料推送到redis中的方法

salt分發後,主動將已完成的任務資料推送到redis中,使用redis的生產者模式,進行訊息傳送

#coding=utf-8
import fnmatch,json,logging
import salt.config
import salt.utils.event
from salt.utils.redis import RedisPool
import sys,os,datetime,random
import multiprocessing,threading
from joi.utils.gobsAPI import PostWeb
# Module-level logger for this listener module.
logger = logging.getLogger(__name__)
# Salt master configuration loaded from a deployment-specific path.
opts = salt.config.client_config('/data/salt/saltstack/etc/salt/master')
# Shared redis connection built from the master config's 'redis_db' section.
r_conn = RedisPool(opts.get('redis_db')).getConn()
# Module-level lock used to serialize RedisQueueDaemon worker threads.
lock = threading.Lock()
class RedisQueueDaemon(object):
	'''
	redis 佇列監聽器
	'''
	def __init__(self,r_conn):
		self.r_conn = r_conn #redis 連線例項
		self.task_queue = 'task:prod:queue' #任務訊息佇列
	def listen_task(self):
		'''
		監聽主函式
		'''
		while True:
				queue_item = self.r_conn.blpop(self.task_queue,0)[1]
				print "queue get",queue_item
				#self.run_task(queue_item)
				t = threading.Thread(target=self.run_task,args=(queue_item,))
				t.start()
	def run_task(self,info):
		'''
		執行操作函式
		'''
		lock.acquire()
		info = json.loads(info)
		if info['type'] == 'pushTaskData':
			task_data = self.getTaskData(info['jid'])
			task_data = json.loads(task_data) if task_data else []
			logger.info('獲取快取資料:%s' % task_data)
			if task_data:
				if self.sendTaskData2bs(task_data):
					task_data = []
			self.setTaskData(info['jid'],task_data)
		elif info['type'] == 'setTaskState':
			self.setTaskState(info['jid'],info['state'],info['message'])
		elif info['type'] == 'setTaskData':
			self.setTaskData(info['jid'],info['data'])
		lock.release()
	def getTaskData(self,jid):
		return self.r_conn.hget('task:'+jid,'data')
	def setTaskData(self,jid,data):
		self.r_conn.hset('task:'+jid,'data',json.dumps(data))
	def sendTaskData2bs(self,task_data):
		logger.info('傳送任務資料到後端...')
		logger.info(task_data)
		if task_data:
			p = PostWeb('/jgapi/verify',task_data,'pushFlowTaskData')
			result = p.postRes()
			print result
			if result['code']:
				logger.info('傳送成功!')
				return True
			else:
				logger.error('傳送失敗!')
				return False
		else:
			return True
	def setTaskState(self,state,message=''):
		logger.info('到後端設定任務【%s】狀態' % str(jid))
		p = PostWeb('/jgapi/verify',{'code':jid,'state':'success','message':message},'setTaskState')
		result = p.postRes()
		if result['code']:
			logger.info('設定任務【%s】狀態成功!' % str(jid))
			return True,result
		else:
			logger.error('設定任務【%s】狀態失敗!' % str(jid))
			return result		
def salt_job_listener():
	'''
	salt job 監聽器
	'''
	sevent = salt.utils.event.get_event(
			'master',sock_dir=opts['sock_dir'],transport=opts['transport'],opts=opts)	
	while True:
		ret = sevent.get_event(full=True)
		if ret is None:
			continue
		if fnmatch.fnmatch(ret['tag'],'salt/job/*/ret/*'):
			task_key = 'task:'+ret['data']['jid']
			task_state = r_conn.hget(task_key,'state')
			task_data = r_conn.hget(task_key,'data')
			if task_state:
				jid_data = {
					'code':ret['data']['jid'],'project_id':settings.SALT_MASTER_OPTS['project_id'],'serverip':ret['data']['id'],'returns':ret['data']['return'],'name':ret['data']['id'],'state':'success' if ret['data']['success'] else 'failed',}
				task_data = json.loads(task_data) if task_data else []
				task_data.append(jid_data)
				logger.info("新增資料:%s" % json.dumps(task_data))
				r_conn.lpush('task:prod:queue',json.dumps({'type':'setTaskData','jid':ret['data']['jid'],'data':task_data}))
				#r_conn.hset(task_key,json.dumps(task_data))						
				if task_state == 'running':
					if len(task_data)>=1:
						logger.info('新增訊息到佇列:pushTaskData')
						r_conn.lpush('task:prod:queue',json.dumps({'jid':ret['data']['jid'],'type':'pushTaskData'}))
				else:
					logger.info('任務{0}完成,傳送剩下的資料到後端...'.format(task_key))
					logger.info('新增訊息到佇列:pushTaskData')
					r_conn.lpush('task:prod:queue','type':'pushTaskData'}))
				
				print datetime.datetime.now()
 
def run():
	print 'start redis product queue listerner...'
	logger.info('start redis product queue listerner...')
	multiprocessing.Process(target=RedisQueueDaemon(r_conn).listen_task,args=()).start()
	print 'start salt job listerner...'
	logger.info('start salt job listerner...')
	multiprocessing.Process(target=salt_job_listener,args=()).start()
 
	'''
	p=multiprocessing.Pool(2)
	print 'start redis product queue listerner...'
	p.apply_async(redis_queue_listenr,())
	print 'start salt job listerner...'
	p.apply_async(salt_job_listener,())
	p.close()
	p.join()
	'''

以上這篇python 監聽salt job狀態,並任務資料推送到redis中的方法就是小編分享給大家的全部內容了,希望能給大家一個參考,也希望大家多多支援我們。