Create a celery project directory.
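A minimal layout for this first example might look like the following (the directory name celery_demo is an assumption; the file names match the scripts created below):

celery_demo/
├── celery_task.py     # defines the Celery app and the tasks
├── produce_task.py    # submits tasks to the broker
└── check_result.py    # looks up a task result by id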
Create celery_task.py:
import celery
import time

backend = 'redis://:@127.0.0.1:6379/3'
broker = 'redis://:@127.0.0.1:6379/2'
cel = celery.Celery('test', backend=backend, broker=broker)

@cel.task
def send_email(name):
    print("Sending email to %s..." % name)
    time.sleep(5)
    print("Finished sending email to %s" % name)
    return "ok"

@cel.task
def send_message(name):
    print("Sending SMS to %s..." % name)
    time.sleep(5)
    print("Finished sending SMS to %s" % name)
    return "ok"
Create produce_task.py to submit the tasks:
from celery_task import send_email, send_message

# .delay() is Celery's shortcut for apply_async(); it sends the task to the broker and returns an AsyncResult
result = send_email.delay("yuan")
print(result.id)

result2 = send_message.delay("alex")
print(result2.id)
Create check_result.py to look up a task result:
from celery.result import AsyncResult
from celery_task import cel

# the id is the task id printed by produce_task.py; it is also the key the result is stored under in redis
# AsyncResult looks the result up; app is the Celery app object
async_result = AsyncResult(id="3489df42-d882-4411-818b-725a4a128b72", app=cel)

if async_result.successful():
    result = async_result.get()
    print(result)              # on success this prints "ok"
    # async_result.forget()    # delete the result from the backend
elif async_result.failed():
    print('Task failed')
elif async_result.status == 'PENDING':
    print('Task is waiting to be executed')
elif async_result.status == 'RETRY':
    print('Task raised an exception and is being retried')
elif async_result.status == 'STARTED':
    print('Task has started executing')
Start the celery worker (the command must be run from the project directory, otherwise the celery_task module cannot be imported): celery -A celery_task worker -l info
Submit tasks by running produce_task.py, then check the execution result by running check_result.py, as sketched below.
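A minimal sketch of the workflow (the task id is whatever produce_task.py printed; in this example it has to be pasted into check_result.py by hand):

python produce_task.py    # prints one task id per submitted task
python check_result.py    # queries the result backend for that id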
Multi-task execution
Project layout:
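A plausible layout for the multi-task version (inferred from the imports below; the package name celery_tasks must match the include paths in celery.py, and __init__.py makes it a package):

celery_demo/
├── celery_tasks/
│   ├── __init__.py
│   ├── celery.py      # the Celery app and its configuration
│   ├── task01.py
│   └── task02.py
├── produce_task.py
└── check_result.py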
celery.py:
from celery import Celery

cel = Celery('celery_demo',
             broker='redis://127.0.0.1:6379/1',
             backend='redis://127.0.0.1:6379/2',
             # include the two task modules below so celery can find the tasks;
             # splitting tasks across files keeps them organized by category
             include=['celery_tasks.task01',
                      'celery_tasks.task02'
                      ])

# timezone
cel.conf.timezone = 'Asia/Shanghai'
# do not use UTC
cel.conf.enable_utc = False
task01.py and task02.py:
# task01.py
import time
from celery_tasks.celery import cel

@cel.task
def send_email(res):
    time.sleep(5)
    return "Finished sending email to %s" % res

# task02.py
import time
from celery_tasks.celery import cel

@cel.task
def send_msg(name):
    time.sleep(5)
    return "Finished sending SMS to %s" % name
produce_task.py:
from celery_tasks.task01 import send_email
from celery_tasks.task02 import send_msg

# tell celery to run each task right away, passing one positional argument
result = send_email.delay('yuan')
print(result.id)

result = send_msg.delay('yuan')
print(result.id)
check_result.py:
from celery.result import AsyncResult
from celery_tasks.celery import cel

async_result = AsyncResult(id="562834c6-e4be-46d2-908a-b102adbbf390", app=cel)

if async_result.successful():
    result = async_result.get()
    print(result)
    # async_result.forget()                 # delete the result; it is not removed from the backend automatically
    # async_result.revoke(terminate=True)   # terminate the task no matter what state it is in
    # async_result.revoke(terminate=False)  # cancel the task only if it has not started yet
elif async_result.failed():
    print('Task failed')
elif async_result.status == 'PENDING':
    print('Task is waiting to be executed')
elif async_result.status == 'RETRY':
    print('Task raised an exception and is being retried')
elif async_result.status == 'STARTED':
    print('Task has started executing')
Running scheduled tasks with Celery
To have celery run a task at a specified time, produce_task.py:
from celery_task import send_email
from datetime import datetime, timedelta

# Option 1: run the task at an explicit wall-clock time
# v1 = datetime(2020, 3, 11, 16, 19, 00)
# print(v1)
# v2 = datetime.utcfromtimestamp(v1.timestamp())
# print(v2)
# result = send_email.apply_async(args=["egon",], eta=v2)
# print(result.id)

# Option 2: run the task 10 seconds from now
ctime = datetime.now()
# eta is interpreted as UTC by default, so convert the local time first
utc_ctime = datetime.utcfromtimestamp(ctime.timestamp())
time_delay = timedelta(seconds=10)
task_time = utc_ctime + time_delay

# schedule the task with apply_async and an eta
result = send_email.apply_async(args=["egon"], eta=task_time)
print(result.id)
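An equivalent, shorter way to say "run this 10 seconds from now" is the countdown argument of apply_async, which takes a relative number of seconds (a sketch using the same send_email task as above):

from celery_task import send_email

# countdown is relative to now, so no manual UTC conversion is needed
result = send_email.apply_async(args=["egon"], countdown=10)
print(result.id)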
Periodic tasks under the multi-task directory structure, celery.py:
from datetime import timedelta
from celery import Celery
from celery.schedules import crontab

cel = Celery('tasks',
             broker='redis://127.0.0.1:6379/1',
             backend='redis://127.0.0.1:6379/2',
             include=['celery_tasks.task01',
                      'celery_tasks.task02',
                      ])

cel.conf.timezone = 'Asia/Shanghai'
cel.conf.enable_utc = False

cel.conf.beat_schedule = {
    # the schedule name can be anything
    'add-every-10-seconds': {
        # run the send_email task defined in task01
        'task': 'celery_tasks.task01.send_email',
        # run every 6 seconds; the commented-out lines show alternative schedules
        # 'schedule': 1.0,
        # 'schedule': crontab(minute="*/1"),
        'schedule': timedelta(seconds=6),
        # positional arguments passed to the task
        'args': ('张三',)
    },
    # 'add-every-12-seconds': {
    #     'task': 'celery_tasks.task01.send_email',
    #     # run at 8:42 on April 11 every year
    #     'schedule': crontab(minute=42, hour=8, day_of_month=11, month_of_year=4),
    #     'args': ('张三',)
    # },
}
# Start the Beat process: celery beat -A proj
# The Beat process reads the beat_schedule from the configuration and periodically sends tasks that are due to the task queue.
# Then start the worker process: celery -A proj worker -l info  (or celery -B -A proj worker -l info to run the worker with an embedded beat)
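With the multi-task layout above, "proj" corresponds to the celery_tasks package, so the concrete commands would presumably be (assuming the same Celery command-line form used above):

celery beat -A celery_tasks
celery -A celery_tasks worker -l info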