fastapi_crawler_scheduler
Usage
```python
from fastapi import FastAPI
from fastapi_crawler_scheduler import TaskScheduler

app = FastAPI()

task_scheduler = TaskScheduler(
    app=app,
    ssl=False,
    project_name="project_name",
    server_name="server_name",
    redis_username='redis_username',
    redis_password='redis_password',
    redis_host="127.0.0.1",
    redis_port=6379,
    thread_pool_size=50,
)
```
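Run the service like any other FastAPI application; the scheduler is attached to the `app` instance created above. A minimal sketch, assuming the usual uvicorn entry point (uvicorn is not part of this package):

```python
import uvicorn

if __name__ == "__main__":
    # Start the FastAPI app; task_scheduler was bound to `app` above.
    uvicorn.run(app, host="0.0.0.0", port=8000)
```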
Add or update a task - add_task
interval trigger
```python
def add_spider(**crawler_info):
    print(f"add_spider = {crawler_info}")
    print("add_spider")

trigger = 'interval'
crawler_info = {
    "topic": "interval insert_task",
    "title_handler_name": "interval insert_task",
    "seconds": 4,
}
job_id = 'job_1'
task_scheduler.add_task(
    func=add_spider,
    job_id=job_id,
    trigger=trigger,
    crawler_info=crawler_info,
    seconds=4,
)
```
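add_task also covers the "update" case from the section title. A hedged sketch, assuming that calling add_task again with an existing job_id replaces that job's schedule rather than creating a second job:

```python
# Re-register job_1 with a new interval
# (assumption: reusing a job_id updates the existing job).
task_scheduler.add_task(
    func=add_spider,
    job_id='job_1',
    trigger='interval',
    crawler_info={
        "topic": "interval update_task",
        "title_handler_name": "interval update_task",
        "seconds": 10,
    },
    seconds=10,
)
```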
date trigger
```python
def add_spider(**crawler_info):
    print(f"add_spider = {crawler_info}")
    print("add_spider")

trigger = 'date'
crawler_info = {
    "topic": "date insert_task",
    "title_handler_name": "date insert_task",
    "run_date": "2022-10-03 11:30:00",
}
job_id = 'job_1'
run_date = '2022-10-03 11:30:00'
task_scheduler.add_task(
    func=add_spider,
    job_id=job_id,
    trigger=trigger,
    crawler_info=crawler_info,
    run_date=run_date,
)
```
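The run_date above is a fixed timestamp; for a one-off job it is usually computed relative to the current time instead. A minimal sketch, assuming the trigger accepts the same "YYYY-MM-DD HH:MM:SS" string format used in the example:

```python
from datetime import datetime, timedelta

# Fire once, 30 seconds from now, using the same string format as above.
run_date = (datetime.now() + timedelta(seconds=30)).strftime("%Y-%m-%d %H:%M:%S")

task_scheduler.add_task(
    func=add_spider,
    job_id='job_1',
    trigger='date',
    crawler_info={"topic": "date insert_task", "run_date": run_date},
    run_date=run_date,
)
```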
cron trigger
```python
def add_spider(**crawler_info):
    print(f"add_spider = {crawler_info}")
    print("add_spider")

job_id = 'job_1'
trigger = 'cron'
minute = '*/2'
crawler_info = {
    "topic": "cron update_task",
    "title_handler_name": "cron update_task",
    "minute": minute,
}
task_scheduler.add_task(
    func=add_spider,
    job_id=job_id,
    trigger=trigger,
    crawler_info=crawler_info,
    minute=minute,
)
```
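The example only shows the minute field. If the cron trigger follows the usual APScheduler-style field names (hour, day_of_week, and so on, which is an assumption here; only minute is confirmed above), a daily job could look like this:

```python
# Run every day at 03:30; 'hour' is assumed to be accepted alongside 'minute'.
task_scheduler.add_task(
    func=add_spider,
    job_id='job_2',
    trigger='cron',
    crawler_info={
        "topic": "cron daily_task",
        "title_handler_name": "cron daily_task",
    },
    hour=3,
    minute=30,
)
```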
Delete a task - delete_task
```python
job_id = 'job_1'
task_scheduler.delete_task(job_id=job_id)
```
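Because the scheduler is bound to the FastAPI app, task management can also be exposed over HTTP. An illustrative sketch (the route itself is hypothetical and not part of this package):

```python
# Hypothetical endpoint for cancelling a scheduled job by id.
@app.delete("/jobs/{job_id}")
def remove_job(job_id: str):
    task_scheduler.delete_task(job_id=job_id)
    return {"deleted": job_id}
```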
Installation
PyPI

```shell
$ pip install fastapi-crawler-scheduler
```
Download files
File details
Details for the file fastapi_crawler_scheduler-2.1.5.tar.gz.
File metadata
- Download URL: fastapi_crawler_scheduler-2.1.5.tar.gz
- Upload date:
- Size: 7.6 kB
- Tags: Source
- Uploaded using Trusted Publishing? No
- Uploaded via: twine/5.1.1 CPython/3.11.5
File hashes
Algorithm | Hash digest
---|---
SHA256 | bb672d1865bb85e6bdbc854fcb01b5210c70170832a8649928e8b3b372a1d6d3
MD5 | ce494bed2ea0396a2655ac0311689998
BLAKE2b-256 | f1b342d2d1ffefe366664853194d7ba4ab18e5cd0fb2b697cf05e1698d72c69e
File details
Details for the file fastapi_crawler_scheduler-2.1.5-py3-none-any.whl.
File metadata
- Download URL: fastapi_crawler_scheduler-2.1.5-py3-none-any.whl
- Upload date:
- Size: 9.4 kB
- Tags: Python 3
- Uploaded using Trusted Publishing? No
- Uploaded via: twine/5.1.1 CPython/3.11.5
File hashes
Algorithm | Hash digest
---|---
SHA256 | 57a8dd5467a974363044f3dc645d77b10df5b517196b268f6d746cc0b3f46f8a
MD5 | 420a1255cbf84afb98678103e316845d
BLAKE2b-256 | 7235a7b59e3c104b17780f9ea82fad93d2265b8370beee2967891dfaa740df84