Tools for Python

License: MIT

CrawlerCodePythonTools - Documentation

Installation

pip install CrawlerCodePythonTools

Config

from pythontools.core import config

cfg = config.Config(path="", default_config={"config_data_1": "value_123"})
cfgData = cfg.getConfig()

# get config value
print(cfgData["config_data_1"])

# change the value and save the config
cfgData["config_data_1"] = "value_456"
cfg.saveConfig()
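
To verify that the change was persisted, the config can be loaded again from the same path and read back. A minimal sketch using only the calls shown above (it assumes an existing config file takes precedence over default_config):

# re-load the config from the same path and read the saved value back
cfg2 = config.Config(path="", default_config={"config_data_1": "value_123"})
print(cfg2.getConfig()["config_data_1"])  # expected: "value_456"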

Logger

from pythontools.core import logger

# path to logs directory (optional)
logger.init_log("logs")

# set time format (optional)
logger.set_time_format(date=True, time=True)

# print color test
logger.log("§1Blue §9Light-Blue §3Cyan §bLight-Cyan §4Red §cLight-Red §6Yellow §eLight-Yellow §2Green §aLight-Green §5Magenta §dLight-Magenta §fWhite §7Light-Gray §8Gray §0Black")

CronJob

from pythontools.core import cronjob, logger

Interval-based CronJob

def cron_interval_based():
    logger.log("Interval-based CronJob done.")

# register CronJob
cronjob.register_cron_job(cronjob.CronJob("cron_interval_based", 10, cron_interval_based))

Time-based CronJob

def cron_time_based():
    logger.log("Time-based CronJob done.")

# register CronJob
#cronjob.register_cron_job(cronjob.CronJob("cron_time_based", "20", cron_time_based))  # 20 => 20h
cronjob.register_cron_job(cronjob.CronJob("cron_time_based", "20:10", cron_time_based))  # 20:10 => 20h 10m
#cronjob.register_cron_job(cronjob.CronJob("cron_time_based", "20:10:30", cron_time_based))  # 20:10:30 => 20h 10m 30s

Weekday-based CronJob

def cron_weekday_based():
    logger.log("today is friday!")
    logger.log("Weekday-based CronJob done.")

# register CronJob
cronjob.register_cron_job(cronjob.CronJob("cron_weekday_based", "friday", cron_weekday_based))

Server and Client (sockets)

Server

from pythontools.sockets import server
from threading import Thread

SERVER = server.Server(password="PASSWORD")

def ON_CLIENT_CONNECT(client):
    # send a message to the client on connect via its clientSocket
    SERVER.sendTo(client["clientSocket"], {"METHOD": "HELLO"})

def ON_CLIENT_DISCONNECT(client):
    pass

def ON_RECEIVE(client, data):
    METHOD = data["METHOD"]

SERVER.ON_CLIENT_CONNECT(ON_CLIENT_CONNECT)
SERVER.ON_CLIENT_DISCONNECT(ON_CLIENT_DISCONNECT)
SERVER.ON_RECEIVE(ON_RECEIVE)

Thread(target=SERVER.start, args=["HOST-IP", 15749]).start()

# send a message to a specific client by its clientID
SERVER.sendToClient("MY_CLIENT_ID", {"METHOD": "TEST", "mydata": "123"})
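
The ON_RECEIVE callback can dispatch on the received METHOD and answer with the same sendTo call. A minimal sketch (the "PING"/"PONG" method names are illustrative, not part of the library):

def ON_RECEIVE(client, data):
    # reply to an illustrative "PING" request with a "PONG" response
    if data["METHOD"] == "PING":
        SERVER.sendTo(client["clientSocket"], {"METHOD": "PONG"})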

Client

from pythontools.sockets import client
from threading import Thread

CLIENT = client.Client(password="PASSWORD", clientID="MY_CLIENT_ID", clientType="CLIENT")

def ON_CONNECT():
    pass

def ON_RECEIVE(data):
    METHOD = data["METHOD"]
    # handle the TEST message
    if METHOD == "TEST":
        print("test:", data["mydata"])

CLIENT.ON_CONNECT(ON_CONNECT)
CLIENT.ON_RECEIVE(ON_RECEIVE)

Thread(target=CLIENT.connect, args=["HOST-IP", 15749]).start()
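
Note: the clientID given to the Client constructor ("MY_CLIENT_ID" here) is the same ID the server uses with sendToClient, so the TEST message sent in the server example above is delivered to this client.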

WebBot

Download chromedriver (Chrome) or geckodriver (Firefox) for the browser you want to drive.

import time
from pythontools.webbot import webbot

# Google Chrome
browser = webbot.WebBot().Chrome(chromedriver="chromedriver.exe")
# Firefox
#browser = webbot.WebBot().Firefox(geckodriver="geckodriver.exe")
browser.get("https://www.google.com/")

browser.input('//*[@id="tsf"]/div[2]/div[1]/div[1]/div/div[2]/input', "what is python?")
browser.click('//*[@id="tsf"]/div[2]/div[1]/div[2]/div[2]/div[2]/center/input[1]')

time.sleep(10)

browser.close()
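
Note: the XPath expressions above target Google's search page markup at the time of writing and will need to be updated if the page layout changes.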

