A wrapper around OSRS APIs

Project description

The project

The goal is to provide a wrapper around the various Old School RuneScape (OSRS) APIs.
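
Assuming the package is published on PyPI under the name osrs (as the distribution files below suggest), it can be installed with pip:

pip install osrs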

osrs hiscores

import asyncio

from aiohttp import ClientSession

from osrs.asyncio import Hiscore, HSMode
from osrs.utils import RateLimiter
from osrs.exceptions import PlayerDoesNotExist


async def main():
    # 100 calls per minute
    limiter = RateLimiter(calls_per_interval=100, interval=60)
    hiscore_instance = Hiscore(proxy="", rate_limiter=limiter)
    
    async with ClientSession() as session:
        player_stats = await hiscore_instance.get(
            mode=HSMode.OLDSCHOOL,
            player="extreme4all",
            session=session,
        )
        print(player_stats)

    # If you do not provide a session, one is created for you, but it is not reused.
    # For multiple requests we advise reusing a single session, as in the example above.
    player_stats = await hiscore_instance.get(
        mode=HSMode.OLDSCHOOL,
        player="extreme4all",
    )
    print(player_stats)

# Run the asynchronous main function
if __name__ == "__main__":
    asyncio.run(main())
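
The snippet above imports PlayerDoesNotExist but does not use it; below is a minimal sketch of catching it for a name that is not on the hiscores, assuming Hiscore.get raises it when the player cannot be found:

import asyncio

from aiohttp import ClientSession

from osrs.asyncio import Hiscore, HSMode
from osrs.exceptions import PlayerDoesNotExist
from osrs.utils import RateLimiter


async def main():
    limiter = RateLimiter(calls_per_interval=100, interval=60)
    hiscore_instance = Hiscore(proxy="", rate_limiter=limiter)

    async with ClientSession() as session:
        try:
            player_stats = await hiscore_instance.get(
                mode=HSMode.OLDSCHOOL,
                player="some name that does not exist",
                session=session,
            )
            print(player_stats)
        except PlayerDoesNotExist:
            # The lookup failed because the player is not on the hiscores
            print("player not found")


if __name__ == "__main__":
    asyncio.run(main())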

osrs itemdb (Catalogue & Grand Exchange)

import asyncio

from aiohttp import ClientSession

from osrs.asyncio import ItemDBMode, Catalogue, Graph
from osrs.utils import RateLimiter

async def main():
    # Initialize the Catalogue with optional proxy and rate limiter
    limiter = RateLimiter(calls_per_interval=100, interval=60)
    catalogue_instance = Catalogue(proxy="", rate_limiter=limiter)
    graph_instance = Graph(proxy="", rate_limiter=limiter)

    async with ClientSession() as session:
        # Example 1: Fetching items by alphabetical filter
        alpha = "A"  # Items starting with "A"
        page = 1     # First page of results
        category = 1 # Category identifier, for OSRS there is only 1 category
        items = await catalogue_instance.get_items(
            session, 
            alpha=alpha, 
            page=page, 
            mode=ItemDBMode.OLDSCHOOL, 
            category=category
        )
        print("Fetched Items:", items)

        # Example 2: Fetching detailed information for a specific item
        item_id = 4151  # Example item ID (Abyssal whip in OSRS)
        item_detail = await catalogue_instance.get_detail(
            session, 
            item_id=item_id, 
            mode=ItemDBMode.OLDSCHOOL
        )
        print("Item Detail:", item_detail)

        # Example 3: Fetching historical trade data (price graph) for a specific item
        item_id = 4151  # Example item ID (Abyssal whip in OSRS)
        trade_history = await graph_instance.get_graph(
            session, 
            item_id=item_id, 
            mode=ItemDBMode.OLDSCHOOL
        )
        print("Trade History:", trade_history)

# Run the asynchronous main function
if __name__ == "__main__":
    asyncio.run(main())
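
The catalogue calls above are independent requests, so within one session they can also be issued concurrently through the same shared rate limiter. A minimal sketch using asyncio.gather (the extra item IDs are illustrative only):

import asyncio

from aiohttp import ClientSession

from osrs.asyncio import ItemDBMode, Catalogue
from osrs.utils import RateLimiter


async def main():
    limiter = RateLimiter(calls_per_interval=100, interval=60)
    catalogue_instance = Catalogue(proxy="", rate_limiter=limiter)

    # Illustrative item IDs, fetched concurrently through the shared rate limiter
    item_ids = [4151, 11802, 11832]

    async with ClientSession() as session:
        details = await asyncio.gather(
            *[
                catalogue_instance.get_detail(
                    session, item_id=item_id, mode=ItemDBMode.OLDSCHOOL
                )
                for item_id in item_ids
            ]
        )
        for detail in details:
            print(detail)


if __name__ == "__main__":
    asyncio.run(main())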

wiki prices

The OSRS wiki collects item prices via RuneLite and exposes them through an API.

import asyncio

from aiohttp import ClientSession

from osrs.asyncio import WikiPrices, Interval
from osrs.utils import RateLimiter

async def main():
    limiter = RateLimiter(calls_per_interval=100, interval=60)
    prices_instance = WikiPrices(user_agent="Your User Agent", rate_limiter=limiter)

    async with ClientSession() as session:
        # Fetch item mappings
        mappings = await prices_instance.get_mapping(
            session=session
        )
        print("Item Mappings:", mappings)

        # Fetch latest prices
        latest_prices = await prices_instance.get_latest_prices(
            session=session
        )
        print("Latest Prices:", latest_prices)

        # Fetch average prices
        average_prices = await prices_instance.get_average_prices(
            session=session, 
            interval=Interval.FIVE_MIN
        )

        print("Average Prices:", average_prices)

        # Fetch time series data
        item_id = 4151  # Example item ID (Abyssal whip in OSRS)
        time_series = await prices_instance.get_time_series(
            session=session, 
            item_id=item_id, 
            timestep=Interval.ONE_HOUR
        )
        print("Time Series Data:", time_series)

# Run the asynchronous main function
if __name__ == "__main__":
    asyncio.run(main())
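
Each get_time_series call takes a single item and timestep, so multiple granularities can be fetched concurrently within one session. A minimal sketch reusing only the Interval members shown above:

import asyncio

from aiohttp import ClientSession

from osrs.asyncio import WikiPrices, Interval
from osrs.utils import RateLimiter


async def main():
    limiter = RateLimiter(calls_per_interval=100, interval=60)
    prices_instance = WikiPrices(user_agent="Your User Agent", rate_limiter=limiter)

    item_id = 4151  # Example item ID (Abyssal whip in OSRS)

    async with ClientSession() as session:
        # Fetch two granularities of time series data for the same item concurrently
        five_min, one_hour = await asyncio.gather(
            prices_instance.get_time_series(
                session=session, item_id=item_id, timestep=Interval.FIVE_MIN
            ),
            prices_instance.get_time_series(
                session=session, item_id=item_id, timestep=Interval.ONE_HOUR
            ),
        )
        print("5m:", five_min)
        print("1h:", one_hour)


if __name__ == "__main__":
    asyncio.run(main())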


Download files

Download the file for your platform.

Source Distribution

osrs-0.0.14.tar.gz (10.6 kB, Source)

Built Distribution

osrs-0.0.14-py3-none-any.whl (10.9 kB, Python 3)

File details

Details for the file osrs-0.0.14.tar.gz.

File metadata

  • Download URL: osrs-0.0.14.tar.gz
  • Upload date:
  • Size: 10.6 kB
  • Tags: Source
  • Uploaded using Trusted Publishing? Yes
  • Uploaded via: twine/5.1.1 CPython/3.12.7

File hashes

Hashes for osrs-0.0.14.tar.gz:

  • SHA256: 360f10aa6565f15497b25586c2edcd8fc8b81fe8cc2d3ad9ea316983c1c3c37b
  • MD5: d68784d8115e3fdd4fe8286320772a34
  • BLAKE2b-256: b50fef387da4bad6831a3f2621a678add070db97bd4b564c7fe8ad9f9db7b006
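
A minimal sketch of checking a downloaded file against the SHA256 digest above, assuming osrs-0.0.14.tar.gz is in the current working directory:

import hashlib

# SHA256 digest published for osrs-0.0.14.tar.gz (see above)
EXPECTED_SHA256 = "360f10aa6565f15497b25586c2edcd8fc8b81fe8cc2d3ad9ea316983c1c3c37b"

with open("osrs-0.0.14.tar.gz", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

print("match" if digest == EXPECTED_SHA256 else "mismatch")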

Provenance

The following attestation bundles were made for osrs-0.0.14.tar.gz:

Publisher: python-package.yml on Bot-detector/osrs


File details

Details for the file osrs-0.0.14-py3-none-any.whl.

File metadata

  • Download URL: osrs-0.0.14-py3-none-any.whl
  • Upload date:
  • Size: 10.9 kB
  • Tags: Python 3
  • Uploaded using Trusted Publishing? Yes
  • Uploaded via: twine/5.1.1 CPython/3.12.7

File hashes

Hashes for osrs-0.0.14-py3-none-any.whl:

  • SHA256: f66aa99cc08203208f493ece768f7105ab777f671f580f77121c34a529794b55
  • MD5: 7b7d38a8ec0e0270d0e2a7112e844d1a
  • BLAKE2b-256: 87303c97a69ee66e67313f410df6626bf929344e46a150437c0cca6ab060b657

Provenance

The following attestation bundles were made for osrs-0.0.14-py3-none-any.whl:

Publisher: python-package.yml on Bot-detector/osrs

