Python client for the Arctic Shift API.

Arctic Shift API Client Reference

This page gives practical examples for every public function in arcshiftwrap/arctic_shift.py.

  • API Base: https://arctic-shift.photon-reddit.com
  • Install: pip install arcshiftwrap

Setup

from datetime import datetime, timezone

from arcshiftwrap import (
	ArcticShiftClient,
	collect_posts_by_subreddits_parallel,
	collect_comments_by_subreddits_parallel,
)
from arcshiftwrap.arctic_shift import (
	collect_comments_by_windows,
	collect_posts_by_windows,
	deduplicate_items,
	format_date,
	normalize_response,
	split_time_range,
	utc_now,
)

# Request timeout, retry count, and retry backoff multiplier (illustrative values).
client = ArcticShiftClient(timeout=90, max_retries=4, backoff_factor=2.0)

request

Low-level method to send a GET request to the Arctic Shift API.

response = client.request(
	endpoint="posts/search",
	params={"subreddit": "technology", "limit": 10},
)
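
The raw payload can then be flattened with normalize_response; a sketch, assuming request returns the parsed JSON body and wraps results in a "data" key (see the normalize_response example further down):

items = normalize_response(response)
for item in items:
	print(item.get("id"), item.get("title"))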

ID Lookup Methods

get_posts_by_ids

posts = client.get_posts_by_ids(
	ids=["1abcde", "1fghij"],
	fields=["id", "subreddit", "title", "created_utc"],
)

get_comments_by_ids

comments = client.get_comments_by_ids(
	ids=["k12345", "k67890"],
	fields=["id", "subreddit", "body", "created_utc"],
)

get_subreddits_by_ids

subreddits = client.get_subreddits_by_ids(
	ids=["technology", "MachineLearning"],
	fields=["display_name", "subscribers", "over18"],
)

get_users_by_ids

users = client.get_users_by_ids(
	ids=["spez", "AutoModerator"],
	fields=["author", "total_karma", "created_utc"],
)

Search Methods

search_posts

posts = client.search_posts(
	subreddit="technology",
	after="2026-04-01",
	before="2026-04-08",
	limit=100,
	sort="asc",
	query="llm",
	fields=["id", "title", "selftext", "score", "num_comments"],
)

search_comments

comments = client.search_comments(
	subreddit="technology",
	after="2026-04-01",
	before="2026-04-08",
	limit=100,
	sort="asc",
	body="chatgpt",
	fields=["id", "body", "score", "link_id", "parent_id"],
)

get_comment_tree

tree = client.get_comment_tree(
	link_id="t3_1abcde",
	limit=500,
	start_breadth=4,
	start_depth=4,
)

Aggregation Methods

aggregate_posts

post_agg = client.aggregate_posts(
	aggregate="subreddit",
	frequency="day",
	after="2026-04-01",
	before="2026-04-08",
	min_count=5,
)

aggregate_comments

comment_agg = client.aggregate_comments(
	aggregate="author",
	frequency="day",
	subreddit="technology",
	after="2026-04-01",
	before="2026-04-08",
)

Subreddit Methods

search_subreddits

subs = client.search_subreddits(
	subreddit_prefix="tech",
	min_subscribers=50000,
	limit=25,
	sort="desc",
	sort_type="subscribers",
	fields=["display_name", "subscribers", "over18"],
)

get_subreddit_rules

rules = client.get_subreddit_rules(subreddits=["technology", "MachineLearning"])

get_subreddit_wikis

wikis = client.get_subreddit_wikis(
	subreddit="technology",
	paths=["index", "faq"],
	limit=10,
)

list_subreddit_wikis

wiki_paths = client.list_subreddit_wikis(subreddit="technology")

User Methods

search_users

users = client.search_users(
	author_prefix="sam",
	min_karma=1000,
	limit=25,
	sort="desc",
	sort_type="total_karma",
)

user_user_interactions

interactions = client.user_user_interactions(
	author="spez",
	subreddit="technology",
	after="2026-04-01",
	before="2026-04-08",
	min_count=2,
	limit=100,
)

user_subreddit_interactions

subreddit_interactions = client.user_subreddit_interactions(
	author="spez",
	weight_posts=1.0,
	weight_comments=1.0,
	after="2026-04-01",
	before="2026-04-08",
	min_count=1,
	limit=100,
)

aggregate_flairs

flairs = client.aggregate_flairs(author="spez")

Utility Endpoints

resolve_short_links

resolved = client.resolve_short_links(paths=["3g1jfiw", "3h2kxyz"])

time_series

series = client.time_series(
	key="posts",
	precision="day",
	after="2026-04-01",
	before="2026-04-08",
)

Helper Functions

utc_now

now_utc = utc_now()

format_date

dt_str = format_date(datetime(2026, 4, 1, 12, 0, 0, tzinfo=timezone.utc))
# "2026-04-01T12:00:00Z"

split_time_range

start = datetime(2026, 4, 1, tzinfo=timezone.utc)
end = datetime(2026, 4, 3, tzinfo=timezone.utc)
windows = split_time_range(start=start, end=end, step_hours=12)
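
A sketch of consuming the windows manually; this assumes split_time_range returns (start, end) datetime pairs and that the search endpoints accept format_date timestamps as after/before bounds:

for window_start, window_end in windows:
	batch = client.search_posts(
		subreddit="technology",
		after=format_date(window_start),
		before=format_date(window_end),
		limit=100,
	)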

normalize_response

items = normalize_response({"data": [{"id": "1"}, {"id": "2"}]})
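# Expected result (assuming the "data" wrapper is stripped): [{"id": "1"}, {"id": "2"}]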

deduplicate_items

deduped = deduplicate_items(
	items=[{"id": "a"}, {"id": "a"}, {"id": "b"}],
	id_field="id",
)
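# Expected result (assuming the first occurrence of each id is kept): [{"id": "a"}, {"id": "b"}]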

collect_posts_by_windows

start = datetime(2026, 4, 1, tzinfo=timezone.utc)
end = datetime(2026, 4, 8, tzinfo=timezone.utc)

posts = collect_posts_by_windows(
	client=client,
	subreddit="technology",
	start=start,
	end=end,
	step_hours=24,
	limit=100,
	fields=["id", "title", "created_utc"],
)
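
This presumably wraps the split_time_range / search_posts / deduplicate_items pattern sketched above into a single call.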

collect_comments_by_windows

start = datetime(2026, 4, 1, tzinfo=timezone.utc)
end = datetime(2026, 4, 8, tzinfo=timezone.utc)

comments = collect_comments_by_windows(
	client=client,
	subreddit="technology",
	start=start,
	end=end,
	step_hours=24,
	limit=100,
	fields=["id", "body", "created_utc"],
)

collect_posts_by_subreddits_parallel

Collect posts from multiple subreddits concurrently using ProcessPoolExecutor. This is significantly faster than collecting each subreddit sequentially.

from arcshiftwrap import collect_posts_by_subreddits_parallel

start = datetime(2026, 4, 1, tzinfo=timezone.utc)
end = datetime(2026, 4, 8, tzinfo=timezone.utc)

subreddits = ["technology", "MachineLearning", "chatgpt", "OpenAI"]

posts = collect_posts_by_subreddits_parallel(
	client=client,
	subreddits=subreddits,
	start=start,
	end=end,
	step_hours=24,
	limit=100,
	max_workers=4,  # number of parallel processes
	fields=["id", "title", "subreddit", "created_utc"],
)
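
Because these parallel helpers run under ProcessPoolExecutor, guard the call site on platforms that spawn worker processes (Windows, and macOS by default); a minimal sketch, reusing the arguments above:

if __name__ == "__main__":
	posts = collect_posts_by_subreddits_parallel(
		client=client,
		subreddits=subreddits,
		start=start,
		end=end,
		max_workers=4,
	)

The same applies to collect_comments_by_subreddits_parallel below.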

collect_comments_by_subreddits_parallel

Collect comments from multiple subreddits concurrently using ProcessPoolExecutor. This is significantly faster than collecting each subreddit sequentially.

from arcshiftwrap import collect_comments_by_subreddits_parallel

start = datetime(2026, 4, 1, tzinfo=timezone.utc)
end = datetime(2026, 4, 8, tzinfo=timezone.utc)

subreddits = ["technology", "MachineLearning", "chatgpt", "OpenAI"]

comments = collect_comments_by_subreddits_parallel(
	client=client,
	subreddits=subreddits,
	start=start,
	end=end,
	step_hours=24,
	limit=100,
	max_workers=4,  # number of parallel processes
	fields=["id", "body", "subreddit", "created_utc"],
)

Notes

  • These examples reflect the current public API in arcshiftwrap/arctic_shift.py.
  • Internal helper methods such as _join, _bool, _clean, _parse_response, and _rate_limit_wait are private and intentionally omitted from this reference; they are not meant to be called directly.
