
A Python wrapper for the Yandex Webmaster API.

Install

Install using pip:

pip install yandex-webmaster-api

Usage

from yandex_webmaster import YandexWebmaster
client = YandexWebmaster('<access_token>')

get hosts

hosts = client.get_hosts()
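
Every call below needs a host_id. A minimal sketch of pulling one from the get_hosts() result, assuming each entry is a dict with a host_id key (the response shape is an assumption, not part of the documented API):

hosts = client.get_hosts()
# 'host_id' is an assumed field name; inspect the actual response to confirm
host_id = hosts[0]['host_id']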

get popular search queries

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_popular_search_queries('<host_id>', date_from, date_to, query_indicator=['TOTAL_SHOWS'])

get search query all history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_search_query_all_history('<host_id>', date_from, date_to, query_indicator=['TOTAL_SHOWS'], device_type_indicator='DESKTOP')

get single search query history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_single_search_query_history('<host_id>', '<query_id>', date_from, date_to, query_indicator=['TOTAL_SHOWS'], device_type_indicator='DESKTOP')
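
One way to obtain the <query_id> used above is to take it from the popular-queries result. A rough sketch, assuming the response carries a 'queries' list whose entries have a 'query_id' field (both names are assumptions):

popular = client.get_popular_search_queries('<host_id>', date_from, date_to, query_indicator=['TOTAL_SHOWS'])
# 'queries' and 'query_id' are assumed keys; adjust to the actual response shape
query_id = popular['queries'][0]['query_id']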

get list query analytics

result = client.get_list_query_analytics('<host_id>', "ALL", limit=500, offset=500)

get host info

result = client.get_host('<host_id>')

get sqi history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_sqi_history('<host_id>', date_from, date_to)

add host

result = client.add_host(host_url='<host_url>')

delete host

result = client.delete_host(host_id='<host_id>')

get sitemaps

result = client.get_sitemaps(host_id='<host_id>')

get sitemap

result = client.get_sitemap(host_id='<host_id>', sitemap_id='<sitemap_id>')
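
The two calls above can be combined to fetch details for every sitemap of a host. A sketch, assuming the get_sitemaps() response exposes a 'sitemaps' list with a 'sitemap_id' per entry (assumed field names):

sitemaps = client.get_sitemaps(host_id='<host_id>')
for sm in sitemaps['sitemaps']:  # 'sitemaps' / 'sitemap_id' are assumed keys
    details = client.get_sitemap(host_id='<host_id>', sitemap_id=sm['sitemap_id'])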

add sitemap

result = client.add_sitemap(host_id='<host_id>', host_url='<host_url>')

delete sitemap

result = client.delete_sitemap(host_id='<host_id>', sitemap_id='<sitemap_id>')

get indexing stats

result = client.get_indexing_stats(host_id='<host_id>')

get indexing history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_indexing_history(host_id='<host_id>', date_from=date_from, date_to=date_to)

get indexing samples

result = client.get_indexing_samples(host_id='<host_id>')

get monitoring important urls

result = client.get_monitoring_important_urls(host_id='<host_id>')

get important url history

result = client.get_important_url_history(host_id='<host_id>', url='<url>')

get insearch url history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_insearch_url_history(host_id='<host_id>', date_from=date_from, date_to=date_to)

get insearch url samples

result = client.get_insearch_url_samples(host_id='<host_id>', limit=100, offset=0)

get insearch url events history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_insearch_url_events_history(host_id='<host_id>', date_from=date_from, date_to=date_to)

get insearch url events samples

result = client.get_insearch_url_events_samples(host_id='<host_id>', limit=100, offset=0)

recrawl url

result = client.recrawl_url(host_id='<host_id>', url='<recrawl_url>')

get recrawl task

result = client.get_recrawl_task(host_id='<host_id>', task_id='<task_id>')

get recrawl tasks

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_recrawl_tasks(
    host_id='<host_id>',
    date_from=date_from,
    date_to=date_to,
    limit=10,
    offset=10
)

get recrawl quota

result = client.get_recrawl_quota(host_id='<host_id>')
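
The recrawl calls can be chained: check the quota, submit a URL, then poll the task. A hedged sketch; quota_remainder, task_id, state and the 'IN_PROGRESS' value are assumptions about the response shapes, not documented fields:

import time

quota = client.get_recrawl_quota(host_id='<host_id>')
if quota.get('quota_remainder', 0) > 0:  # assumed field name
    task = client.recrawl_url(host_id='<host_id>', url='<recrawl_url>')
    task_id = task['task_id']  # assumed field name
    # poll until the task leaves the IN_PROGRESS state (assumed value)
    while client.get_recrawl_task(host_id='<host_id>', task_id=task_id).get('state') == 'IN_PROGRESS':
        time.sleep(30)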

diagnostic site

result = client.diagnostic_site(host_id='<host_id>')

get broken internal links samples

result = client.get_broken_internal_links_samples(host_id='<host_id>', indicator='SITE_ERROR')

get broken internal links history

from datetime import datetime, timedelta
date_from = datetime.now() - timedelta(days=4)
date_to = datetime.now()
result = client.get_broken_internal_links_history(host_id='<host_id>', date_from=date_from, date_to=date_to)

get external links samples

result = client.get_external_links_samples(host_id='<host_id>')

get external links history

result = client.get_external_links_history(host_id='<host_id>')

CHANGELOG

0.0.3 - change query_indicator params to list[str]
0.0.2 - add get_list_query_analytics method

