Graviti TensorBay Python SDK

Installation

pip3 install graviti

Usage

Get accessKey

An AccessKey is required for uploading data.

Log in to the Graviti website with your username and password, then get your AccessKey on the profile page.
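
The samples below hard-code the AccessKey for brevity. Here is a minimal sketch of reading it from an environment variable instead; the variable name GRAVITI_ACCESS_KEY is an illustrative choice for this sketch, not something the SDK reads automatically:

#!/usr/bin/env python3

import os

from graviti import GAS

ACCESS_KEY = os.environ["GRAVITI_ACCESS_KEY"]  # illustrative variable name, set it yourself

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client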

Create Dataset

#!/usr/bin/env python3

from graviti import GAS

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client
gas.create_dataset(DATASET_NAME)  # create a new dataset

List Datasets

#!/usr/bin/env python3

from graviti import GAS

ACCESS_KEY = "Accesskey-****"

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client
gas.list_datasets()  # list datasets
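
As a follow-up, a minimal sketch that continues the sample above and prints each dataset name, assuming the return value of list_datasets() can be iterated directly:

for dataset_name in gas.list_datasets():
    print(dataset_name)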

Upload data

This sample uploads a dataset that contains data collected from a single sensor only.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import Dataset, Data

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = Dataset(DATASET_NAME)  # create local dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local segment

# Add data to segment
# FILE_LIST is assumed to hold the local file paths to upload (see the sketch after this sample)
for filename in FILE_LIST:
    data = Data(filename)
    segment.append(data)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload the local dataset to TensorBay
# Argument 'jobs' sets the number of uploading threads
# Setting 'skip_uploaded_files' to True skips files that are already uploaded
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)
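
FILE_LIST above is assumed to be defined elsewhere. A minimal sketch that builds it from a local directory with the standard library; the directory and extension are illustrative assumptions:

from glob import glob

# Collect the local image paths to upload
FILE_LIST = sorted(glob("./images/*.jpg"))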

Upload fusion data

This sample uploads a fusion dataset that contains data collected from multiple sensors.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import FusionDataset, Frame, Data
from graviti.sensor import Camera

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestFusionDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = FusionDataset(DATASET_NAME)  # create local fusion dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local fusion segment

# Add sensors to segment
# SENSOR_LIST and FRAME_LIST are assumed to be defined elsewhere (see the sketch after this sample)
for sensor_name in SENSOR_LIST:
    camera = Camera(sensor_name)
    camera.set_translation(x=1.1, y=2.2, z=3.3)
    camera.set_rotation(w=1.1, x=2.2, y=3.3, z=4.4)
    camera.set_camera_matrix(fx=1.1, fy=2.2, cx=3.3, cy=4.4)
    camera.set_distortion_coefficients(p1=1.1, p2=2.2, k1=3.3, k2=4.4, k3=5.5)
    segment.add_sensor(camera)

# Add frame to segment
for frame_info in FRAME_LIST:
    frame = Frame()
    for sensor_name in SENSOR_LIST:
        data_info = frame_info[sensor_name]
        frame[sensor_name] = Data(data_info.filename, timestamp=data_info.timestamp)
    segment.append(frame)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload the local fusion dataset to TensorBay
# Argument 'jobs' sets the number of uploading threads
# Setting 'skip_uploaded_files' to True skips files that are already uploaded
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)
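
SENSOR_LIST and FRAME_LIST above are assumed to be defined elsewhere. A minimal sketch of one possible shape that matches how the loops read them; the sensor names, file names and timestamps are illustrative only:

from collections import namedtuple

DataInfo = namedtuple("DataInfo", ["filename", "timestamp"])

SENSOR_LIST = ["front_camera", "rear_camera"]

# Each frame maps a sensor name to the data captured by that sensor
FRAME_LIST = [
    {
        "front_camera": DataInfo("front_000000.jpg", 1600000000.0),
        "rear_camera": DataInfo("rear_000000.jpg", 1600000000.0),
    },
    {
        "front_camera": DataInfo("front_000001.jpg", 1600000000.1),
        "rear_camera": DataInfo("rear_000001.jpg", 1600000000.1),
    },
]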

Upload data with labels

This sample uploads a dataset that contains labels.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import Dataset, Data
from graviti.label import LabelType, LabeledBox2D

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = Dataset(DATASET_NAME)  # create local dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local segment

# Add label table
label_table = dataset.create_label_table(LabelType.BOX2D)
label_table.add_category(CATEGORY)

# Add data to segment
# FILE_LIST, LABEL_LIST and CATEGORY are assumed to be defined elsewhere (see the sketch after this sample)
for filename, labels in zip(FILE_LIST, LABEL_LIST):
    data = Data(filename)
    data.register_label(LabelType.BOX2D)
    for label in labels:
        labeled_box = LabeledBox2D(
            label.box.xmin,
            label.box.ymin,
            label.box.xmax,
            label.box.ymax,
            category=label.category,
        )
        data.append_label(labeled_box)
    segment.append(data)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload the local dataset to TensorBay
# Argument 'jobs' sets the number of uploading threads
# Setting 'skip_uploaded_files' to True skips files that are already uploaded
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)
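
CATEGORY, FILE_LIST and LABEL_LIST above are assumed to be defined elsewhere. A minimal sketch of one possible shape, matching how the loop reads label.box.xmin and label.category; the category, file names and coordinates are illustrative only:

from collections import namedtuple

Box = namedtuple("Box", ["xmin", "ymin", "xmax", "ymax"])
LabelInfo = namedtuple("LabelInfo", ["box", "category"])

CATEGORY = "car"
FILE_LIST = ["000000.jpg", "000001.jpg"]

# One list of labels per file, aligned with FILE_LIST
LABEL_LIST = [
    [LabelInfo(Box(10, 20, 100, 200), "car")],
    [LabelInfo(Box(30, 40, 120, 220), "car")],
]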

Upload fusion data with labels

This sample uploads a fusion dataset that contains labels.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import FusionDataset, Frame, Data
from graviti.label import LabelType, LabeledBox2D
from graviti.sensor import Camera

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = FusionDataset(DATASET_NAME)  # create local fusion dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local fusion segment

# Add label table
label_table = dataset.create_label_table(LabelType.BOX2D)
label_table.add_category(CATEGORY)

# Add sensors to segment
# SENSOR_LIST and FRAME_LIST are assumed to be defined elsewhere (see the sketch under "Upload fusion data")
for sensor_name in SENSOR_LIST:
    camera = Camera(sensor_name)
    camera.set_translation(x=1.1, y=2.2, z=3.3)
    camera.set_rotation(w=1.1, x=2.2, y=3.3, z=4.4)
    camera.set_camera_matrix(fx=1.1, fy=2.2, cx=3.3, cy=4.4)
    camera.set_distortion_coefficients(p1=1.1, p2=2.2, k1=3.3, k2=4.4, k3=5.5)
    segment.add_sensor(camera)

# Add frame to segment
for frame_info in FRAME_LIST:
    frame = Frame()
    for sensor_name in SENSOR_LIST:
        data_info = frame_info[sensor_name]

        data = Data(data_info.filename, timestamp=data_info.timestamp)
        data.register_label(LabelType.BOX2D)
        for label in data_info.labels:
            labeled_box = LabeledBox2D(
                label.box.xmin,
                label.box.ymin,
                label.box.xmax,
                label.box.ymax,
                category=label.category,
            )
            data.append_label(labeled_box)

        frame[sensor_name] = data

    segment.append(frame)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload the local fusion dataset to TensorBay
# Argument 'jobs' sets the number of uploading threads
# Setting 'skip_uploaded_files' to True skips files that are already uploaded
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

Commit dataset

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import Dataset, Data
from graviti.label import LabelType, LabeledBox2D

# code for loading data into the Dataset object
...
...

# upload_dataset_object returns the client of the uploaded dataset
dataset_client = gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

COMMIT_MESSAGE = "Initial commit"
TAG = "v0.0.1"

# The 'commit()' method commits your dataset and returns the committed dataset client
committed_dataset_client = dataset_client.commit(COMMIT_MESSAGE, TAG)

Read data and label

#!/usr/bin/env python3

from graviti import GAS
from graviti.label import LabelType
from PIL import Image

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"

gas = GAS(ACCESS_KEY)
dataset_client = gas.get_dataset(DATASET_NAME)  # Get dataset client of your dataset
segment = dataset_client.get_segment_object(SEGMENT_NAME)  # Get segment object of your dataset

for data in segment:
    with data.open() as file:
        img = Image.open(file)  # Read image by PIL

    for labeled_box in data[LabelType.BOX2D]:  # Get all labeled boxes
        box = list(labeled_box)                # Get the box as [xmin, ymin, xmax, ymax]
        category = labeled_box.category        # Read the category of the labeled box
        attributes = labeled_box.attributes    # Read the attributes of the labeled box
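
As a follow-up, a minimal sketch that draws the retrieved boxes on each image with Pillow; this is plain Pillow usage on top of the sample above, not part of the SDK:

from PIL import Image, ImageDraw

for index, data in enumerate(segment):
    with data.open() as file:
        img = Image.open(file).convert("RGB")

    draw = ImageDraw.Draw(img)
    for labeled_box in data[LabelType.BOX2D]:
        # draw each [xmin, ymin, xmax, ymax] box on the image
        draw.rectangle(list(labeled_box), outline="red", width=2)

    img.save("visualization_%06d.jpg" % index)  # illustrative output name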

Command line

We also provide the gas command-line tool to call SDK APIs.

Run gas in a terminal to see the available commands, listed below.

gas config
gas create
gas delete
gas publish
gas ls
gas cp
gas rm

config environment

gas config [accessKey]                   # configure the accesskey for the default environment
gas -c [config_name] config [accessKey]  # create an environment named [config_name]
                                         # and configure the accesskey for it

show config

gas config         # show config information of all environments

choose environment

gas [command] [args]                           # use the default environment
gas -c [config_name] [command] [args]          # use the environment named [config_name]
gas -k [accessKey] [command] [args]            # specify an accessKey for the current command only

# '-k' has higher priority than '-c'

create dataset

gas create tb:[dataset_name]

delete dataset

gas delete tb:[dataset_name]

publish dataset

gas publish tb:[dataset_name]

list data

gas ls [Options] [tbrn]

Options:
  -a, --all      List all files under all segments. Only works when [tbrn] is tb:[dataset_name].

tbrn:
  None                                              # list all dataset names
  tb:[dataset_name]                                 # list all segment names under the dataset
  tb:[dataset_name]:[segment_name]                  # list all files under the segment
  tb:[dataset_name]:[segment_name]://[remote_path]  # list files under the remote path

upload data

gas cp [Options] [local_path1] [local_path2]... [tbrn]

Options:
  -r, --recursive     Copy directories recursively.
  -j, --jobs INTEGER  The number of threads.

tbrn:
  tb:[dataset_name]:[segment_name]                  # copy files to the segment
  tb:[dataset_name]:[segment_name]://[remote_path]  # copy files to the remote path

# [segment_name] is required.
# If only one file is uploaded and [remote_path] doesn't end with '/',
# the file will be uploaded and renamed to [remote_path]

delete data

gas rm [Options] [tbrn]

Options:
  -r, --recursive  Remove directories recursively.
  -f, --force      Delete segments forcibly regardless of the nature of the dataset.
                   By default, only segments with no sensor can be deleted.
                   Once '-f' is used, sensors along with their objects will also be deleted.

tbrn:
  tb:[dataset_name]                                 # remove all segments under the dataset
  tb:[dataset_name]:[segment_name]                  # remove a segment
  tb:[dataset_name]:[segment_name]://[remote_path]  # remove files under the remote path
