Skip to main content

Graviti TensorBay python SDK

Project description

Graviti python SDK

Installation

pip3 install graviti

Usage

Get accessKey

An AccessKey is required when uploading data.

Use your username and password to login to Graviti website, and get accessKey on profile page.

Create Dataset

#!/usr/bin/env python3

from graviti import GAS

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"

# Authenticate against TensorBay with the accesskey,
# then create a brand-new dataset under this account.
gas = GAS(ACCESS_KEY)
gas.create_dataset(DATASET_NAME)

List Datasets

#!/usr/bin/env python3

from graviti import GAS

ACCESS_KEY = "Accesskey-****"

# Authenticate against TensorBay, then enumerate
# every dataset visible to this accesskey.
gas = GAS(ACCESS_KEY)
gas.list_datasets()

Upload data

This sample is for uploading a dataset which only contains data collected from a single sensor.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import Dataset, Data

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = Dataset(DATASET_NAME)  # create local dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local segment

# Add data to segment
# NOTE: FILE_LIST is a placeholder the reader supplies — an iterable of local file paths.
for filename in FILE_LIST:
    data = Data(filename)
    segment.append(data)  # append the Data instance (the original appended the Data class)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload local dataset to tensorbay
# Argument 'jobs' for multi-thread uploading
# Set 'skip_uploaded_files' to 'True' will skip the uploaded files
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

Upload fusion data

This sample is for uploading a dataset which contains data collected from multiple sensors.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import FusionDataset, Frame, Data
from graviti.sensor import Camera

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestFusionDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = FusionDataset(DATASET_NAME)  # create local fusion dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local fusion segment

# Add sensor to segment
# NOTE: SENSOR_LIST is a placeholder the reader supplies — an iterable of sensor names.
for sensor_name in SENSOR_LIST:
    camera = Camera(sensor_name)
    camera.set_translation(x=1.1, y=2.2, z=3.3)
    camera.set_rotation(w=1.1, x=2.2, y=3.3, z=4.4)
    camera.set_camera_matrix(fx=1.1, fy=2.2, cx=3.3, cy=4.4)
    camera.set_distortion_coefficients(p1=1.1, p2=2.2, k1=3.3, k2=4.4, k3=5.5)
    segment.add_sensor(camera)

# Add frame to segment
# NOTE: FRAME_LIST is a placeholder — each frame_info maps sensor name to file/timestamp info.
for frame_info in FRAME_LIST:
    frame = Frame()
    for sensor_name in SENSOR_LIST:
        data_info = frame_info[sensor_name]
        frame[sensor_name] = Data(data_info.filename, timestamp=data_info.timestamp)
    segment.append(frame)

# Register the accesskey to the gas client once, just before uploading
# (the original sample created a second, redundant GAS client).
gas = GAS(ACCESS_KEY)

# Upload local fusion dataset to tensorbay
# Argument 'jobs' for multi-thread uploading
# Set 'skip_uploaded_files' to 'True' will skip the uploaded files
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

Upload data with labels

This sample is for uploading a dataset which contains labels.

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import Dataset, Data
from graviti.label import LabelType, LabeledBox2D

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = Dataset(DATASET_NAME)  # create local dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local segment

# Add label table
# NOTE: CATEGORY is a placeholder the reader supplies — the category name for the label table.
label_table = dataset.create_label_table(LabelType.BOX2D)
label_table.add_category(CATEGORY)

# Add data to segment
# NOTE: FILE_LIST / LABEL_LIST are placeholders — parallel iterables of paths and label records.
for filename, labels in zip(FILE_LIST, LABEL_LIST):
    data = Data(filename)
    data.register_label(LabelType.BOX2D)
    for label in labels:
        # Use a distinct name so the loop variable 'label' is not shadowed.
        labeled_box = LabeledBox2D(
            label.box.xmin,
            label.box.ymin,
            label.box.xmax,
            label.box.ymax,
            category=label.category,
        )
        data.append_label(labeled_box)
    segment.append(data)  # append the Data instance (the original appended the Data class)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload local dataset to tensorbay
# Argument 'jobs' for multi-thread uploading
# Set 'skip_uploaded_files' to 'True' will skip the uploaded files
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

Upload fusion data with labels

This sample is for uploading a fusion dataset which contains labels.

#!/usr/bin/env python3

from graviti import GAS
# The original sample imported only Dataset/Data but used Camera, Frame and
# fusion-only methods (add_sensor); import the fusion types it actually needs.
from graviti.dataset import FusionDataset, Frame, Data
from graviti.sensor import Camera
from graviti.label import LabelType, LabeledBox2D

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"
THREAD_NUMBER = 8

dataset = FusionDataset(DATASET_NAME)  # create local fusion dataset
segment = dataset.create_segment(SEGMENT_NAME)  # create local fusion segment

# Add label table
# NOTE: CATEGORY is a placeholder the reader supplies — the category name for the label table.
label_table = dataset.create_label_table(LabelType.BOX2D)
label_table.add_category(CATEGORY)

# Add sensor to segment
# NOTE: SENSOR_LIST is a placeholder — an iterable of sensor names.
for sensor_name in SENSOR_LIST:
    camera = Camera(sensor_name)
    camera.set_translation(x=1.1, y=2.2, z=3.3)
    camera.set_rotation(w=1.1, x=2.2, y=3.3, z=4.4)
    camera.set_camera_matrix(fx=1.1, fy=2.2, cx=3.3, cy=4.4)
    camera.set_distortion_coefficients(p1=1.1, p2=2.2, k1=3.3, k2=4.4, k3=5.5)
    segment.add_sensor(camera)

# Add frame to segment
# NOTE: FRAME_LIST is a placeholder — each frame_info maps sensor name to file/label info.
for frame_info in FRAME_LIST:
    frame = Frame()
    for sensor_name in SENSOR_LIST:
        data_info = frame_info[sensor_name]

        data = Data(data_info.filename, data_info.timestamp)
        data.register_label(LabelType.BOX2D)
        for label in data_info.labels:
            # Use a distinct name so the loop variable 'label' is not shadowed.
            labeled_box = LabeledBox2D(
                label.box.xmin,
                label.box.ymin,
                label.box.xmax,
                label.box.ymax,
                category=label.category,
            )
            data.append_label(labeled_box)

        frame[sensor_name] = data

    segment.append(frame)

gas = GAS(ACCESS_KEY)  # register the accesskey to gas client

# Upload local dataset to tensorbay
# Argument 'jobs' for multi-thread uploading
# Set 'skip_uploaded_files' to 'True' will skip the uploaded files
gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

Commit dataset

#!/usr/bin/env python3

from graviti import GAS
from graviti.dataset import Dataset, Data
from graviti.label import LabelType, LabeledBox2D

# code for load data into Dataset object
...
...

# upload_dataset_object will return the client of the uploaded dataset
dataset_client = gas.upload_dataset_object(dataset, jobs=THREAD_NUMBER, skip_uploaded_files=True)

COMMIT_MESSAGE = "Initial commit"
TAG = "v0.0.1"

# 'commit()' method will commit your dataset and return the committed dataset client
committed_dataset_client = dataset_client.commit(COMMIT_MESSAGE, TAG)

Read data and label

#!/usr/bin/env python3

from graviti import GAS
from graviti.label import LabelType
from PIL import Image

ACCESS_KEY = "Accesskey-****"
DATASET_NAME = "TestDataset"
SEGMENT_NAME = "TestSegment"

gas = GAS(ACCESS_KEY)
dataset_client = gas.get_dataset(DATASET_NAME)  # Get dataset client of your dataset
# Segments belong to a dataset, so fetch the segment through the dataset client
# (the original sample fetched it from 'gas' and never used 'dataset_client').
segment = dataset_client.get_segment_object(SEGMENT_NAME)  # Get segment object of your dataset

for data in segment:
    with data.open() as file:
        img = Image.open(file)  # Read image by PIL

    for labeled_box in data[LabelType.BOX2D]:  # Get all labeled box
        box = list(labeled_box)                # Get box which format should be [xmin, ymin, xmax, ymax]
        category = labeled_box.category        # Read the category of the labeled box
        attributes = labeled_box.attributes    # Read the attributes of the labeled box

Command line

We also provide gas command to call SDK APIs.

Use gas in terminal to see the available commands as follows.

gas config
gas create
gas delete
gas publish
gas ls
gas cp
gas rm

config environment

gas config [accessKey]                   # config accesskey to default environment
gas -c [config_name] config [accessKey]  # create an environment named [config_name]
                                         # and config accesskey to it

show config

gas config         # show config information of all environments

choose environment

gas [command] [args]                           # choose default environment
gas -c [config_name] [command] [args]          # choose the environment named [config_name]
gas -k [accessKey] [command] [args]            # appoint accessKey in current command line

# '-k' has higher priority than '-c'

create dataset

gas create tb:[dataset_name]

delete dataset

gas delete tb:[dataset_name]

publish dataset

gas publish tb:[dataset_name]

list data

gas ls [Options] [tbrn]

Options:
  -a, --all      List all files under all segments. Only works when [tbrn] is tb:[dataset_name].

tbrn:
  None                                              # list all dataset names
  tb:[dataset_name]                                 # list all segment names under the dataset
  tb:[dataset_name]:[segment_name]                  # list all files under the segment
  tb:[dataset_name]:[segment_name]://[remote_path]  # list files under the remote path

upload data

gas cp [Options] [local_path1] [local_path2]... [tbrn]

Options:
  -r, --recursive     Copy directories recursively.
  -j, --jobs INTEGER  The number of threads.

tbrn:
  tb:[dataset_name]:[segment_name]                  # copy files to the segment
  tb:[dataset_name]:[segment_name]://[remote_path]  # copy files to the remote path

# [segment_name] is required.
# If only upload one file and [remote_path] doesn't end with '/',
# then the file will be uploaded and renamed as [remote_path]

delete data

gas rm [Options] [tbrn]

Options:
  -r, --recursive  Remove directories recursively.
  -f, --force      Delete segments forcibly regardless of the nature of the dataset.
                   By default, only segments with no sensor can be deleted.
                   Once '-f' is used, sensors along with their objects will also be deleted.

tbrn:
  tb:[dataset_name]                                 # remove all segments under the dataset
  tb:[dataset_name]:[segment_name]                  # remove a segment
  tb:[dataset_name]:[segment_name]://[remote_path]  # remove files under the remote path

shell completion

Activation

For Bash, add this to ~/.bashrc:

eval "$(_GAS_COMPLETE=source_bash gas)"

For Zsh, add this to ~/.zshrc:

eval "$(_GAS_COMPLETE=source_zsh gas)"

For Fish, add this to ~/.config/fish/completions/foo-bar.fish:

eval (env _GAS_COMPLETE=source_fish gas)

See detailed info in Click Shell Completion

Project details


Download files

Download the file for your platform. If you're not sure which to choose, learn more about installing packages.

Source Distribution

graviti-0.3.4.tar.gz (66.9 kB view details)

Uploaded Source

Built Distribution

graviti-0.3.4-py3-none-any.whl (92.4 kB view details)

Uploaded Python 3

File details

Details for the file graviti-0.3.4.tar.gz.

File metadata

  • Download URL: graviti-0.3.4.tar.gz
  • Upload date:
  • Size: 66.9 kB
  • Tags: Source
  • Uploaded using Trusted Publishing? No
  • Uploaded via: twine/3.2.0 pkginfo/1.5.0.1 requests/2.24.0 setuptools/41.2.0 requests-toolbelt/0.9.1 tqdm/4.48.0 CPython/3.8.2

File hashes

Hashes for graviti-0.3.4.tar.gz
Algorithm Hash digest
SHA256 536bd09a8719bb01f4cead6ffa2df98134049eb00a0c2dfbbf3c93fbd014965d
MD5 2b7f3bf9f7830dbe9486b0265ee912de
BLAKE2b-256 cf8034946b4dbb43779b3a11d872b26bef1291f5955ac6ff1b9453d700631b72

See more details on using hashes here.

File details

Details for the file graviti-0.3.4-py3-none-any.whl.

File metadata

  • Download URL: graviti-0.3.4-py3-none-any.whl
  • Upload date:
  • Size: 92.4 kB
  • Tags: Python 3
  • Uploaded using Trusted Publishing? No
  • Uploaded via: twine/3.2.0 pkginfo/1.5.0.1 requests/2.24.0 setuptools/41.2.0 requests-toolbelt/0.9.1 tqdm/4.48.0 CPython/3.8.2

File hashes

Hashes for graviti-0.3.4-py3-none-any.whl
Algorithm Hash digest
SHA256 25f65f394446a3d8fd3a4113367ca0a2d2995223b2053bbca532b0b5bc91fcb2
MD5 70d5832731af9f2519b606e9f0a20646
BLAKE2b-256 bb8e222a051f11c41aaf688b8fdb95077a934972ce96d2fd57634d9b03d837f8

See more details on using hashes here.

Supported by

AWS AWS Cloud computing and Security Sponsor Datadog Datadog Monitoring Fastly Fastly CDN Google Google Download Analytics Microsoft Microsoft PSF Sponsor Pingdom Pingdom Monitoring Sentry Sentry Error logging StatusPage StatusPage Status page