Functions used in the sample coding

REST API Reference Guide for Virtual Storage Platform 5000, Virtual Storage Platform E Series, and Virtual Storage Platform G/F Series

Version
93-07-0x
90-09-0x
88-08-10
Audience
anonymous
Part Number
MK-98RD9014-17
This section explains the functions used in the sample coding.

Generating a URL

In the sample coding, the functions that generate the URL to be used in the HTTP request are defined in the BlockStorageAPI class, and are called from the main processing when needed. If you specify the required parameters for each target resource or the method to use, these functions generate and return the corresponding URL. The following explains the sample coding for BlockStorageAPI.

In this sample coding, the storage device ID to be used in the URL is generated from the model name and serial number of the storage system specified by the parameters. The storage device ID is generated by replacing the model name specified by the STORAGE_MODEL_DICT parameter with the fixed value of the type of the storage system. If necessary, change the value to match the system environment and requirements.

# coding:utf-8
"""
This class creates a URL for the REST API
to manage block storage
"""

# For VSP E990
STORAGE_MODEL_DICT = {
    # Model name -> fixed type code used as the storage device ID prefix.
    "VSP E990": "936000",
}

# For VSP G350, G370, G700, G900, VSP F350, F370, F700, F900
# Model name -> fixed type code used as the storage device ID prefix.
STORAGE_MODEL_DICT = {
    "VSP G900": "886000",
    "VSP G700": "886000",
    "VSP G370": "886000",
    "VSP G350": "882000",
    "VSP F900": "886000",
    "VSP F700": "886000",
    "VSP F370": "886000",
    "VSP F350": "882000",
}

# For VSP 5000 series
# Model name -> fixed type code used as the storage device ID prefix.
STORAGE_MODEL_DICT = {
    "VSP 5500H": "900000",
    "VSP 5500": "900000",
    "VSP 5100H": "900000",
    "VSP 5100": "900000",
}

class BlockStorageAPI():
    """Generates the URLs used in HTTP requests to the REST API
    for managing block storage.
    """
    # NOTE(review): these are class-level placeholders; instances
    # overwrite all of them in __init__.
    # GUM IP address
    gum_ip_addr = None
    # port number
    port = None
    # storage URL (common base: https://host:port/ConfigurationManager/v1)
    base_url = None
    # object URL (path prefix of the objects domain)
    object_url = None
    # service URL (path prefix of the services domain)
    service_url = None
    # storage device ID (type code of the model + serial number)
    storage_id = None

Generate the common part of the request URLs, in the following format:

For the objects domain:
protocol://host-name:port-number/ConfigurationManager/version/objects
For the services domain:
protocol://host-name:port-number/ConfigurationManager/version/services

    def __init__(self, gum_ip_addr, port, storage_model,
                 serial_number):
        """Initialize the URL generator.

        @param gum_ip_addr GUM IP address of the storage system
        @param port port number (str or int; stored as str)
        @param storage_model model name, must be a key of
               STORAGE_MODEL_DICT
        @param serial_number serial number of the storage system
        @raise KeyError if storage_model is not in STORAGE_MODEL_DICT
        """
        self.gum_ip_addr = gum_ip_addr
        # The port is concatenated into URL strings below and in
        # affected_resource()/api_version(); accept int or str.
        self.port = str(port)
        # Storage device ID = fixed type code for the model + serial number.
        self.storage_id = STORAGE_MODEL_DICT[storage_model] \
            + serial_number
        self.base_url = "https://" + \
            self.gum_ip_addr + ":" + self.port + \
            "/ConfigurationManager/v1"
        self.object_url = "/objects"
        self.service_url = "/services"

Next, define the functions according to the target resource and the operation. For example, the URL for creating a volume is generated by the block_storage_api.ldevs function, which corresponds to the ldevs object.

    def get_storage_id(self):
        return self.storage_id

    def ldevs(self):
        url = self.base_url + self.object_url + "/ldevs"
        return url

    def ldev(self, object_id):
        url = self.ldevs() + "/" + str(object_id)
        return url

    def host_groups(self):
        url = self.base_url + self.object_url + "/host-groups"
        return url

    def host_wwns(self):
        url = self.base_url + self.object_url + "/host-wwns"
        return url

    def luns(self):
        url = self.base_url + self.object_url + "/luns"
        return url

    def local_copy_pairs(self):
        url = self.base_url + self.object_url + \
            "/local-clone-copypairs"
        return url

    def split_local_copy_pair_template(self, pair_url):
        url = pair_url + "/actions/split"
        return url

    def split_local_copy_pair(self, pair_url):
        url = pair_url + "/actions/split/invoke"
        return url

    def generate_session(self):
        url = self.base_url + self.object_url + "/sessions"
        return url

    def discard_session(self, object_id):
        url = self.base_url + self.object_url + "/sessions/" + \
            str(object_id)
        return url

    def lock(self):
        url = self.base_url + self.service_url + \
            "/resource-group-service/" + \
            "actions/lock/invoke"
        return url

    def unlock(self):
        url = self.base_url + self.service_url + \
            "/resource-group-service/" + \
            "actions/unlock/invoke"
        return url

    def remote_storage(self):
        url = self.base_url + self.object_url + \
            "/remote-storages"
        return url

    def remote_copy_pairs(self):
        url = self.base_url + self.object_url + \
            "/remote-mirror-copypairs"
        return url

    def job(self, object_id):
        url = self.base_url + self.object_url + "/jobs/" + \
            str(object_id)
        return url

    def affected_resource(self, affected_resource):
        url = "https://" + self.gum_ip_addr + ":" + \
              self.port \
              + affected_resource
        return url

    def api_version(self):
        url = "https://" + self.gum_ip_addr + ":" + \
              self.port \
            + "/ConfigurationManager/configuration/version"
        return url

    def file_upload(self):
        url = self.base_url + self.object_url + \
            "/actions/file-upload/invoke"
        return url

    def auditlog_syslog(self):
        url = self.base_url + self.object_url + \
              "/auditlog-syslog-servers" + "/instance"
        return url

    def auditlog_syslog_send_test(self):
        url = self.auditlog_syslog() + \
            "/actions/send-test/invoke"
        return url

    def drives(self):
        url = self.base_url + self.object_url + "/drives"
        return url

    def drives_parity_group(self, parity_group_id):
        url = self.drives() + "?parityGroupId=" + str(parity_group_id)
        return url

    def parity_groups(self):
        url = self.base_url + self.object_url + "/parity-groups"
        return url

    def parity_group(self, object_id):
        url = self.parity_groups() + "/" + str(object_id)
        return url

    def encryption_keys(self):
        url = self.base_url + self.object_url + "/encryption-keys"
        return url

    def encryption_key(self, object_id):
        url = self.encryption_keys() + "/" + str(object_id)
        return url

    def encryption_key_file(self):
        url = self.encryption_key("file")
        return url

    def encryption_key_file_backup(self):
        url = self.encryption_key_file() + "/actions/backup/invoke"
        return url

    def encryption_key_file_restore(self):
        url = self.encryption_key_file() + "/actions/restore/invoke"
        return url

Issuing an HTTP request and verifying the status of asynchronous processing

In the REST API, operations such as creating an object or changing its attributes are registered as jobs and run asynchronously. For asynchronous processing, you must confirm that the jobs are complete before obtaining the resource to which the request execution result is applied. In the sample coding, the invoke_async_command function is used to issue the request and to perform the processing for waiting for the completion of the jobs. After the method type, URL, and request body are specified, the invoke_async_command function issues the request according to the specified method, waits for the completion of the job, and then returns the resource information.

The invoke_async_command function is defined in the coding samples for volume allocation, ShadowImage pair operation, and registration of remote storage system information. The following examples show the contents of coding samples for the invoke_async_command function.

First, define the function that gets the job status. This function is called by the invoke_async_command function.
"""
Check whether the asynchronous command was finished.
@param job_id the job ID to identify
       the asynchronous command
@return r the response data
"""


def check_update(job_id):
    """Get the current state of the asynchronous job.

    @param job_id the job ID to identify the asynchronous command
    @return the response data of the GET request
    """
    job_url = block_storage_api.job(str(job_id))
    return requests.get(job_url, headers=headers, verify=False)
Next, define the invoke_async_command function. Specify the settings so that when a request is generated, the request body is generated in JSON format. Specify the authentication information by using the token obtained when a session was generated.
"""
Execute the HTTP request (POST or PATCH)
@param method_type HTTP request method (POST or PATCH)
@param url URL to execute HTTP method
@param body The information of a resource
@return job_result.json()["affectedResources"][0]
         URL of an affected resource
"""


def invoke_async_command(method_type, url, body):
    """Execute an HTTP request (POST or PATCH) and wait for the job.

    @param method_type HTTP request method ("post" or "patch")
    @param url URL to execute the HTTP method on
    @param body the information of a resource, sent as JSON
    @return job_result.json()["affectedResources"][0]
            URL of an affected resource
    @raise ValueError if method_type is not "post" or "patch"
    @raise requests.HTTPError if the request was not accepted
    @raise Exception on job timeout or job failure
    """
    if method_type == "patch":
        r = requests.patch(url, headers=headers,
                           data=json.dumps(body), verify=False)
    elif method_type == "post":
        r = requests.post(
            url,
            headers=headers,
            data=json.dumps(body),
            verify=False)
    else:
        # Fail fast with a clear message; previously an unsupported
        # method type caused a NameError on the unbound response below.
        raise ValueError("Unsupported method type: " + str(method_type))
    if r.status_code != http.client.ACCEPTED:
        raise requests.HTTPError(r)
    print("Request was accepted. JOB URL : " +
          r.json()["self"])
After the request is issued, the job status is repeatedly obtained until the job status changes to Completed. If the job does not finish before the maximum number of retries specified in the parameter is reached, the processing ends. In addition, if an error occurs for the job, the error code is obtained and the processing ends.
    status = "Initializing"
    job_result = None
    retry_count = 1
    wait_time = FIRST_WAIT_TIME
    while status != "Completed":
        if retry_count > MAX_RETRY_COUNT:
            raise Exception("Timeout Error! "
                            "Operation was not completed.")
        time.sleep(wait_time)
        job_result = check_update(r.json()["jobId"])
        status = job_result.json()["status"]
        # Exponential backoff, capped at 120 seconds per poll.
        wait_time = min(wait_time * 2, 120)
        retry_count += 1
    if job_result.json()["state"] == "Failed":
        error_obj = job_result.json()["error"]
        if "errorCode" in error_obj:
            if "SSB1" in error_obj["errorCode"]:
                print("Error! SSB code : ",
                      error_obj["errorCode"]["SSB1"],
                      ", ", error_obj["errorCode"]["SSB2"])
            elif "errorCode" in error_obj["errorCode"]:
                print("Error! error code : ",
                      error_obj["errorCode"]["errorCode"])
        raise Exception("Job Error!", job_result.text)
After the job is complete and the status changes to Completed, the URL of the resource to which the job execution result is applied is obtained. The first result is obtained because only one result is returned to affectedResources.
    print("Async job was succeeded. affected resource : " +
          job_result.json()["affectedResources"][0])
    return job_result.json()["affectedResources"][0]

Getting status changes for asynchronous processing

The wait_until_jobstatus_is_changed function gets the status of the jobs that were asynchronously run by the REST API, waits until the job status changes to the specified execution status, and then returns the resource information. The wait_until_jobstatus_is_changed function is defined in the coding samples for a TrueCopy pair operation. The following example shows the contents of coding samples for the wait_until_jobstatus_is_changed function.

First, define the function that gets the job status. This function is called by the wait_until_jobstatus_is_changed function.

"""
Check whether the asynchronous command was finished.

@param storage_api storage_api
@param job_id the job ID to identify
      the asynchronous command
@param headers the array of the http headers
@return r the response data
"""


def check_update(storage_api, job_id, headers):
    """Get the current state of the asynchronous job.

    @param storage_api the URL generator for the storage system
    @param job_id the job ID to identify the asynchronous command
    @param headers the array of the HTTP headers
    @return the response data of the GET request
    """
    job_url = storage_api.job(str(job_id))
    return requests.get(job_url, headers=headers, verify=False)

Next, define the wait_until_jobstatus_is_changed function. For changed_status, specify the job status that needs to be detected when a job status has changed to that status. If True is specified for is_retry_count_enabled, after processing is tried again for the number of times specified in the MAX_RETRY_COUNT parameter, a timeout error will be returned. If False is specified, processing will wait until a job moves to the specified status.

"""
Wait until the job status is changed

@param storage_api storage_api
@param headers the array of the http headers
@param job_id the job ID to identify
       the asynchronous command
@param changed_status job status after waiting
@param is_retry_count_enabled if true, wait
       until MAX_RETRY_COUNT. if false, wait forever
       until job status is changed.
@return job_result.json()["affectedResources"][0]
         URL of an affected resource
"""


def wait_until_jobstatus_is_changed(
        storage_api,
        headers,
        job_id,
        changed_status,
        is_retry_count_enabled):
    """Wait until the job status is changed to changed_status.

    @param storage_api the URL generator for the storage system
    @param headers the array of the HTTP headers
    @param job_id the job ID to identify the asynchronous command
    @param changed_status job status after waiting
    @param is_retry_count_enabled if True, wait up to
           MAX_RETRY_COUNT retries then raise a timeout error;
           if False, wait forever until the job status is changed
    @return job_result.json()["affectedResources"][0]
            URL of an affected resource
    @raise Exception on timeout, on a failed job, or if the job
           was never polled
    """
    status = "Initializing"
    # Initialize to None so that a loop that never runs is detected
    # explicitly below (consistent with invoke_async_command);
    # previously this path raised an unhelpful NameError.
    job_result = None
    retry_count = 1
    wait_time = FIRST_WAIT_TIME
    while status != changed_status:
        if status == "Completed":
            print("Status was already changed " +
                  "to Completed.")
            break
        if is_retry_count_enabled and \
                retry_count > MAX_RETRY_COUNT:
            raise Exception("Timeout Error! "
                            "Operation was not completed.")
        time.sleep(wait_time)
        job_result = check_update(storage_api,
                                  job_id, headers)
        status = job_result.json()["status"]
        # Exponential backoff, capped at 120 seconds per poll.
        wait_time = min(wait_time * 2, 120)
        retry_count += 1
    if job_result is None:
        # changed_status equaled the initial status, so the job was
        # never polled and there is no result to inspect.
        raise Exception("Job was never polled. "
                        "Invalid changed_status: " + str(changed_status))
    if job_result.json()["state"] == "Failed":
        error_obj = job_result.json()["error"]
        if "errorCode" in error_obj:
            if "SSB1" in error_obj["errorCode"]:
                print("Error! SSB code : ",
                      error_obj["errorCode"]["SSB1"],
                      ", ", error_obj["errorCode"]["SSB2"])
            elif "errorCode" in error_obj["errorCode"]:
                print("Error! error code : ",
                      error_obj["errorCode"]["errorCode"])
        raise Exception("Job Error!", job_result.text)
    print("Async job was succeeded. affected resource : " +
          job_result.json()["affectedResources"][0])
    return job_result.json()["affectedResources"][0]