# coding: utf-8
# Copyright (c) 2016, 2025, Oracle and/or its affiliates.  All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

# NOTE: This class is auto generated by OracleSDKGenerator. DO NOT EDIT. API Version: 20231130


from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel  # noqa: F401
from oci.decorators import init_model_state_from_kwargs


@init_model_state_from_kwargs
class GeneratedText(object):
    """
    The text generated during each run.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new GeneratedText object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param id:
            The value to assign to the id property of this GeneratedText.
        :type id: str

        :param text:
            The value to assign to the text property of this GeneratedText.
        :type text: str

        :param likelihood:
            The value to assign to the likelihood property of this GeneratedText.
        :type likelihood: float

        :param finish_reason:
            The value to assign to the finish_reason property of this GeneratedText.
        :type finish_reason: str

        :param token_likelihoods:
            The value to assign to the token_likelihoods property of this GeneratedText.
        :type token_likelihoods: list[oci.generative_ai_inference.models.TokenLikelihood]

        """
        self.swagger_types = {
            'id': 'str',
            'text': 'str',
            'likelihood': 'float',
            'finish_reason': 'str',
            'token_likelihoods': 'list[TokenLikelihood]'
        }
        self.attribute_map = {
            'id': 'id',
            'text': 'text',
            'likelihood': 'likelihood',
            'finish_reason': 'finishReason',
            'token_likelihoods': 'tokenLikelihoods'
        }
        self._id = None
        self._text = None
        self._likelihood = None
        self._finish_reason = None
        self._token_likelihoods = None
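        # Usage sketch (illustrative values only, not taken from a real API
        # response): the supported keyword arguments documented above can be
        # passed directly to the constructor, e.g.
        #
        #     generated = GeneratedText(
        #         id="example-id",
        #         text="Hello, world.",
        #         likelihood=-0.42,
        #         finish_reason="COMPLETE",
        #         token_likelihoods=[],
        #     )
        #
        # The @init_model_state_from_kwargs decorator is what maps these
        # keyword arguments onto the matching properties defined below.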

    @property
    def id(self):
        """
        **[Required]** Gets the id of this GeneratedText.
        A unique identifier for this text generation.


        :return: The id of this GeneratedText.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """
        Sets the id of this GeneratedText.
        A unique identifier for this text generation.


        :param id: The id of this GeneratedText.
        :type: str
        """
        self._id = id

    @property
    def text(self):
        """
        **[Required]** Gets the text of this GeneratedText.
        The generated text.


        :return: The text of this GeneratedText.
        :rtype: str
        """
        return self._text

    @text.setter
    def text(self, text):
        """
        Sets the text of this GeneratedText.
        The generated text.


        :param text: The text of this GeneratedText.
        :type: str
        """
        self._text = text

    @property
    def likelihood(self):
        """
        **[Required]** Gets the likelihood of this GeneratedText.
        The overall likelihood of the generated text.

        When a large language model generates a new token for the output text, a likelihood is assigned to all tokens, where tokens with higher likelihoods are more likely to follow the current token. For example, it's more likely that the word favorite is followed by the word food or book rather than the word zebra. A lower likelihood means that the token is less likely to follow the current token.


        :return: The likelihood of this GeneratedText.
        :rtype: float
        """
        return self._likelihood

    @likelihood.setter
    def likelihood(self, likelihood):
        """
        Sets the likelihood of this GeneratedText.
        The overall likelihood of the generated text.

        When a large language model generates a new token for the output text, a likelihood is assigned to all tokens, where tokens with higher likelihoods are more likely to follow the current token. For example, it's more likely that the word favorite is followed by the word food or book rather than the word zebra. A lower likelihood means that the token is less likely to follow the current token.


        :param likelihood: The likelihood of this GeneratedText.
        :type: float
        """
        self._likelihood = likelihood

    @property
    def finish_reason(self):
        """
        Gets the finish_reason of this GeneratedText.
        The reason why the model stopped generating tokens.

        A model stops generating tokens if the model hits a natural stop point or reaches a provided stop sequence.


        :return: The finish_reason of this GeneratedText.
        :rtype: str
        """
        return self._finish_reason

    @finish_reason.setter
    def finish_reason(self, finish_reason):
        """
        Sets the finish_reason of this GeneratedText.
        The reason why the model stopped generating tokens.

        A model stops generating tokens if the model hits a natural stop point or reaches a provided stop sequence.


        :param finish_reason: The finish_reason of this GeneratedText.
        :type: str
        """
        self._finish_reason = finish_reason

    @property
    def token_likelihoods(self):
        """
        Gets the token_likelihoods of this GeneratedText.
        A collection of generated tokens and their corresponding likelihoods.


        :return: The token_likelihoods of this GeneratedText.
        :rtype: list[oci.generative_ai_inference.models.TokenLikelihood]
        """
        return self._token_likelihoods

    @token_likelihoods.setter
    def token_likelihoods(self, token_likelihoods):
        """
        Sets the token_likelihoods of this GeneratedText.
        A collection of generated tokens and their corresponding likelihoods.


        :param token_likelihoods: The token_likelihoods of this GeneratedText.
        :type: list[oci.generative_ai_inference.models.TokenLikelihood]
        """
        self._token_likelihoods = token_likelihoods

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        if other is None:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
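

if __name__ == "__main__":
    # Usage sketch, not part of the generated SDK: build a GeneratedText by
    # hand with the keyword arguments documented in __init__, then read it
    # back through the properties above. All values are illustrative; the
    # TokenLikelihood fields used here (token, likelihood) are assumed to
    # match that model's generated definition in this package.
    from oci.generative_ai_inference.models import TokenLikelihood

    generated = GeneratedText(
        id="example-id",
        text="Hello, world.",
        likelihood=-0.42,
        finish_reason="COMPLETE",
        token_likelihoods=[
            TokenLikelihood(token="Hello", likelihood=-0.1),
            TokenLikelihood(token=",", likelihood=-0.3),
        ],
    )

    print(generated.finish_reason)
    print(generated.text)
    for tl in generated.token_likelihoods:
        print(tl.token, tl.likelihood)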