Skip to content

API Reference

ai_model_manager

AIModelManager

A class to manage AI model configurations stored in a JSON file. Allows adding, removing, selecting models, and generating output via LLM APIs.

Source code in cli/ai_model_manager.py
class AIModelManager:
    """
    Manage AI model configurations stored in a JSON file.

    Supports adding, removing, listing, and selecting models, and
    generating output via LLM APIs.
    """

    def __init__(self, file_path=None):
        """
        Initialize the AIModelManager instance.
        Creates the default configuration file if it doesn't exist.

        Args:
            file_path (str, optional): Path to the JSON configuration file.
                If None, defaults to ~/.config/ai_model_manager/models_api.json.
        """
        if file_path is None:
            config_dir = os.path.join(
                os.path.expanduser("~"), ".config", "ai_model_manager"
            )
            os.makedirs(config_dir, exist_ok=True)
            file_path = os.path.join(config_dir, "models_api.json")

        self.file_path = file_path

        if not os.path.exists(self.file_path):
            self.create_default_file()

    def _read_json(self):
        """
        Internal method to read JSON configuration data from file.

        Returns:
            dict: Parsed JSON data, or an empty dict if the file is missing
                or contains invalid JSON (a corrupted config file no longer
                crashes every caller).
        """
        try:
            with open(self.file_path, "r") as f:
                return json.load(f)
        except (FileNotFoundError, json.JSONDecodeError):
            return {}

    def _write_json(self, data):
        """
        Internal method to write JSON data to file.

        Args:
            data (dict): Data to write into the JSON file.
        """
        with open(self.file_path, "w") as f:
            json.dump(data, f, indent=4)

    def create_default_file(self):
        """
        Creates a configuration file with default model entries.

        The `gemini-1.5-flash` key can be overridden via the GEMINI_API_KEY
        environment variable; otherwise the bundled default is used.

        SECURITY NOTE(review): shipping a hard-coded API key in source is
        unsafe — it is visible to anyone with repo access and should be
        rotated and supplied via configuration instead.
        """
        default_config = {
            "selected_model": None,
            # Environment override keeps behavior backward-compatible while
            # letting deployments avoid relying on the bundled key.
            "gemini-1.5-flash": os.environ.get(
                "GEMINI_API_KEY", "AIzaSyCSXtRAITXfGuarMHI1j-0QyKkoT9mUfz8"
            ),
            "gemini-1.5-interactive": None,
            "gemini-1.5-creative": None,
        }
        self._write_json(default_config)
        print(f"Config file created at: {self.file_path}")

    def load(self):
        """
        Loads the current configuration from the JSON file.

        Returns:
            dict: Configuration data including selected model and keys.
        """
        return self._read_json()

    def configure_model(self, model_name, api_key):
        """
        Adds or updates a model entry with a given API key.

        Args:
            model_name (str): The name of the model to configure.
            api_key (str): The API key associated with the model.
        """
        data = self._read_json()
        data[model_name] = api_key
        self._write_json(data)
        print("Model added successfully.")

    def remove_model(self, model_name):
        """
        Removes a model from the configuration.

        Args:
            model_name (str): The name of the model to remove.

        Raises:
            ValueError: If the model is not found in the configuration.
        """
        data = self._read_json()
        if model_name not in data:
            raise ValueError(f"Model '{model_name}' not found.")
        del data[model_name]
        self._write_json(data)
        print(f"Model '{model_name}' removed successfully.")

    def get_api_key(self, model_name):
        """
        Retrieves the API key for a given model.

        Args:
            model_name (str): The name of the model.

        Returns:
            str: The API key associated with the model.

        Raises:
            ValueError: If the model is not available or lacks an API key.
        """
        data = self._read_json()
        if model_name in data and data[model_name]:
            return data[model_name]
        raise ValueError(f"Model '{model_name}' is not available or has no API key.")

    def list_models(self):
        """
        Prints the list of models and their corresponding API keys (if available).
        """
        data = self._read_json()
        if not data:
            print("No models found.")
            return
        for model, key in data.items():
            print(f"{model}: {key}")

    def select_model(self, model_name):
        """
        Sets the specified model as the default selected model.

        Args:
            model_name (str): The name of the model to select.

        Prints a message based on whether the selection was successful.
        """
        data = self._read_json()
        if model_name in data and data[model_name]:
            data["selected_model"] = model_name
            self._write_json(data)
            print(f"Selected model: {model_name}")
        else:
            print("No models found or no API key for that model.")

    def generate_output(self, model_name, prompt_by_user):
        """
        Generates LLM output using the specified model and user prompt.

        This method displays a spinner while waiting for a response from the model.

        Args:
            model_name (str): The name of the model to use.
            prompt_by_user (str): The prompt to send to the model.

        Returns:
            str: The model's output.
        """
        stop_spinner = threading.Event()
        spinner_thread = threading.Thread(target=spin_loader, args=(stop_spinner,))
        spinner_thread.start()

        try:
            # Imported lazily: cli.llm imports AIModelManager back from this
            # module, so a top-level import would be circular.
            from cli.llm import gemini_api_output

            output = gemini_api_output(model_name, prompt_by_user)
        finally:
            # Always stop and join the spinner, even if the API call raises,
            # so the background thread does not spin forever.
            stop_spinner.set()
            spinner_thread.join()

        return output

__init__(file_path=None)

Initialize the AIModelManager instance. Creates the default configuration file if it doesn't exist.

Parameters:

Name Type Description Default
file_path str

Path to the JSON configuration file. If None, defaults to ~/.config/ai_model_manager/models_api.json.

None
Source code in cli/ai_model_manager.py
def __init__(self, file_path=None):
    """
    Set up the manager instance.

    When no path is given, the configuration lives at
    ~/.config/ai_model_manager/models_api.json (the directory is created
    on demand). A default configuration file is written if none exists.

    Args:
        file_path (str, optional): Path to the JSON configuration file.
    """
    if file_path is None:
        base_dir = os.path.join(
            os.path.expanduser("~"), ".config", "ai_model_manager"
        )
        os.makedirs(base_dir, exist_ok=True)
        file_path = os.path.join(base_dir, "models_api.json")

    self.file_path = file_path

    if not os.path.exists(file_path):
        self.create_default_file()

configure_model(model_name, api_key)

Adds or updates a model entry with a given API key.

Parameters:

Name Type Description Default
model_name str

The name of the model to configure.

required
api_key str

The API key associated with the model.

required
Source code in cli/ai_model_manager.py
def configure_model(self, model_name, api_key):
    """
    Add a new model entry, or update an existing one, with the given key.

    Args:
        model_name (str): The name of the model to configure.
        api_key (str): The API key associated with the model.
    """
    config = self._read_json()
    config[model_name] = api_key
    self._write_json(config)
    print("Model added successfully.")

create_default_file()

Creates a configuration file with default model entries. Includes a pre-set API key for gemini-1.5-flash.

Source code in cli/ai_model_manager.py
def create_default_file(self):
    """
    Creates a configuration file with default model entries.

    The `gemini-1.5-flash` key can be overridden via the GEMINI_API_KEY
    environment variable; otherwise the bundled default is used.

    SECURITY NOTE(review): shipping a hard-coded API key in source is
    unsafe — it is visible to anyone with repo access and should be
    rotated and supplied via configuration instead.
    """
    default_config = {
        "selected_model": None,
        # Environment override keeps behavior backward-compatible while
        # letting deployments avoid relying on the bundled key.
        "gemini-1.5-flash": os.environ.get(
            "GEMINI_API_KEY", "AIzaSyCSXtRAITXfGuarMHI1j-0QyKkoT9mUfz8"
        ),
        "gemini-1.5-interactive": None,
        "gemini-1.5-creative": None,
    }
    self._write_json(default_config)
    print(f"Config file created at: {self.file_path}")

generate_output(model_name, prompt_by_user)

Generates LLM output using the specified model and user prompt.

This method displays a spinner while waiting for a response from the model.

Parameters:

Name Type Description Default
model_name str

The name of the model to use.

required
prompt_by_user str

The prompt to send to the model.

required

Returns:

Name Type Description
str

The model's output.

Source code in cli/ai_model_manager.py
def generate_output(self, model_name, prompt_by_user):
    """
    Generates LLM output using the specified model and user prompt.

    This method displays a spinner while waiting for a response from the model.

    Args:
        model_name (str): The name of the model to use.
        prompt_by_user (str): The prompt to send to the model.

    Returns:
        str: The model's output.
    """
    stop_spinner = threading.Event()
    spinner_thread = threading.Thread(target=spin_loader, args=(stop_spinner,))
    spinner_thread.start()

    try:
        # Imported lazily: cli.llm imports AIModelManager back from this
        # module, so a top-level import would be circular.
        from cli.llm import gemini_api_output

        output = gemini_api_output(model_name, prompt_by_user)
    finally:
        # Always stop and join the spinner, even if the API call raises,
        # so the background thread does not spin forever.
        stop_spinner.set()
        spinner_thread.join()

    return output

get_api_key(model_name)

Retrieves the API key for a given model.

Parameters:

Name Type Description Default
model_name str

The name of the model.

required

Returns:

Name Type Description
str

The API key associated with the model.

Raises:

Type Description
ValueError

If the model is not available or lacks an API key.

Source code in cli/ai_model_manager.py
def get_api_key(self, model_name):
    """
    Look up the API key stored for a model.

    Args:
        model_name (str): The name of the model.

    Returns:
        str: The API key associated with the model.

    Raises:
        ValueError: If the model is not available or lacks an API key.
    """
    key = self._read_json().get(model_name)
    if not key:
        raise ValueError(f"Model '{model_name}' is not available or has no API key.")
    return key

list_models()

Prints the list of models and their corresponding API keys (if available).

Source code in cli/ai_model_manager.py
def list_models(self):
    """
    Print every configured entry together with its stored API key.

    Prints "No models found." when the configuration is empty.
    """
    entries = self._read_json()
    if not entries:
        print("No models found.")
        return
    for name, key in entries.items():
        print(f"{name}: {key}")

load()

Loads the current configuration from the JSON file.

Returns:

Name Type Description
dict

Configuration data including selected model and keys.

Source code in cli/ai_model_manager.py
def load(self):
    """
    Load the current configuration from the JSON file.

    Returns:
        dict: Configuration data including the selected model and keys.
    """
    config = self._read_json()
    return config

remove_model(model_name)

Removes a model from the configuration.

Parameters:

Name Type Description Default
model_name str

The name of the model to remove.

required

Raises:

Type Description
ValueError

If the model is not found in the configuration.

Source code in cli/ai_model_manager.py
def remove_model(self, model_name):
    """
    Delete a model entry from the configuration.

    Args:
        model_name (str): The name of the model to remove.

    Raises:
        ValueError: If the model is not found in the configuration.
    """
    config = self._read_json()
    if model_name not in config:
        raise ValueError(f"Model '{model_name}' not found.")
    del config[model_name]
    self._write_json(config)
    print(f"Model '{model_name}' removed successfully.")

select_model(model_name)

Sets the specified model as the default selected model.

Parameters:

Name Type Description Default
model_name str

The name of the model to select.

required

Prints a message based on whether the selection was successful.

Source code in cli/ai_model_manager.py
def select_model(self, model_name):
    """
    Mark the given model as the default selected model.

    The selection is persisted only when the model exists and has a
    non-empty API key; otherwise a failure message is printed.

    Args:
        model_name (str): The name of the model to select.
    """
    config = self._read_json()
    if not config.get(model_name):
        print("No models found or no API key for that model.")
        return
    config["selected_model"] = model_name
    self._write_json(config)
    print(f"Selected model: {model_name}")

llm

gemini_api_output(model_name, prompt_by_user)

Generate AI response using specified model.

Parameters:

Name Type Description Default
model_name str

Name of the AI model to use.

required
prompt_by_user str

User's input prompt.

required

Returns:

Name Type Description
str

Generated response text.

Source code in cli/llm.py
def gemini_api_output(model_name, prompt_by_user):
    """
    Call the Gemini API and return the generated response text.

    Args:
        model_name (str): Name of the AI model to use.
        prompt_by_user (str): User's input prompt.

    Returns:
        str: Generated response text.
    """
    # Imported here: cli.ai_model_manager imports this module back,
    # so a top-level import would be circular.
    from cli.ai_model_manager import AIModelManager

    key = AIModelManager().get_api_key(model_name)

    response = genai.Client(api_key=key).models.generate_content(
        model=model_name, contents=prompt_by_user
    )
    return response.text

prettify_llm_output

prettify_llm_output(response)

Prettifies the output from a language model response by stripping leading and trailing whitespace and code block markers, then prints it as Markdown to the console.

Parameters:

Name Type Description Default
response str

The raw response from the language model.

required

Returns:

Type Description

None

Source code in cli/prettify_llm_output.py
def prettify_llm_output(response):
    """
    Render a language-model response as Markdown in the console.

    Surrounding whitespace and a wrapping Markdown code fence (including an
    optional language tag on the opening fence) are removed before printing.

    Args:
        response (str): The raw response from the language model.

    Returns:
        None
    """
    text = response.strip()
    # str.strip("```") strips *any* run of leading/trailing backticks — it
    # eats legitimate backticks and leaves the language tag of a fenced
    # block behind. Handle the fence markers explicitly instead.
    if text.startswith("```"):
        newline = text.find("\n")
        # Drop the entire opening fence line (e.g. "```markdown").
        text = text[newline + 1:] if newline != -1 else text[3:]
    if text.endswith("```"):
        text = text[:-3].rstrip()
    console = Console()
    md = Markdown(text)
    print()
    console.print(md)
    print()

utils

install_requirements()

Installs the required dependencies for the application.

Source code in cli/utils.py
def install_requirements():
    """
    Installs the required third-party dependencies via pip.

    Installation is best-effort: a failed install is reported with a
    warning rather than raising, preserving the original fire-and-forget
    behavior while no longer hiding failures.
    """

    dependencies = ["rich==14.0.0", "google-genai"]
    for package in dependencies:
        # Use the current interpreter's pip so the install targets the
        # environment actually running this code.
        result = subprocess.run([sys.executable, "-m", "pip", "install", package])
        if result.returncode != 0:
            # Previously a nonzero exit status was silently ignored.
            print(f"Warning: failed to install '{package}'.")

spin_loader(stop_event)

Displays a spinning loader in the console until the stop_event is set.

Parameters:

Name Type Description Default
stop_event Event

An event object used to signal the loader to stop.

required
Source code in cli/utils.py
def spin_loader(stop_event):
    """
    Animate a spinning cursor in the console until stop_event is set.

    Args:
        stop_event (threading.Event): An event object used to signal the loader to stop.
    """
    frames = itertools.cycle("-/|\\")
    while not stop_event.is_set():
        sys.stdout.write(next(frames))
        sys.stdout.flush()
        time.sleep(0.1)
        sys.stdout.write("\b")  # rewind over the frame just drawn
    # Overwrite the final frame with a space so no glyph is left behind.
    sys.stdout.write(" ")
    sys.stdout.flush()