
Sager

OnsSager(ons)

Class used for handling ONS Sager data. Can be accessed via ons.sager.

Parameters:

  • ons (Ons) – Top level object carrying all functionality.
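
A minimal usage sketch, assuming an Ons instance is already available (its construction and arguments are not covered on this page, so the import path and constructor call below are assumptions):

from echo_ons import Ons  # assumed import path, based on the source file location

ons = Ons()  # hypothetical construction; actual arguments depend on your environment

# the SAGER handler is built by Ons itself and accessed from the top-level object
sager = ons.sager
print(sager.data_source_objects)  # objects attached to the "ons_sager" data source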

Source code in echo_ons/ons_sager.py
def __init__(self, ons: e_o.Ons) -> None:
    """Class used for handling ONS Sager data. Can be accessed via `ons.sager`.

    Parameters
    ----------
    ons : Ons
        Top level object carrying all functionality.
    """
    super().__init__(ons)

    # getting credentials to connect
    data_source = self._ons._perfdb.datasources.instances.get(data_source_names=["ons_sager"], get_attributes=True)  # noqa: SLF001
    if not data_source:
        raise ValueError("Data source 'ons_sager' not found.")
    data_source = data_source["ons_sager"]
    if "user" not in data_source or "password" not in data_source:
        raise ValueError("Missing credentials for 'ons_sager' in database.")

    self.data_source_objects: list[str] = data_source["object_names"]
    """List of all objects that are connected to the ons_sager data source."""

    # creating connection to SAGER
    conn_props = OnsHttpConnProperties(
        host="https://integra.ons.org.br/api/sager/renovaveis/api",
        user=data_source["user"],
        password=data_source["password"],
        login_target="SAGER",
    )
    self.conn = OnsHttpHandler(conn_props, skip_connect=True)
    """OnsHttpHandler object used to perform requests to SAGER portal"""

    from .ons_sager_limitations import OnsSagerLimitations
    from .ons_sager_status import OnsSagerStatus
    from .ons_sager_timeseries import OnsSagerTimeseries

    # * subclasses

    self.limitations = OnsSagerLimitations(ons)
    self.status = OnsSagerStatus(ons)
    self.timeseries = OnsSagerTimeseries(ons)

conn = OnsHttpHandler(conn_props, skip_connect=True) instance-attribute

OnsHttpHandler object used to perform requests to the SAGER portal.

data_source_objects = data_source['object_names'] instance-attribute

List of all objects that are connected to the ons_sager data source.
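
As a sketch of how these attributes might be used before triggering an import (assuming sager is the OnsSager instance obtained via ons.sager):

site = "SOME_SITE"  # hypothetical object name

# data_source_objects lists every object attached to the "ons_sager" data source;
# import_database raises a ValueError for any name outside this list
if site not in sager.data_source_objects:
    print(f"{site} is not connected to the ons_sager data source")

# conn is created with skip_connect=True, which suggests the handler defers the
# SAGER login until a request is actually performed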

import_database(period, site_names=None, recalc_features=False, calc_limitation_lost_energy=True)

Method to import all SAGER data for a given period.

This currently includes:

  • Status
  • Limitation events
  • Timeseries data

Parameters:

  • period (DateTimeRange) – Period to import data for.

  • site_names (list[str] | None, default: None) – The names of the sites to import data for. If None, all sites attached to the "ons_sager" data source will be imported. By default None.

  • recalc_features (bool, default: False) – If True, the function will recalculate the features of the ONS objects, by default False.

  • calc_limitation_lost_energy (bool, default: True) – If True, the function will calculate the produced and lost energy during each limitation event. This will be done for each SPE and results will be saved to table ons_spe_limitations in performance_db. By default True.

Returns:

  • ErrorSummary – Error summary of the import process.
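
A sketch of a typical call, assuming DateTimeRange can be built from a start and an end timestamp (its real constructor and import path are not shown on this page):

from datetime import datetime

period = DateTimeRange(datetime(2024, 1, 1), datetime(2024, 2, 1))  # assumed signature

errs = ons.sager.import_database(
    period=period,
    site_names=["SITE_A"],  # hypothetical site attached to the ons_sager data source
    recalc_features=False,
    calc_limitation_lost_energy=True,
)

# the returned ErrorSummary aggregates child summaries from the Status, Limitation
# Events and Timeseries imports; total_exception_count is also what the source uses
if errs.total_exception_count > 0:
    print("SAGER import finished with errors")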

Source code in echo_ons/ons_sager.py
def import_database(
    self,
    period: DateTimeRange,
    site_names: list[str] | None = None,
    recalc_features: bool = False,
    calc_limitation_lost_energy: bool = True,
) -> ErrorSummary:
    """Method to import all SAGER data for a given period.

    This currently includes:

    - Status
    - Limitation events
    - Timeseries data

    Parameters
    ----------
    period : DateTimeRange
        Period to import data for.
    site_names : list[str] | None, optional
        The names of the sites to import data for. If None, all sites attached to the "ons_sager" data source will be imported.

        By default None.
    recalc_features : bool, optional
        If True, the function will recalculate the features of the ONS objects, by default False.
    calc_limitation_lost_energy : bool, optional
        If True, the function will calculate the produced and lost energy during each limitation event. This will be done for each SPE and results will be saved to table ons_spe_limitations in performance_db.

        By default True.

    Returns
    -------
    ErrorSummary
        Error summary of the import process.
    """
    # creating error summary
    errs = ErrorSummary(name="OnsSager")

    # checking if all the requested objects exist in self._ons.sager.data_source_objects
    if site_names:
        wrong_objs = set(site_names) - set(self.data_source_objects)
        if wrong_objs:
            raise ValueError(f"Requested site names not connected to data source ons_sager: {wrong_objs}")
    else:
        site_names = self.data_source_objects

    # first importing Status
    logger.info("Importing Status")
    try:
        err_summ = self._ons.sager.status.import_database(period=period, site_names=site_names)
        errs.add_child(err_summ)
    except Exception as e:
        logger.exception("Error importing Status")
        errs.add_exception(e)

    # then importing limitation events
    logger.info("Importing Limitation Events")
    try:
        err_summ = self._ons.sager.limitations.import_database(period=period, site_names=site_names)
        errs.add_child(err_summ)
    except Exception as e:
        logger.exception("Error importing Limitation Events")
        errs.add_exception(e)

    # finally the timeseries data
    logger.info("Importing Timeseries Data")
    try:
        err_summ = self._ons.sager.timeseries.import_database(period=period, site_names=site_names)
        errs.add_child(err_summ)
    except Exception as e:
        logger.exception("Error importing Timeseries Data")
        errs.add_exception(e)

    # recalculating features
    if recalc_features:
        from echo_energycalc import CalculationHandler

        calc_err_summ = ErrorSummary(name="FeatureRecalculation")

        logger.info("Recalculating features")

        for obj in site_names:
            logger.info(f"{obj} - Recalculating features for period {period}")
            try:
                calc = CalculationHandler.from_type_defaults(object_filters={"object_names": [obj]})[0]
                this_errs = calc.calculate(period=period)
                if this_errs.total_exception_count > 0:
                    raise RuntimeError(f"Errors occurred while recalculating features for {obj} and period {period}")
            except Exception:
                logger.exception(f"{obj} - Could not recalculate features")
                obj_err = ErrorObject(
                    name=obj,
                    exceptions=[RuntimeError(f"Could not recalculate features for {obj} and period {period}")],
                )
                calc_err_summ.add_child(obj_err)
                continue

        errs.add_child(calc_err_summ)

    # calculating lost energy
    if calc_limitation_lost_energy:
        from .limitations_lost_energy import calc_limitation_events_lost_energy

        calc_err_summ = ErrorSummary(name="LimitationLostEnergyCalc")

        logger.info("Calculating limitation events lost energy")
        for obj in site_names:
            logger.info(f"{obj} - Calculating limitation events lost energy for period {period}")
            try:
                if this_errs := calc_limitation_events_lost_energy(
                    period,
                    objects=[obj],
                ):
                    raise RuntimeError(
                        f"Errors occurred while calculating limitation events lost energy for {obj} and period {period}",
                    )
            except Exception:
                logger.exception(f"{obj} - Could not calculate limitation events lost energy")
                obj_err = ErrorObject(
                    name=obj,
                    exceptions=[RuntimeError(f"Could not calculate limitation events lost energy for {obj} and period {period}")],
                )
                calc_err_summ.add_child(obj_err)
                continue

        errs.add_child(calc_err_summ)

    return errs