
Core Classes

illuminate.manager.assistant.Assistant (IAssistant)

Assistant class, creates objects from project files.

Used by the Manager class in its static methods and by the cli start function to initialize Manager.

Source code in illuminate/manager/assistant.py
class Assistant(IAssistant):
    """
    Assistant class, creates objects from project files.

    Used by the Manager class in its static methods and by the cli start
    function to initialize Manager.
    """

    @staticmethod
    def provide_alembic_config(
        path: str, selector: str, url: Optional[str] = None
    ) -> Config:
        """
        Creates Alembic's configuration object.

        :param path: Migration directory path
        :param selector: Database name in settings.py module DB attribute
        :param url: SQLAlchemy Database URL
        :return: Alembic configuration object
        """
        if not url:
            url = Assistant._provide_db_url(selector)
        config = Config()
        config.set_main_option(
            "script_location", os.path.join(path, "migrations")
        )
        config.set_main_option("sqlalchemy.url", url)
        return config

    @staticmethod
    def provide_alembic_operations(
        selector: str, url: Optional[str] = None
    ) -> Operations:
        """
        Creates Alembic's operations object.
        NOTE: Currently unused after switching to SQLAlchemy 2.0.31. Alembic's
        Operations.bulk_insert is not working at the time of this note
        creation. Still, bulk_insert is considered the proper way to populate
        tables and should be used again when this issue is resolved.

        :param selector: Database name in settings.py module DB attribute
        :param url: SQLAlchemy Database URL
        :return: Alembic operations object
        """
        if not url:
            url = Assistant._provide_db_url(selector)
        engine = create_engine(url)
        context = MigrationContext.configure(engine.connect())
        return Operations(context)

    @staticmethod
    def provide_context(
        sessions: bool = True,
        _labels: Optional[tuple[dict]] = None,
        _observers: Optional[tuple[str]] = None,
    ) -> dict[
        str,
        Union[
            dict[str, Union[sessionmaker[AsyncSession], InfluxDBClient]],
            str,
            list[Union[Type[Adapter], Type[Observer]]],
            ModuleType,
        ],
    ]:
        """
        Creates Manager's constructor kwargs.

        :param sessions: Sessions option
        :param _labels: Optional tuple of label dicts to filter Observers
        :param _observers: Optional tuple of Observer's names or class names
        :return: Manager's constructor parameters
        :raises BasicManagerException:
        """
        settings = Assistant._provide_settings()
        context = {
            "adapters": Assistant._collect_classes("adapters"),
            "name": settings.NAME,
            "observers": Assistant._collect_classes("observers"),
            "path": os.getcwd(),
            "settings": settings,
        }

        if sessions:
            context["sessions"] = Assistant._provide_sessions()

        if _labels:
            required_labels = {k: v for d in _labels for k, v in d.items()}
            context["observers"] = list(
                filter(
                    lambda x: all(
                        x.LABELS.get(k) == v
                        for k, v in required_labels.items()
                    ),
                    context["observers"],
                )
            )

        if _observers:
            context["observers"] = list(
                filter(
                    lambda x: x.NAME in _observers  # type: ignore
                    or x.__name__ in _observers,
                    context["observers"],
                )
            )

        if not context["observers"]:
            raise BasicManagerException(
                "No observers found or left after filtering"
            )

        return context

    @staticmethod
    def provide_models() -> list[object]:
        """
        Gathers project models.

        :return: Models list
        """
        settings = Assistant._provide_settings()
        return [locate(i) for i in settings.MODELS]

    @staticmethod
    def _collect_classes(
        directory: str,
    ) -> list[Union[Type[Adapter], Type[Observer]]]:
        """
        Recursively collects all classes from a given directory matching its
        prefix (Adapter/Observer).

        :param directory: Either "adapters" or "observers"
        :return: List of matching class types
        """
        classes = []
        prefix = directory.capitalize()[:-1]
        base_dir = os.path.join(os.getcwd(), directory)

        for dirpath, _, filenames in os.walk(base_dir):
            for filename in filenames:
                if not filename.endswith(".py") or filename.startswith(
                    "__init__"
                ):
                    continue

                full_path = os.path.join(dirpath, filename)
                rel_path = os.path.relpath(full_path, os.getcwd())

                module_name = rel_path.replace(os.sep, ".")[:-3]
                spec = importlib.util.spec_from_file_location(
                    module_name, full_path
                )

                if not spec or not spec.loader:
                    continue

                module = importlib.util.module_from_spec(spec)
                sys.modules[module_name] = module
                spec.loader.exec_module(module)

                for name, cls in inspect.getmembers(module, inspect.isclass):
                    if cls.__module__ != module_name:
                        continue
                    if name.startswith(prefix) and len(name) > len(prefix):
                        classes.append(cls)

        return classes

    @staticmethod
    def _provide_db_url(selector: str, _async: bool = False) -> str:
        """
        Creates database URL.

        :param selector: Database name in settings.py module DB attribute
        :param _async: Async URL flag
        :return: Database URL string
        :raises BasicManagerException:
        """
        settings = Assistant._provide_settings()
        try:
            db = settings.DB[selector]
        except KeyError:
            raise BasicManagerException(
                f"Database {selector} is not defined in settings.py"
            )
        if not db.get("name"):
            db["name"] = settings.NAME
        if _async:
            async_drivers = {
                "mysql": "asyncmy",
                "postgresql": "asyncpg",
            }
            driver = async_drivers[db["type"]]
            return "{type}+{driver}://{user}:{pass}@{host}/{name}".format(
                driver=driver, **db
            )

        return "{type}://{user}:{pass}@{host}/{name}".format(**db)

    @staticmethod
    def _provide_sessions() -> (
        dict[str, Union[sessionmaker[AsyncSession], InfluxDBClient]]
    ):
        """
        Creates a dictionary of database sessions.

        :return: Database sessions dictionary
        """
        _sessions: dict = {}
        settings = Assistant._provide_settings()
        logger.opt(colors=True).info(
            f"Number of expected db connections: "
            f"<yellow>{len(settings.DB)}</yellow>"
        )
        for db in settings.DB:
            _type = settings.DB[db]["type"]
            if _type in SUPPORTED_NOSQL_DATABASES:
                session = Assistant.__provide_nosql_sessions(db, settings)
                _sessions.update({db: session})
            elif _type in SUPPORTED_SQL_DATABASES:
                session = Assistant.__provide_sql_sessions(db, settings)
                _sessions.update({db: session})
            else:
                logger.warning(f"Database type {_type} is not supported")

        return _sessions

    @staticmethod
    def _provide_settings() -> ModuleType:
        """
        Imports project's settings.py module and returns it.

        :return: Project's settings.py module
        :raises BasicManagerException:
        """
        try:
            import settings  # type: ignore

            return settings
        except ImportError:
            raise BasicManagerException(
                "Framework did not found settings.py in the current directory"
            )

    @staticmethod
    def __log_database_connection(db: str, settings: ModuleType) -> None:
        """
        Log database connection.

        :param db: database name from settings.py module
        :param settings: settings.py module
        :return: None
        """
        host = settings.DB[db]["host"]
        port = settings.DB[db]["port"]
        logger.opt(colors=True).info(
            f"Adding session with <yellow>{db}</yellow> at "
            f"<magenta>{host}:{port}</magenta> to context"
        )

    @staticmethod
    def __provide_nosql_sessions(
        db: str, settings: ModuleType
    ) -> InfluxDBClient:
        """
        Provides NoSQL database session.

        :param db: database name from settings.py module
        :param settings: settings.py module
        :return: InfluxDBClient object
        """
        Assistant.__log_database_connection(db, settings)
        if settings.DB[db]["type"] == "influxdb":
            return InfluxDBClient(
                host=settings.DB[db]["host"],
                port=settings.DB[db]["port"],
                db=settings.DB[db].get("name", settings.NAME),
                username=settings.DB[db]["user"],
                password=settings.DB[db]["pass"],
            )
        return None

    @staticmethod
    def __provide_sql_sessions(
        db: str, settings: ModuleType
    ) -> sessionmaker[AsyncSession]:
        """
        Provides SQL database session.

        :param db: database name from settings.py module
        :param settings: settings.py module
        :return: AsyncSession created with session maker
        """
        Assistant.__log_database_connection(db, settings)
        return sessionmaker(
            create_async_engine(Assistant._provide_db_url(db, _async=True)),
            class_=AsyncSession,
            expire_on_commit=False,
        )
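
The private helpers above read connection details from the project's settings.py DB attribute, keyed by selector. A minimal sketch of what one SQL and one InfluxDB entry might look like, based on the keys these methods access; the selector names and values are illustrative, not framework defaults:

# settings.py (sketch; keys inferred from _provide_db_url and
# __provide_nosql_sessions -- values are placeholders)
NAME = "example"

DB = {
    "main": {                    # selector passed to the Alembic helpers
        "type": "postgresql",    # "mysql" is the other async-capable type
        "host": "localhost",
        "port": 5432,
        "user": "illuminate",
        "pass": "password",
        # "name" is optional; settings.NAME is used when it is missing
    },
    "measurements": {
        "type": "influxdb",      # routed to __provide_nosql_sessions
        "host": "localhost",
        "port": 8086,
        "user": "illuminate",
        "pass": "password",
    },
}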

provide_alembic_config(path: str, selector: str, url: Optional[str] = None) -> Config staticmethod

Creates Alembic's configuration object.

:param path: Migration directory path
:param selector: Database name in settings.py module DB attribute
:param url: SQLAlchemy Database URL
:return: Alembic configuration object

Source code in illuminate/manager/assistant.py
@staticmethod
def provide_alembic_config(
    path: str, selector: str, url: Optional[str] = None
) -> Config:
    """
    Creates Alembic's configuration object.

    :param path: Migration directory path
    :param selector: Database name in settings.py module DB attribute
    :param url: SQLAlchemy Database URL
    :return: Alembic configuration object
    """
    if not url:
        url = Assistant._provide_db_url(selector)
    config = Config()
    config.set_main_option(
        "script_location", os.path.join(path, "migrations")
    )
    config.set_main_option("sqlalchemy.url", url)
    return config
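
A hedged usage sketch, run from a project directory containing settings.py; the "main" selector is illustrative and the URL is resolved from settings.py when none is given:

import os

from illuminate.manager.assistant import Assistant

config = Assistant.provide_alembic_config(path=os.getcwd(), selector="main")
print(config.get_main_option("script_location"))  # <cwd>/migrations
print(config.get_main_option("sqlalchemy.url"))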

provide_alembic_operations(selector: str, url: Optional[str] = None) -> Operations staticmethod

Creates Alembic's operations object. NOTE: Currently unused after switching to SQLAlchemy 2.0.31. Alembic's Operations.bulk_insert is not working at the time of this note creation. Still, bulk_insert is considered the proper way to populate tables and should be used again when this issue is resolved.

:param selector: Database name in settings.py module DB attribute
:param url: SQLAlchemy Database URL
:return: Alembic operations object

Source code in illuminate/manager/assistant.py
@staticmethod
def provide_alembic_operations(
    selector: str, url: Optional[str] = None
) -> Operations:
    """
    Creates Alembic's operations object.
    NOTE: Currently unused after switching to SQLAlchemy 2.0.31. Alembic's
    Operations.bulk_insert is not working at the time of this note
    creation. Still, bulk_insert is considered the proper way to populate
    tables and should be used again when this issue is resolved.

    :param selector: Database name in settings.py module DB attribute
    :param url: SQLAlchemy Database URL
    :return: Alembic operations object
    """
    if not url:
        url = Assistant._provide_db_url(selector)
    engine = create_engine(url)
    context = MigrationContext.configure(engine.connect())
    return Operations(context)
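
Although the method is currently unused (see the note above), obtaining the operations object might look like this; the "main" selector is illustrative:

from illuminate.manager.assistant import Assistant

# Connects synchronously to the database resolved from settings.py.
operations = Assistant.provide_alembic_operations(selector="main")
# operations.bulk_insert(...) would be the intended use once the noted
# SQLAlchemy 2.0.31 issue is resolved.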

provide_context(sessions: bool = True, _labels: Optional[tuple[dict]] = None, _observers: Optional[tuple[str]] = None) -> dict[str, Union[dict[str, Union[sessionmaker[AsyncSession], InfluxDBClient]], str, list[Union[Type[Adapter], Type[Observer]]], ModuleType]] staticmethod

Creates Manager's constructor kwargs.

:param sessions: Sessions option
:param _labels: Optional tuple of label dicts to filter Observers
:param _observers: Optional tuple of Observer's names or class names
:return: Manager's constructor parameters
:raises BasicManagerException:

Source code in illuminate/manager/assistant.py
@staticmethod
def provide_context(
    sessions: bool = True,
    _labels: Optional[tuple[dict]] = None,
    _observers: Optional[tuple[str]] = None,
) -> dict[
    str,
    Union[
        dict[str, Union[sessionmaker[AsyncSession], InfluxDBClient]],
        str,
        list[Union[Type[Adapter], Type[Observer]]],
        ModuleType,
    ],
]:
    """
    Creates Manager's constructor kwargs.

    :param sessions: Sessions option
    :param _labels: Optional tuple of label dicts to filter Observers
    :param _observers: Optional tuple of Observer's names or class names
    :return: Manager's constructor parameters
    :raises BasicManagerException:
    """
    settings = Assistant._provide_settings()
    context = {
        "adapters": Assistant._collect_classes("adapters"),
        "name": settings.NAME,
        "observers": Assistant._collect_classes("observers"),
        "path": os.getcwd(),
        "settings": settings,
    }

    if sessions:
        context["sessions"] = Assistant._provide_sessions()

    if _labels:
        required_labels = {k: v for d in _labels for k, v in d.items()}
        context["observers"] = list(
            filter(
                lambda x: all(
                    x.LABELS.get(k) == v
                    for k, v in required_labels.items()
                ),
                context["observers"],
            )
        )

    if _observers:
        context["observers"] = list(
            filter(
                lambda x: x.NAME in _observers  # type: ignore
                or x.__name__ in _observers,
                context["observers"],
            )
        )

    if not context["observers"]:
        raise BasicManagerException(
            "No observers found or left after filtering"
        )

    return context
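
A sketch of how the filters might be used from inside a project directory; the observer name and label are illustrative:

from illuminate.manager.assistant import Assistant

# All observers and adapters, plus database sessions.
context = Assistant.provide_context()

# Keep only observers whose NAME or class name matches.
context = Assistant.provide_context(_observers=("example",))

# Keep only observers whose LABELS contain every required key/value pair.
context = Assistant.provide_context(_labels=({"group": "nightly"},))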

provide_models() -> list[object] staticmethod

Gathers project models.

:return: Models list

Source code in illuminate/manager/assistant.py
@staticmethod
def provide_models() -> list[object]:
    """
    Gathers project models.

    :return: Models list
    """
    settings = Assistant._provide_settings()
    return [locate(i) for i in settings.MODELS]
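
settings.MODELS is expected to hold dotted paths that pydoc's locate can resolve to model classes; a hedged example with an illustrative path:

# settings.py (illustrative)
MODELS = ["models.example.ModelExample"]

# elsewhere, from the project directory
from illuminate.manager.assistant import Assistant

models = Assistant.provide_models()  # [<class 'models.example.ModelExample'>]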

illuminate.manager.manager.Manager (IManager)

Manager class, executes framework's cli commands.

All public methods correspond to cli commands. It should only be instantiated when the 'illuminate observe start' command is used with kwargs provided by the Assistant class.

Source code in illuminate/manager/manager.py
class Manager(IManager):
    """
    Manager class, executes framework's cli commands.

    All public methods correspond to cli commands. It should only be
    instantiated when the 'illuminate observe start' command is used with
    kwargs provided by the Assistant class.
    """

    def __init__(
        self,
        adapters: list[Type[Adapter]],
        name: str,
        observers: list[Type[Observer]],
        path: str,
        sessions: dict[str, Union[Type[AsyncSession], InfluxDBClient]],
        settings: ModuleType,
        *args,
        **kwargs,
    ):
        """
        Manager's __init__ method.

        :param adapters: List of Adapters found in project files
        :param name: Project's name
        :param observers: List of Observers found in project files after
        filtering
        :param path: Path to project files
        :param sessions: Database sessions
        :param settings: Project's settings.py module
        """
        self.adapters = adapters
        self.name = name
        self.observers = observers
        self.path = path
        self.sessions = sessions
        self.settings = settings
        self._adapters: list[Adapter] = []
        self._observers: list[Observer] = []
        self.__observe_queue: queues.Queue = queues.Queue()
        self.__adapt_queue: queues.Queue = queues.Queue()
        self.__export_queue: queues.Queue = queues.Queue()
        self.__exported: set = set()
        self.__not_observed: set = set()
        self.__observed: set = set()
        self.__observing: set = set()

    @property
    def exported(self) -> set:
        return self.__exported

    @property
    def not_observed(self) -> set:
        return self.__not_observed

    @property
    def observed(self) -> set:
        return self.__observed

    @staticmethod
    @adapt("populate")
    def db_populate(
        fixtures: tuple[str],
        selector: str,
        url: str,
    ) -> None:
        """
        Populates database with fixtures.

        :param fixtures: Tuple of fixture files
        :param selector: Database name in settings.py module
        :param url: SQLAlchemy Database URL
        :return: None
        """
        models = Assistant.provide_models()
        table_data = {}
        for _file in fixtures:
            with open(_file, "r") as file:  # type: ignore
                content = json.load(file)  # type: ignore
                for table in content:
                    table_data.update({table["name"]: table["data"]})
        with Session(create_engine(url)) as session:
            for model in models:
                if model.__tablename__ in table_data:  # type: ignore
                    data = table_data[model.__tablename__]  # type: ignore
                    for record in data:
                        session.add(model(**record))  # type: ignore
                        logger.debug(
                            f"Row {record} added to "  # type: ignore
                            f"table buffer {model.__tablename__}"
                        )
                    session.commit()
        logger.success(f"Database {selector} populated")

    @staticmethod
    @adapt("revision")
    def db_revision(
        config: Config,
        revision: str,
    ) -> None:
        """
        Creates Alembic's revision file in migration directory.

        :param config: Alembic's configuration object
        :param revision: Parent revision

        :return: None
        """
        command.revision(
            config,
            autogenerate=True,
            head=revision,
        )
        logger.success("Revision created")

    @staticmethod
    @adapt("upgrade")
    def db_upgrade(
        config: Config,
        revision: str,
        selector: str,
    ) -> None:
        """
        Applies migration file to a database.

        :param config: Alembic's configuration object
        :param revision: Revision to apply to database
        :param selector: Database name in settings.py module
        :return: None
        """
        command.upgrade(config, revision)
        logger.success(f"Database {selector} upgraded")

    @staticmethod
    def project_setup(name: str, path: str) -> None:
        """
        Creates a project directory with all needed files.

        :param name: Project's name
        :param path: Path to project files
        :return: None
        :raises BasicManagerException:
        """
        if path != ".":
            path = os.path.join(path, name)
            if os.path.exists(path):
                raise BasicManagerException("Directory already exists")
            logger.opt(colors=True).info(
                f"Creating project directory for project "
                f"<yellow>{name}</yellow>"
            )
            os.mkdir(path)

        for _name, content in FILES.items():
            file_path = os.path.join(path, _name)
            if os.sep in _name:
                os.makedirs(
                    os.sep.join(file_path.split(os.sep)[:-1]), exist_ok=True
                )
            with open(file_path, "w") as file:
                logger.debug(f"Creating project file {_name} at {file_path}")
                file.write(f"{content.format(name=name).strip()}\n")

        logger.success(f"Project structure created for {name}")

    @staticmethod
    @show_observer_catalogue
    def observe_catalogue(**context) -> dict:
        """
        Passes context dict to illuminate.decorators.cli.show_observe_catalogue.

        :return: dict
        """
        return context

    @show_logo
    @show_info
    def observe_start(self) -> None:
        """
        Starts producer/consumer ETL process.

        :return: None
        """
        io_loop = ioloop.IOLoop.current()
        io_loop.run_sync(self._observe_start)

    async def __start(self) -> None:
        """
        Initializes Adapters and Observers and passes initial Observation
        objects to self.__router.

        :return: None
        """
        for adapter in self.adapters:
            self._adapters.append(adapter(manager=self))
            logger.opt(colors=True).info(
                f"Adapter <yellow>{adapter.__name__}</yellow> initialized"
            )

        for observer in self.observers:
            instance = observer(manager=self)
            self._observers.append(instance)
            logger.opt(colors=True).info(
                f"Observer <yellow>{observer.__name__}</yellow> initialized"
            )
            for _observation in instance.initial_observations:
                await self.__router(_observation)

    async def __router(
        self, item: Union[Exporter, Finding, Observation]
    ) -> None:
        """
        Routes object based on its class to proper queue.

        :param item: Exporter, Finding or Observation object
        :return: None
        """
        if isinstance(item, Exporter):
            await self.__export_queue.put(item)
        elif isinstance(item, Finding):
            if inspect.stack()[1][3] != "__adaptation":
                await self.__adapt_queue.put(item)
            else:
                logger.warning(
                    f"Findings can only yield Exporters and Observations "
                    f"thus rejecting item {item}"
                )
        elif isinstance(item, Observation):
            _hash = hash(item)
            if _hash not in self.__observing:
                self.__observing.add(_hash)
                if isinstance(item, HTTPObservation) and not item.allowed:
                    return
                await self.__observe_queue.put(item)
        else:
            logger.warning(
                f"Manager rejected item {item} due to unsupported "
                f"item type {type(item)}"
            )

    @logger.catch
    async def __observe(self) -> None:
        """
        Takes Observation object from self.__observe_queue and, after a
        delay, passes it to the self.__observation_switch method.

        :return: None
        """
        async for item in self.__observe_queue:
            if not item:
                return
            await asyncio.sleep(
                self.settings.OBSERVATION_CONFIGURATION["delay"]
            )
            await self.__observation_switch(item)
            logger.debug(f"Coroutine observed {item}")
            del item
            self.__observe_queue.task_done()

    async def __observe_file(self, item: FileObservation) -> None:
        """
        Calls FileObservation's observe method and passes the result to the
        resolve function.

        :param item: FileObservation object
        :return: None
        """
        async with item.observe(xcom=item.xcom) as result:
            await self.__observation_resolve(result, item.url)

    async def __observe_http(self, item: HTTPObservation) -> None:
        """
        Prepares HTTPObservation configuration, calls the observe method and
        passes the result to the resolve function.

        :param item: HTTPObservation object
        :return: None
        """
        item.configuration = {
            **self.settings.OBSERVATION_CONFIGURATION["http"],
            **item.configuration,
        }
        result = await item.observe(xcom=item.xcom)
        await self.__observation_resolve(result, item.url)

    async def __observe_sql(self, item: SQLObservation) -> None:
        """
        Calls SQLObservation's observe method and passes the result to the
        resolve function.

        :param item: SQLObservation object
        :return: None
        """
        result = await item.observe(self.sessions[item.url], xcom=item.xcom)
        await self.__observation_resolve(result, f"{item.url}:{item.query}")

    async def __observe_splash(self, item: SplashObservation) -> None:
        """
        Prepares SplashObservation configuration, calls the observe method
        and passes the result to the resolve function.

        :param item: SplashObservation object
        :return: None
        """
        item.configuration = {
            **self.settings.OBSERVATION_CONFIGURATION["splash"],
            **item.configuration,
        }
        result = await item.observe(
            self.settings.OBSERVATION_CONFIGURATION["http"], xcom=item.xcom
        )
        await self.__observation_resolve(result, item.url)

    async def __observation_resolve(
        self, result: Union[None, Result], url: str
    ):
        """
        Resolves Observation.

        :param result: Result
        :param url: URL str
        :return: None
        """
        if not result:
            self.__not_observed.add(url)
            return
        self.__observed.add(url)
        try:
            if inspect.isawaitable(result):
                await result
            if inspect.isasyncgen(result):
                async for _item in result:
                    await self.__router(_item)
        except Exception:  # noqa
            stack = traceback.format_exc().strip().replace("<", "\\<")
            logger.opt(colors=True).warning(
                "Observation callback throws the following exception\n"
                f"<red>{stack}</red>"
            )

    async def __observation_switch(self, item: Observation) -> None:
        """
        Passes Observation object to proper method based on its class.

        :param item: Observation object
        :return: None
        """
        if isinstance(item, HTTPObservation):
            if isinstance(item, SplashObservation):
                await self.__observe_splash(item)
            else:
                await self.__observe_http(item)
        elif isinstance(item, FileObservation):
            await self.__observe_file(item)
        elif isinstance(item, SQLObservation):
            await self.__observe_sql(item)
        else:
            logger.warning(
                f"Observation of a type {type(item)} is not supported"
            )

    @logger.catch
    async def __adapt(self) -> None:
        """
        Takes Finding object from self.__adapt_queue and passes it to the
        self.__adaptation method.

        :return: None
        """
        async for item in self.__adapt_queue:
            if not item:
                return
            await self.__adaptation(item)
            logger.debug(f"Coroutine adapted {item}")
            del item
            self.__adapt_queue.task_done()

    async def __adaptation(self, item: Finding) -> None:
        """
        Passes Finding object to Adapter's instance adapt method.

        :param item: Finding object
        :return: None
        """
        for adapter in self._adapters:
            for subscriber in adapter.subscribers:
                if isinstance(item, subscriber):
                    try:
                        items = adapter.adapt(item)
                        async for _item in items:  # type: ignore
                            await self.__router(_item)
                    except Exception as exception:
                        logger.warning(f"{self}.adapt() -> {exception}")

    @logger.catch
    async def __export(self) -> None:
        """
        Takes Exporter object from self.__export_queue and passes it to the
        self.__export_to method.

        :return: None
        """
        async for item in self.__export_queue:
            if not item:
                return
            await self.__export_to(item)
            logger.debug(f"Coroutine exported {item}")
            del item
            self.__export_queue.task_done()

    async def __export_to(self, item: Exporter) -> None:
        """
        Passes Exporter object to proper method based on its class.

        :param item: Exporter object
        :return: None
        """
        if isinstance(item, (InfluxDBExporter, SQLExporter)):
            await self.__export_to_database(item)

    async def __export_to_database(
        self, item: Union[InfluxDBExporter, SQLExporter]
    ) -> None:
        """
        Acquires database session based on Exporter's attributes and passes
        it to Exporter's export method.

        :param item: InfluxDBExporter or SQLExporter object
        :return: None
        """
        try:
            session = self.sessions[item.name]
            await item.export(session)
            self.__exported.add(item)
        except BasicExporterException:
            pass
        except KeyError:
            logger.warning(f"Database {item.name} of is not found in context")

    @logger.catch
    async def _observe_start(self) -> None:
        """
        Starts producer/consumer ETL process.

        :return: None
        """
        self.adapters.sort(key=lambda x: x.priority, reverse=True)

        _adapters = self.settings.CONCURRENCY["adapters"]
        _exporters = self.settings.CONCURRENCY["exporters"]
        _obs = self.settings.CONCURRENCY["observations"]

        adapters = gen.multi([self.__adapt() for _ in range(_adapters)])
        exporters = gen.multi([self.__export() for _ in range(_exporters)])
        observations = gen.multi([self.__observe() for _ in range(_obs)])

        await self.__start()
        await self.__observe_queue.join()
        await self.__adapt_queue.join()
        await self.__export_queue.join()

        for _ in range(_obs):
            await self.__observe_queue.put(None)
        for _ in range(_adapters):
            await self.__adapt_queue.put(None)
        for _ in range(_exporters):
            await self.__export_queue.put(None)

        await adapters
        await exporters
        await observations

        for session in self.sessions:
            if isinstance(self.sessions[session], InfluxDBClient):
                await self.sessions[session].close()  # type: ignore
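
The _observe_start coroutine and the private consumers above read a few settings.py attributes. A sketch of the shape they assume, with illustrative values:

# settings.py (sketch; keys taken from _observe_start, __observe,
# __observe_http and __observe_splash -- values are placeholders)
CONCURRENCY = {
    "adapters": 2,       # number of __adapt consumers
    "exporters": 2,      # number of __export consumers
    "observations": 8,   # number of __observe consumers
}

OBSERVATION_CONFIGURATION = {
    "delay": 0.1,        # seconds slept before each observation
    "http": {},          # defaults merged into HTTPObservation.configuration
    "splash": {},        # defaults merged into SplashObservation.configuration
}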

__init__(self, adapters: list[Type[Adapter]], name: str, observers: list[Type[Observer]], path: str, sessions: dict[str, Union[Type[AsyncSession], InfluxDBClient]], settings: ModuleType, *args, **kwargs) special

Manager's __init__ method.

:param adapters: List of Adapters found in project files
:param name: Project's name
:param observers: List of Observers found in project files after filtering
:param path: Path to project files
:param sessions: Database sessions
:param settings: Project's settings.py module

Source code in illuminate/manager/manager.py
def __init__(
    self,
    adapters: list[Type[Adapter]],
    name: str,
    observers: list[Type[Observer]],
    path: str,
    sessions: dict[str, Union[Type[AsyncSession], InfluxDBClient]],
    settings: ModuleType,
    *args,
    **kwargs,
):
    """
    Manager's __init__ method.

    :param adapters: List of Adapters found in project files
    :param name: Project's name
    :param observers: List of Observers found in project files after
    filtering
    :param path: Path to project files
    :param sessions: Database sessions
    :param settings: Project's settings.py module
    """
    self.adapters = adapters
    self.name = name
    self.observers = observers
    self.path = path
    self.sessions = sessions
    self.settings = settings
    self._adapters: list[Adapter] = []
    self._observers: list[Observer] = []
    self.__observe_queue: queues.Queue = queues.Queue()
    self.__adapt_queue: queues.Queue = queues.Queue()
    self.__export_queue: queues.Queue = queues.Queue()
    self.__exported: set = set()
    self.__not_observed: set = set()
    self.__observed: set = set()
    self.__observing: set = set()
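
As the class description notes, these kwargs normally come from Assistant.provide_context; a minimal instantiation sketch, assuming it is run from a project directory containing settings.py:

from illuminate.manager.assistant import Assistant
from illuminate.manager.manager import Manager

manager = Manager(**Assistant.provide_context())
manager.observe_start()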

db_populate(fixtures: tuple[str], selector: str, url: str) -> None staticmethod

Populates database with fixtures.

:param fixtures: Tuple of fixture files
:param selector: Database name in settings.py module
:param url: SQLAlchemy Database URL
:return: None

Source code in illuminate/manager/manager.py
@staticmethod
@adapt("populate")
def db_populate(
    fixtures: tuple[str],
    selector: str,
    url: str,
) -> None:
    """
    Populates database with fixtures.

    :param fixtures: Tuple of fixture files
    :param selector: Database name in settings.py module
    :param url: SQLAlchemy Database URL
    :return: None
    """
    models = Assistant.provide_models()
    table_data = {}
    for _file in fixtures:
        with open(_file, "r") as file:  # type: ignore
            content = json.load(file)  # type: ignore
            for table in content:
                table_data.update({table["name"]: table["data"]})
    with Session(create_engine(url)) as session:
        for model in models:
            if model.__tablename__ in table_data:  # type: ignore
                data = table_data[model.__tablename__]  # type: ignore
                for record in data:
                    session.add(model(**record))  # type: ignore
                    logger.debug(
                        f"Row {record} added to "  # type: ignore
                        f"table buffer {model.__tablename__}"
                    )
                session.commit()
    logger.success(f"Database {selector} populated")

db_revision(config: Config, revision: str) -> None staticmethod

Creates Alembic's revision file in migration directory.

:param config: Alembic's configuration object
:param revision: Parent revision
:return: None

Source code in illuminate/manager/manager.py
@staticmethod
@adapt("revision")
def db_revision(
    config: Config,
    revision: str,
) -> None:
    """
    Creates Alembic's revision file in migration directory.

    :param config: Alembic's configuration object
    :param revision: Parent revision

    :return: None
    """
    command.revision(
        config,
        autogenerate=True,
        head=revision,
    )
    logger.success("Revision created")

db_upgrade(config: Config, revision: str, selector: str) -> None staticmethod

Applies migration file to a database.

:param config: Alembic's configuration object
:param revision: Revision to apply to database
:param selector: Database name in settings.py module
:return: None

Source code in illuminate/manager/manager.py
@staticmethod
@adapt("upgrade")
def db_upgrade(
    config: Config,
    revision: str,
    selector: str,
) -> None:
    """
    Applies migration file to a database.

    :param config: Alembic's configuration object
    :param revision: Revision to apply to database
    :param selector: Database name in settings.py module
    :return: None
    """
    command.upgrade(config, revision)
    logger.success(f"Database {selector} upgraded")

observe_catalogue(**context) -> dict staticmethod

Passes context dict to illuminate.decorators.cli.show_observe_catalogue.

:return: dict

Source code in illuminate/manager/manager.py
@staticmethod
@show_observer_catalogue
def observe_catalogue(**context) -> dict:
    """
    Passes context dict to illuminate.decorators.cli.show_observe_catalogue.

    :return: dict
    """
    return context
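
A usage sketch, assuming the kwargs come from Assistant.provide_context as with the other entry points, and assuming the catalogue view does not need live database sessions (hence sessions=False):

from illuminate.manager.assistant import Assistant
from illuminate.manager.manager import Manager

# The show_observer_catalogue decorator presumably renders the returned
# context; the method itself just passes it through.
Manager.observe_catalogue(**Assistant.provide_context(sessions=False))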

observe_start(self) -> None

Starts producer/consumer ETL process.

:return: None

Source code in illuminate/manager/manager.py
@show_logo
@show_info
def observe_start(self) -> None:
    """
    Starts producer/consumer ETL process.

    :return: None
    """
    io_loop = ioloop.IOLoop.current()
    io_loop.run_sync(self._observe_start)

project_setup(name: str, path: str) -> None staticmethod

Creates a project directory with all needed files.

:param name: Project's name
:param path: Path to project files
:return: None
:raises BasicManagerException:

Source code in illuminate/manager/manager.py
@staticmethod
def project_setup(name: str, path: str) -> None:
    """
    Creates a project directory with all needed files.

    :param name: Project's name
    :param path: Path to project files
    :return: None
    :raises BasicManagerException:
    """
    if path != ".":
        path = os.path.join(path, name)
        if os.path.exists(path):
            raise BasicManagerException("Directory already exists")
        logger.opt(colors=True).info(
            f"Creating project directory for project "
            f"<yellow>{name}</yellow>"
        )
        os.mkdir(path)

    for _name, content in FILES.items():
        file_path = os.path.join(path, _name)
        if os.sep in _name:
            os.makedirs(
                os.sep.join(file_path.split(os.sep)[:-1]), exist_ok=True
            )
        with open(file_path, "w") as file:
            logger.debug(f"Creating project file {_name} at {file_path}")
            file.write(f"{content.format(name=name).strip()}\n")

    logger.success(f"Project structure created for {name}")