from abc import ABC, abstractmethod
from typing import Type, Any, NamedTuple, Union, Iterable

# TODO(baryluk): It would be nice to be able to set constraints in the base class,
# i.e. that memory is an int, and >1MB, in the base class, and this check
# should still run even if the property is overridden in the subclass.


# Note: This only affects 'from yacl_base import *'.
# The 'import yacl_base; yacl_base._some_other_thing' will still work.
# The 'from yacl_base import _some_other_things' will also work.
# So things in __all__ are more of an explicit public API, but other
# variables and objects will still be accessible (for example in yacl_eval
# and in unittests).
# Explicit public API for 'from yacl_base import *'.
# Fixed: 'deepcopy' was listed twice.
__all__ = [
  'ABC',  # Export that one too.
  'YaclBaseClass', 'GenericMixin',
  'no_default',
  'external', 'external_primary', 'required',
  'deepcopy',
  'appending', 'adding', 'adding_kvs', 'merging',
  'get', 'get_full_context', 'get_attribs',
  'start_time',
]

try:
    from beartype import beartype
except ModuleNotFoundError:
    # beartype is optional: fall back to a no-op decorator so the module
    # still works (without runtime type checking) when it is not installed.
    # (Was a lambda assignment; PEP 8 / E731 prefers a def.)
    def beartype(x):
        """Identity decorator used when beartype is unavailable."""
        return x


def export(fn):
    """Decorator: add *fn*'s name to its defining module's ``__all__``.

    Creates ``__all__`` on the module if it does not exist yet.
    Returns *fn* unchanged.
    """
    import sys
    module = sys.modules[fn.__module__]
    try:
        module.__all__.append(fn.__name__)
    except AttributeError:
        module.__all__ = [fn.__name__]
    return fn



# This is for internal debugging of various aspects of yacl and tracing.
_DEBUG = False

# Indentation counters used by _debug_print to visualize nesting of the
# evaluation (major = per attribute, minor = per re-entrant get()).
_major_debug_indent = 0
_minor_debug_indent = 0


# Used internally, when _DEBUG == True.
def _debug_print(*args):
    """Print *args* to stderr, indented by the current debug indent levels."""
    import sys

    depth = 2 * _major_debug_indent + _minor_debug_indent
    print("    " * depth, end="", file=sys.stderr)
    print(*args, file=sys.stderr)


@export
class YaclBaseClass(ABC):
    """Abstract base class for all yacl configuration classes.

    Subclasses declare attributes (plain values or zero-argument functions)
    that the yacl evaluator resolves lazily; instances are never created.
    """
    def __init__(self):
        """Dummy constructor. Raises exception. YaclBaseClasses and their subclasses
           are never instantiated directly or indirectly!"""
        # ABC only protects against instantiating the YaclBaseClass itself, not subclasses.
        # So add a constructor, that hopefully will make us catch such situation too.
        # (Fixed typo in the message: "Instatiating" -> "Instantiating".)
        raise Exception("Instantiating any sub-class of YaclBaseClass is not allowed")

    # You can override this to emit results conditionally. This is helpful for having
    # many different types of objects in single context.
    # Note: deliberately takes no 'self' - yacl attributes are evaluated as
    # plain functions, never as bound methods.
    def _enable():
        return True

    #@staticmethod
    #def _execute(result):
    #     # custom application specific output. i.e. perform file actions defined in result,
    #     # sshs to a server and do something, or convert result to json or protocol buffer.
    #     # This is the only method that is allowed to do IO and affect the world.
    #     # The result is a dict with all attributes evaluated to specific values.
    #     # It will not have private attributes (starting with _).
    #     # A special field '_type' will reference the most derived class that the result
    #     # is coming from. In general it should not be used to influence the logic, it is
    #     # there only for debugging or making error messages more clean.
    #     # Note that _execute can be overridden in sub-sub-classes. However, they are not
    #     # allowed to use get() or get('attrib_name'). And they can't access their parent
    #     # class _execute methods (because super() will not work). They are executed
    #     # as a standard Python static method with no extra yacl lookup logic.
    #     # A set of common execute methods is available in yacl_execute provided using factories.
    #     # Example: _execute = yacl_execute.json_serializer()
    #     # Some executors are parametrized and provide ways to change their behaviour (i.e. schema validation, output filepath, to use pretty-printing or not, etc).
    #     # Note, in the future it is expected that yacl will first evaluate all
    #     # the objects in one go, gather all results and their types, then perform
    #     # _execute on them in parallel (with some limits defined globally or
    #     # using command line flags).

@export
class GenericMixin(ABC):
    """This is only needed for 'generic' mixins. These are mixins that are expected
    to be used with many different unrelated YaclBaseClass sub-classes.
    For mixins that expect to (optionally) be mixed only into one specific YaclBaseClass,
    don't use this class, and instead inherit directly from the expected superclass,
    and add ABC to its inheritance list explicitly.
    """
    def __init__(self):
        """Dummy constructor. Raises exception. GenericMixins and their subclasses
           are never instantiated directly or indirectly!"""
        # ABC only protects against instantiating the GenericMixin itself, not subclasses.
        # So add a constructor, that hopefully will make us catch such situation too.
        # (Fixed typo in the message: "Instatiating" -> "Instantiating".)
        raise Exception("Instantiating any sub-class of GenericMixin is not allowed")

# Similar to 'required()'
@export
def no_default():
    """Similar to required(). Useful for documenting attributes.

       Needs to be overridden in a subclass.

       Raises:
           AssertionError: always; the message names the attribute currently
           being evaluated, when available.
    """
    # Bug fixes: the original message was a plain (non-f) string, so the
    # {...} placeholder was never interpolated; the 'global' statement named
    # a variable that no longer exists; and 'assert False' is stripped under
    # 'python -O'. Raise AssertionError explicitly instead.
    names = _current_full_eval_context.current_evaluated_attrib_name
    attrib = names[-1] if names else "<unknown>"
    raise AssertionError(f"This attribute ({attrib}) must be defined")

# eval_context:
class FullEvalContext:
    """Mutable state for a single yacl evaluation pass.

    A single module-level instance (_current_full_eval_context) is threaded
    through the evaluator; fields are saved/overwritten/restored as the
    evaluation descends through classes and attributes (see _get_uncached).
    """
    def __init__(self):
        self.current_full_context = None
        self.current_most_derived_class = None  # The class evaluation started from (top of mro).
        self.current_evaluated_class = None  # The class currently being resolved within the mro.
        self.current_evaluated_context = None   # One context item.
        self.current_evaluated_attrib_name = []  # stack
        self.memoized_attrib_values = {}  # Cache. # In case we evaluate same attrib many times, from different places. # Cleaned after each context item.

# TODO(baryluk): We can actually make it None. And initialize it in yacl_eval._evaluate()
_current_full_eval_context = FullEvalContext()


class EvalStackElement(NamedTuple):
    """One frame of the evaluation stack, recorded for debugging / tracing."""
    most_derived_class: type
    current_evaluated_class: type
    qualified_get: bool  # True for get('name'); False for the bare get() form.
    attrib_name: str
    extra_context: dict[str, Any]
    major_debug_indent: int
    minor_debug_indent: int


# Global stack of evaluation frames; pushed/popped around each attribute
# evaluation (see _total_eval_stack_push / _total_eval_stack_pop).
_total_eval_stack: list[EvalStackElement] = []

def _total_eval_stack_push(*, attrib_name, extra_context=None):
    """Snapshot the current evaluation state onto the global eval stack.

    Args:
        attrib_name: the explicitly requested attribute name, or None for the
            bare get() form (recorded as qualified_get = attrib_name is not None).
        extra_context: optional extra context dict stored in the frame.

    Fix: the previous mutable default argument ({}) was stored directly into
    the EvalStackElement, so all frames shared one dict object; use the
    None-sentinel idiom instead.
    """
    global _total_eval_stack, _current_full_eval_context, _major_debug_indent, _minor_debug_indent
    if extra_context is None:
        extra_context = {}
    ctx = _current_full_eval_context
    eval_stack_element = EvalStackElement(ctx.current_most_derived_class,
                                          ctx.current_evaluated_class,
                                          attrib_name is not None,
                                          ctx.current_evaluated_attrib_name[-1],
                                          extra_context,
                                          _major_debug_indent,
                                          _minor_debug_indent)

    _total_eval_stack.append(eval_stack_element)

def _total_eval_stack_pop():
    """Remove and return the most recently pushed EvalStackElement."""
    # No 'global' needed: we only call a method on the list, never rebind it.
    return _total_eval_stack.pop()



#@export

class FieldValidator(ABC):
    """Base class for attribute markers (External, ExternalPrimary, Required).

    Stores the declared type, a validator callable, the frameinfo of the
    declaration site (for error reporting), and an optional formatter.
    """
    def __init__(self, type_, validator, frameinfo, formatter=None):
        (self.type_, self.validator,
         self.frameinfo, self.formatter) = (type_, validator, frameinfo, formatter)


class External(FieldValidator):
    """Marker: the attribute's value must be supplied by the evaluation
    context (see the External handling in _get_uncached)."""
    def __init__(self, type_, validator, frameinfo):
        super().__init__(type_, validator, frameinfo)


class ExternalPrimary(FieldValidator):
    """Marker: like External, the value comes from the evaluation context.

    NOTE(review): the "primary" distinction is not visible in this file
    (it is handled identically to External in _get_uncached); presumably
    it matters to the context-expansion logic in yacl_eval - confirm there.
    """
    def __init__(self, type_, validator, frameinfo):
        super().__init__(type_, validator, frameinfo)


from inspect import currentframe, getframeinfo


@export
@beartype
def external(type_: type = object, validator = lambda x: True):
    """Declare an attribute whose value must be provided by the evaluation
    context.

    Args:
        type_: expected type of the supplied value.
        validator: predicate run against the supplied value at lookup time.

    Captures the caller's frame info so errors can point at the declaration.
    (Also normalized the body to the file's 4-space indentation.)
    """
    frameinfo = getframeinfo(currentframe().f_back)
    return External(type_, validator, frameinfo)


@export
@beartype
def external_primary(type_: type = object, validator = lambda x: True):
    """Declare a primary external attribute: the value is provided by the
    evaluation context, validated with *validator*.

    Captures the caller's frame info so later errors can point at the
    declaration site.
    """
    caller = currentframe().f_back
    return ExternalPrimary(type_, validator, getframeinfo(caller))


class Required(FieldValidator):
    """Marker: the attribute must be provided, usually by a subclass rather
    than by the context (see required())."""
    def __init__(self, type_: type, validator, frameinfo):
        super().__init__(type_, validator, frameinfo)


# Similar to external, but is supposed to be provided usually in the class, not in the context.
@export
@beartype
def required(type_ = object, validator = lambda x: True):
    """Mark an attribute that must be provided (typically overridden in a
    subclass rather than supplied via the context).

    NOTE(review): unlike external()/external_primary(), 'type_' carries no
    ': type' annotation here, so beartype does not enforce it - possibly
    intentional; confirm before tightening.
    """
    caller_frame = currentframe().f_back
    return Required(type_, validator, getframeinfo(caller_frame))


class _NoDefault:  # Sentinel. This is because we want to allow 'None' as default or initial values.
    """Sentinel type distinguishing "no default supplied" from an explicit None."""
    pass


# Selective import as a microoptimization, to do one less lookup.
from copy import deepcopy as copy_deepcopy
from marshal import loads as marshal_loads, dumps as marshal_dumps


@export
@beartype
def deepcopy(x):
    """Deep-copy *x* via a marshal round-trip.

    Faster than copy.deepcopy for plain data, because copy.deepcopy:
    1) is implemented in Python, not C;
    2) dispatches per-type and honors custom __copy__ hooks;
    3) currently copies lists via .append() instead of preallocating;
    4) supports recursive / self-referencing structures via a memo dict.
    The trade-off: only marshal-serializable values are supported here, and
    self-referencing structures are not handled.
    """
    serialized = marshal_dumps(x)
    return marshal_loads(serialized)
    # Fully general (slower) alternative:
    # return copy_deepcopy(x)


@export
@beartype
def appending(l2 : list):
    """A function that returns a callable that can be assigned to attribute.
       This function will append a list "l2" to the list returned by the
       next class in mro for the same attribute it was assigned to.

       It is just like adding, just for lists.

       Example:

       ```python3
       class A(YaclBaseClass):
           a = ["foo", "bar"]

       class B(A):
           a = appending(["baz"])
       ```

       Evaluating B will produce attribute "a" with value `["foo", "bar", "baz"]`.

       If attribute is not defined in any next class in mro, the evaluation
       will fail.

       TODO(baryluk): Consider adding `default_value` parameter. But the ordering
       will be confusing: a = appending(["baz"], ["foo"]), will produce:
       `["foo", "baz"]`.
    """
    def f():
        # Bug fix: the debug line referenced the removed global
        # '_current_evaluated_class', which raised NameError whenever
        # _DEBUG was enabled; use the FullEvalContext field instead.
        if _DEBUG: _debug_print(f"appending: {l2}.  {_current_full_eval_context.current_evaluated_class=}")
        l0 = get()
        if len(l2) == 0:
            return l0
        else:
            return l0 + l2

    return f


@export
@beartype
def adding(s2 : Union[set, list]):
    """A function that returns a callable that can be assigned to attribute.
       This function will add a set "s2" to the set returned by the
       next class in mro for the same attribute it was assigned to.
       If s2 is a list, it will be converted to a set first (with duplicates
       removed). In such case elements of the list must be hashable.

       It is just like appending, just for sets.

       Example:

       ```python3
       class A(YaclBaseClass):
           a = {"foo", "bar"}

       class B(A):
           a = adding({"cis"})
       ```

       Evaluating B will produce attribute "a" with value `{"bar", "cis", "foo"}`.

       If attribute is not defined in any next class in mro, the evaluation
       will fail.

       TODO(baryluk): Consider adding `default_value` parameter.
    """
    # Bug fix: the annotation was 's2 : set', but the documented (and
    # implemented) behaviour also accepts lists - under beartype the old
    # annotation rejected list arguments before the body could run.
    def f():
        if _DEBUG: _debug_print(f"adding: {s2}")
        if isinstance(s2, list):
            s0 = get()
            if len(s2) == 0:
                return s0
            s1 = deepcopy(s0)
            for e in s2:
                s1.add(e)
            return s1
        else:
            assert isinstance(s2, set)
            return get().union(s2)
            # return get() | s2  # Same; has worked in Python for a long time.
    return f


@export
@beartype
def adding_kvs(d2 : dict = {}, **kwargs):
    """A function that returns a callable that can be assigned to attribute.
       This function will add dictionary items and kwargs to the dict (or
       dict-like object) returned by the next class in mro for the same
       attribute it was assigned to.

       Example:

       ```python3
       class A(YaclBaseClass):
           a = {"foo": 5, "bar": 42}

       class B(A):
           a = adding_kvs({"cis": 32}, foo=3, zoo=100)
       ```

       Evaluating B will produce attribute "a" with value
       `{"bar": 42, "cis": 32, "foo": 3, "zoo": 100}`.

       If attribute is not defined in any next class in mro, the evaluation
       will fail (it does not provide empty starting dict).
    """
    # Note: the shared mutable default {} is safe here, because d2 is only
    # ever read (len() and '|'), never mutated.
    def f():
        d0 = get()
        if len(d2) == 0 and len(kwargs) == 0:
            return d0
        # {**d0, **d2} also works, but is a bit esoteric and loses the type of
        # d0 if it is a dict subclass; dict(d0, **d2) requires string keys
        # (breaks for integer or tuple keys).
        d3 = d0 | d2  # Since Python 3.9.0 for dicts.
        if len(kwargs):
            return d3 | kwargs
        return d3
    return f


@export
@beartype
def merging(d2 : dict = {}):
    """Similar to adding_kvs, but intended to merge deeply nested structs and
       lists.

       NOT IMPLEMENTED.

       Raises:
           NotImplementedError: always. (Previously this was 'assert False',
           which is stripped under 'python -O' and would then return a closure
           containing the bare name 'not_implemented', failing later with a
           confusing NameError.)
    """
    raise NotImplementedError("merging() is not fully implemented")


@export
@beartype
def getting(default_value, /):
    """Return a thunk that performs get() with the given default value.

    Assign to an attribute to make it fall back to *default_value* when no
    class in the mro (nor the context) defines it.
    """
    def f():
        return get(default_value=default_value)
    return f


@export
@beartype
def defaulting(default_value, /):
    """Return a thunk that performs get() with the given default value.

    Behaves identically to getting(); provided as a more descriptive alias.
    """
    def f():
        return get(default_value=default_value)
    return f


from typing import Type


class LazyEvaluateClass:
    """Marker base class for deferred nested evaluation.

    include() returns an ad-hoc subclass of this carrying 'cls' and
    'extra_context' attributes; the evaluator detects such subclasses (see
    _recursive_nested_evaluator) and performs the nested evaluation then.
    """
    pass


@export
@beartype
def include(cls_ : Type[YaclBaseClass], *, extra_context : Union[dict, None] = None):
    """Can be used as a value of attribute or as element of list, set (if hashable),
       or as a value in dicts, or recursive structures of these types,
       i.e. list of dicts with values being sets of lists ...

       Causes yacl class `cls_` to be evaluated in the same context as the
       current evaluation context item (and extra context if provided, which
       can provide new items or override existing ones). If extra_context
       is provided and non-empty, then a deepcopy of the context item will be
       made for safety during evaluation of the requested class (we call
       this a nested evaluation).

       Nested evaluated classes are evaluated just the same as normal classes
       (including their _enable logic), but their _execute is not performed
       (they are not serialized for example); however the type of the class
       and _execute are saved in the result.

       After evaluation the resulting object will be a string-keyed dict (just
       like evaluation to `yacl_eval.go`) possibly with complex values,
       and WITH `__type` and `__execute` fields present too. Final serializers
       are expected to remove these fields, but they are present for debuggability
       and discoverability purposes.

       Note: In the future, instead of a dict/list/set, a more complex object
       will be returned that behaves like a dict, but doesn't expose `__type`
       and `__execute` directly in iteration APIs (`.keys()`, `.items()`); they
       can still be accessed by introspecting the type and calling the proper
       API directly. This way the returned object is more transparent, and can
       also be serialized using a properly intended executor later (possibly
       under control of the user code).

       Example:

       ```python3
       class A(YaclBaseClass):
           a = 5
           def b(): return get_from_context("b") * 1000 + get('a')

       class B(A):
           a = 10


       class Z(YaclBaseClass):
           bar = 0
           foo = [include(A, extra_context={'b': 1}), include(B, extra_context={'b': 42}), "something else"]
       ```

       This will return (assuming empty context item):

       ```python3
       {"bar": 0,
        "foo": [{"a": 5, "b": 1005},
                {"a": 10, "b": 42010},
                "something else"
               ],
        '__type': Z
       }
       ```

       `include` can be called programmatically from inside `def` attributes
       too, however the result of the evaluation will not be available in
       the `def` function. The result of `include` must be returned back
       from the attribute to the yacl evaluator. If you wish to do include,
       and then inspect or modify results, do a two level processing:

       ```python3
       class A(YaclBaseClass):
           a = 5
           def b(): return get_from_context("b") * 1000 + get('a')

       class B(A):
           a = 10


       class Z(YaclBaseClass):
           def _foo_raw():
               r = []
               for i in range(10):
                   r += [include(A, extra_context={'b': 1 + i}), include(B, extra_context={'b': 42 + i})]
               return r

           def foo():
               evaluated_foo = get('foo_raw')
               # ... copy, filter, modify (after deepcopy), post-process.
               return evaluated_foo  # or something else.
       ```

       (Docstring examples fixed: `extra_context` is keyword-only, brackets
       were mismatched, and `_foo_raw` was missing its `return`.)

       Of course include can be used recursively. Evaluated classes only
       have access to the context and extra context. They don't have access
       to the attributes or types that are defined in the caller class.
       This promotes separation of code and data, as well as reusability.
    """
    assert issubclass(cls_, YaclBaseClass)
    # Bug fix: avoid the shared mutable default {} - the dict is stored on
    # the returned class, so a shared default could leak mutations between
    # unrelated include() calls.
    if extra_context is None:
        extra_context = {}
    assert isinstance(extra_context, dict)
    extra_context_ = extra_context

    # Ad-hoc subclass carrying the deferred evaluation request; the evaluator
    # detects LazyEvaluateClass subclasses and evaluates them lazily.
    class LazyEvaluate(LazyEvaluateClass):
        cls = cls_
        extra_context = extra_context_

    return LazyEvaluate


# TODO(baryluk): Recursive calls are expected to be frequent, and complex. So add
# a recursion level counter (or maybe use len(current_evaluated_attrib_name)
# as the indent). This would help, also because the effects will appear to be in
# reverse order, because we need to call the base classes first when self-referencing using get().


def _get_uncached(attrib_name : str = None, default_value = _NoDefault):
  """Internal function that implements everything behind `get` function.

  The `get` is just simplish wrapper around `_get_uncached` that does extra
  caching and minor checks.
  """
  global _current_full_eval_context, _DEBUG
  global _major_debug_indent, _minor_debug_indent

  assert _current_full_eval_context.current_most_derived_class is not None
  assert _current_full_eval_context.current_evaluated_class is not None
  assert _current_full_eval_context.current_evaluated_context is not None

  saved_current_evaluated_class = _current_full_eval_context.current_evaluated_class
  saved_current_most_derived_class = _current_full_eval_context.current_most_derived_class

  original_attrib_name = attrib_name

  found_base_class = None

  # TODO(baryluk): Using inner function is probably not the best for performance.
  # Either inline it manually, or put on the module level, and pass all paramters
  # it might be accessing, and write results using normal assignment.

  # this is essentially just like getattr(super(_current_full_eval_context.current_evaluated_class, _current_full_eval_context.current_evaluated_class), base_attrib_name)
  # super(A, mro) returns a proxy object, that does a resolution for each accessed attribute, starting with a class right after A in mro, in MRO order.
  # super() in method classes, is equivalent to super(ThisClassType, self), but the 2-argument explicit version can be used in free code outside of classes or methods too.
  # There reason we don't use super directly, is because we actually need to know which class we resolved into.
  # This is to support chaining of get() with no explicit attrib_name, as each such get is relative to the class it originate from, not top of the class hierarchy.
  def _custom_mro_lookup(attrib_name):
    # Retrive the value from the base class by manually traversing mro.

    # if _DEBUG: _debug_print(f"Evaluating attrib_name '{attrib_name}' using custom mro order")
    # if _DEBUG: _debug_print(f"   mro: {_current_full_eval_context.current_evaluated_class.mro()}")
    # if _DEBUG: _debug_print(f"   current_eval_class: {_current_full_eval_context.current_evaluated_class}")

    already_there = False
    found_base_class = None
    mro = _current_full_eval_context.current_most_derived_class.mro()
    mro_len = len(mro)
    i = 0
    while i < mro_len:
      base_class = mro[i]
      if base_class is _current_full_eval_context.current_evaluated_class:
        if _DEBUG: _debug_print(f"   Skipping same class {base_class} we started from")
        already_there = True
        i += 1
        continue
      if not already_there:
        if _DEBUG: _debug_print(f"   Skipping class {base_class} before the one we started")
        i += 1
        continue
      if _DEBUG: _debug_print(f"   Found class next after current: {base_class}, trying")
      if hasattr(base_class, attrib_name):
        if _DEBUG: _debug_print(f"      it has searched attribute! use it and break?")

        # hasattr will return True, even if the attrib_name is in not in the base_class
        # itself but in its parent. This is ok for the top level and base level classes,
        # but because we later modify the current_evaluate_class, this will mess up
        # when using mixins. For example class A(Mixin1, Mixin2): pass
        # and both Mixin1 and Mixin2 define it, we will find the attrib first
        # in Mixin1, but then when evaluating attrib in Mixin1, we will not
        # find attrib in Mixin2 (because Mixin2 doesn't inherit from Mixin1,
        # or vice-versa).
        # So we need to build a list of (class, own_attrib, attrib) for every class,
        # in the mro order, where own_attrib tells us if it was defined in this
        # class directly, or in some of its parents.
        # This can be done in one of two ways:
        #   - create a custom metaclass and derive YaclBaseClass from it,
        #     this way we can build this structure when constructing classes.
        #   - try to detect which attribs are defined where dynamically
        #     during lookup.
        #
        # A first method is not too hard and can be done precisely,
        # but uses complex Python techniques.
        # Second method is to is more tricky, but doable. Either
        # the attrib we are considering is a value or a function.
        # For functions it is either defined in place (using def),
        # or coming 'dynamically' from other place (like 'appending()').
        # In both cases these functions will be unique, so we can use
        # 'is'. If they compare same between classes, it doesn't make
        # too much difference. Their effect can be considerd equal.
        # (Share a literal function, with no parameters, like 'increment'
        #  would be broken tho, but that is good price to pay).
        # For values, well, if they are reused literally the same
        # object, again it doesn't matter. Otherwise if they are different
        # objects, we can assume they are different, even if they are the
        # same.

        possible_attrib = getattr(base_class, attrib_name)
        # Traverse rest of MRO, and see if it is defined higher in the
        # inheritance chain, if yes, then it is not actually defined in
        # 'base_class'.

        own_attrib = True
        j = i + 1
        while j < mro_len:
          base_class2 = mro[j]
          if _DEBUG: _debug_print(f"          Checking next class {base_class2} if it has this attribute and it is the same one")
          if hasattr(base_class2, attrib_name):
            if getattr(base_class2, attrib_name) is possible_attrib:
              own_attrib = False
              break
            # The base_class2 has attrib_name, but it is different, we can also break then.
            break
          j += 1

        if not own_attrib:
          # We found it, but it is the same function / value, as one found higher
          # in the inheritance chain, so don't use it.
          if _DEBUG: _debug_print(f"          Do not use it because it is actually defined in some parent class of the currently evaluated class")
          i = j  # Skip appropiate number of classes. This is to avoid O(N^2) lookups.
          continue
        else:
          if _DEBUG: _debug_print(f"          Use it, it is this class own attribute")

        attrib = possible_attrib
        found_base_class = base_class
        return found_base_class, attrib
        break
      else:
        if _DEBUG: _debug_print(f"      it does not have searched attribute! continue search")
        i += 1
    return None, None

  # Similar to '_custom_mro_lookup'. But ignores the value of '_current_evaluated_class',
  # starts the lookup from the most derived class. We still need to do manual mro
  # iteration, because we want not just the value of the attribute, but also to know
  # in which class exactly it was defined (so if the attrib is a function,
  # and it uses 'get()', we can properly handle it later using '_custom_mro_lookup'.
  def _custom_mro_lookup_rooted(attrib_name):
    found_base_class = None
    mro = _current_full_eval_context.current_most_derived_class.mro()
    for i, base_class in enumerate(mro):
      if _DEBUG: _debug_print(f"   Found next after current, using: {base_class}, trying")
      if hasattr(base_class, attrib_name):
        if _DEBUG: _debug_print(f"      it has searched attribute! use it and break")
        possible_attrib = getattr(base_class, attrib_name)

        own_attrib = True
        for j, base_class2 in enumerate(mro[i + 1:]):
          if _DEBUG: _debug_print(f"          Checking next class {base_class2} if it has this attribute and it is the same one")
          if hasattr(base_class2, attrib_name):
            if getattr(base_class2, attrib_name) is possible_attrib:
              own_attrib = False
              break
            break

        if not own_attrib:
          if _DEBUG: _debug_print(f"          Do not use it because it is actually defined in some parent class of the currently evaluated class")
          continue
        else:
          if _DEBUG: _debug_print(f"          Use it, it is this class own attribute")

        attrib = possible_attrib
        found_base_class = base_class
        return found_base_class, attrib
        break
      else:
        if _DEBUG: _debug_print(f"      it does not have searched attribute! continue search")
        i += 1
    return None, None

  if attrib_name is None:   # get() form.
    base_attrib_name = _current_full_eval_context.current_evaluated_attrib_name[-1]
    assert isinstance(base_attrib_name, str)

    attrib_name = base_attrib_name

    attrib = None

    if _DEBUG:
        _debug_print(f"doing unqualified lookup for '{attrib_name}'.")
        _debug_print(f"{_current_full_eval_context.current_evaluated_class         =}")
        _debug_print(f"{_current_full_eval_context.current_evaluated_class.mro()   =}")
        _debug_print(f"{_current_full_eval_context.current_most_derived_class.mro()=}")

    found_base_class, attrib = _custom_mro_lookup(base_attrib_name)
    # assert attrib is not None, f"attrib {attrib_name} not defined in any base classes of {current_evaluated_class}"
    # (TODO: maybe if attrib is None, lookup in context? but I think it shouldn't be allowed, as this makes use of '= external' optional and implicit, which I think would be a bad practice)
    # if attrib was found, evaluate it as usual (including lambda / func evals).
    _current_full_eval_context.current_evaluated_attrib_name.append(base_attrib_name)
  else:  # get('actual_name') form.
    assert not attrib_name.startswith('__'), f"Using get with attribute starting with __ is not allowed. Got: {attrib_name}"
    assert isinstance(attrib_name, str)

    # TODO(baryluk): Lookup for '_name', if get('name') was used, but 'name' attribute doesn't exist.
    if not attrib_name.startswith('_') and not hasattr(_current_full_eval_context.current_most_derived_class, attrib_name) and hasattr(_current_full_eval_context.current_most_derived_class, '_' + attrib_name):
      attrib_name = '_' + attrib_name

    if hasattr(_current_full_eval_context.current_most_derived_class, attrib_name) and hasattr(_current_full_eval_context.current_most_derived_class, '_' + attrib_name):
      assert False, f"Defining both {attrib_name} and _{attrib_name} not allowed."

    _current_full_eval_context.current_evaluated_attrib_name.append(attrib_name)  # append in case there is call to 'get(other_attrib_name') or 'get()' if attrib is a FunctionType.

    if _DEBUG: _debug_print(f"doing qualified lookup for '{attrib_name}'.\n   {_current_full_eval_context.current_evaluated_class=}\n   {_current_full_eval_context.current_evaluated_class.mro()   =}\n   {_current_full_eval_context.current_most_derived_class.mro()=}")

    # TODO(baryluk): Problem with this is that, even if the current_evaluate_class has no attrib_name itself,
    # this will resolve to something.
    # We need to redo the mro similar to above, and remember the class.
    #attrib = getattr(current_evaluated_class, attrib_name, None)
    found_base_class, attrib = _custom_mro_lookup_rooted(attrib_name)

  import types

  try:
    if found_base_class is not None:
      _current_full_eval_context.current_evaluated_class = found_base_class   # will restore in 'finally' (even on return)

    if not hasattr(_current_full_eval_context.current_evaluated_class, attrib_name):  # We do allow value 'None', which is different than missing attribute (or bug).
      if attrib_name in _current_full_eval_context.current_evaluated_context:
        value = _current_full_eval_context.current_evaluated_context[attrib_name]
        if _DEBUG: _debug_print(f"  Using value from the context instead: {value}")
        return value
      else:
        if default_value is not _NoDefault:
          if _DEBUG: _debug_print(f"  Using default value argument: {default_value}")
          return default_value
        return getattr(_current_full_eval_context.current_evaluated_class, attrib_name)  # This will generate AttributeError.

    # _debug_print(attrib_name, "value:", attrib, "type:", type(attrib))

    # TODO(baryluk): Check base classes if it is marked as required or something.

    if attrib is None:
      return None

    if isinstance(attrib, External) or isinstance(attrib, ExternalPrimary):
      # _debug_print("attrib:", attrib, "is external, checking context:", _current_full_eval_context.current_evaluated_context)

      attrib_value = _current_full_eval_context.current_evaluated_context[attrib_name]

      if attrib.validator:
        assert attrib.validator(attrib_value), f"attrib {attrib_name} with value {attrib_value} failed validation specified in the base class"

      #if attrib.formatter:
      #  

      return attrib_value

    # attrib = getattr(_current_full_eval_context.current_evaluated_class, attrib_name, None)

    # TODO(baryluk): We need to check entire MRO (including the base class),
    # to check for validators. The field maybe was required, or external,
    # and was provided, but validators might still need to be run.

    # We allow forms of value, or function. In case of function, eval it (and handle reentry to 'get' if needed).
    if isinstance(attrib, types.FunctionType):
        if _DEBUG: _debug_print(f"   attrib: {attrib} - evaluate it")
        _minor_debug_indent += 1

        _total_eval_stack_push(attrib_name=original_attrib_name)
        attrib = attrib()
        _total_eval_stack_pop()
        _minor_debug_indent -= 1

    # Does a deep iteration over lists, dicts and sets, and values.
    # If any value or element of the container is a subclass of
    # LazyEvaluateClass (which is usually a result of use of 'include'
    # function), evaluate these classes too.
    def _recursive_nested_evaluator(start_attrib):
      if type(start_attrib) is type and issubclass(start_attrib, YaclBaseClass):
        _total_eval_stack_push(attrib_name=original_attrib_name)
        nested_evaluate_result = _nested_evaluate(start_attrib)
        _total_eval_stack_pop()
        if nested_evaluate_result is not None:
          del nested_evaluate_result['__type']
          del nested_evaluate_result['__execute']
        return nested_evaluate_result
      elif type(start_attrib) is type and issubclass(start_attrib, LazyEvaluateClass):
        _total_eval_stack_push(attrib_name=original_attrib_name)
        nested_evaluate_result = _nested_evaluate(start_attrib.cls, start_attrib.extra_context)
        _total_eval_stack_pop()
        if nested_evaluate_result is not None:
          del nested_evaluate_result['__type']
          del nested_evaluate_result['__execute']
        return nested_evaluate_result
      elif isinstance(start_attrib, list):
        new_list = []
        for list_elem in start_attrib:
          if type(list_elem) is type and issubclass(list_elem, LazyEvaluateClass):
            _total_eval_stack_push(attrib_name=original_attrib_name)
            nested_evaluate_result = _nested_evaluate(list_elem.cls, list_elem.extra_context)
            _total_eval_stack_pop()
            # TODO(baryluk): We might also allow for this: [include_with_name(SomeClass), include_with_name(SomeOtherClass)]
            # Each class will define a key (name), and the result of the evluation will be a dict, not list.
            # Additionally we will check that there are no duplicate keys.
            if nested_evaluate_result is not None:  # None indicats that _enable returned False.
              del nested_evaluate_result['__type']
              del nested_evaluate_result['__execute']
              new_list.append(nested_evaluate_result)
          else:
            new_list.append(_recursive_nested_evaluator(list_elem))
        return new_list
      elif isinstance(start_attrib, dict):
        new_dict = type(start_attrib)()  # Use same type as attrib, not just dict() / {} blindly.
        for key, value in start_attrib.items():
          if type(value) is type and issubclass(value, LazyEvaluateClass):
            _total_eval_stack_push(attrib_name=original_attrib_name)
            nested_evaluate_result = _nested_evaluate(value.cls, value.extra_context)
            _total_eval_stack_pop()
            if nested_evaluate_result is not None:  # None indicats that _enable returned False.
              del nested_evaluate_result['__type']
              del nested_evaluate_result['__execute']
              new_dict[key] = nested_evaluate_result
          else:
            new_dict[key] = _recursive_nested_evaluator(value)
        return new_dict
      else:
        # Leave attrib as is.
        return start_attrib

    # We now need to iterate the result if it is a list or dict.
    # If any of the elements or values, is subclass of YaclBaseClass,
    # we need to recursively call yacl_eval.evalute_ on it with the same context.
    if _DEBUG: _debug_print(f"   attrib: {attrib} - before nested evaluator (checkig lists, dicts, sets for LazyEvaluateClass recursively)")
    _major_debug_indent += 1
    attrib = _recursive_nested_evaluator(attrib)
    _total_eval_stack_push(attrib_name=original_attrib_name)
    _total_eval_stack_pop()
    _major_debug_indent -= 1
    if _DEBUG: _debug_print(f"   attrib: {attrib} - after nested evaluator")

    assert not(type(attrib) is type and issubclass(attrib, Required)), f"Required attribute {attrib_name} not overriden. Most derived class: {_fullname(_current_full_eval_context.current_most_derived_class)}."
    assert not(isinstance(attrib, Required)), f"Required attribute {attrib_name} not overriden. Most derived class: {_fullname(_current_full_eval_context.current_most_derived_class)}. Location of attribute definition: {attrib.frameinfo.filename}:{attrib.frameinfo.lineno}"

    return attrib
  finally:
      _current_full_eval_context.current_evaluated_attrib_name.pop()
      _current_full_eval_context.current_evaluated_class = saved_current_evaluated_class
      _current_full_eval_context.current_most_derived_class = saved_current_most_derived_class


@export
@beartype
def get(attrib_name : Union[str, None] = None, /, default_value = _NoDefault):
  """The heart of inheritance mechanism in yacl.

     Can only be called inside ("static") functions inside YaclBaseClass
     subclasses. We call these functions "yacl class attributes".
     Their name is provided by the class attribute definition.

     A simple attribute "x" can be defined by simply assigning a
     non-function value:

     ```python3
     class A(YaclBaseClass):
         x = 7    # attribute "x" with value 7.
     ```

     However yacl shines, when functions are used to define attributes,
     as that allows for dynamic creation of attribute values based on
     other attributes, as well most (or intermediate) derived classes
     overrides (which can themselves be values or functions), and inheritance
     (called templating) of classes and mixins, which can do mix of
     overrides or chaining and modifying in both direction.

     ```python3
     class A(YaclBaseClass):
        def x():
            return get('y') * 1000
        y = 3

     class B(A):
        y = 7

        # x will evaluate to 7000
     ```

     ```python3
     class A(YaclBaseClass):
        def x():
            return 10
        y = 3

     class B(A):
        def x():
            return get() + 42    # will evaluate to 52
     ```


    Two modes of operation:

    1. When called with `attrib_name` present. This is called qualified lookup.

     Returns (possibly from internal cache) the value of the attribute
     as defined in the most derived class (or class that is closest to the
     most derived class) in the inheritance chain.
     If attribute is not found in any class in the inheritance chain,
     it returns a value from the context item that is currently being
     evaluated. If not found either default_value is returned if set
     (can be any value, including `None`, `False`, `[]`, etc), and
     memoized in the internal cache. Otherwise exception is thrown.

     Direct and indirect self recursion within same context (that doesn't
     cover the use of `include` function to implement nesting) is prohibited,
     because it will lead to the infinite recursion (there is no tail call
     optimisation, or way to break such infinite recursion without also
     modifying the global state, which is a bad practice). Evaluator will
     detect such recursions as soon as possible and throw an exception.

     Example:

     ```python3
     class A(YaclBaseClass):
         x = 5

     class B(A):
         def y():
             return get('x') * 10

     class C(B):
         x = 9

     class D(C):    # Note superclass!
         x = 1

     class E(B):    # Note superclass!
         x = 1
     ```

     When evaluated C, the result will contain `{"y": 90}`  (and `{"x": 9}).

     Evaluating B, the result will contain `{"y": 50}`  (and `{"x": 5}).

     Evaluating D and E, the result will contain `{"y": 10}`  (and `{"x": 1}).

     Example:

     ```python3
     class A(YaclBaseClass):
         x = 5
         def z():
            return get('x') + get('y')

     class B(A):
         def y():
             return get('x') * 10

     class C(B):
         x = 900
         y = 100
     ```

     When evaluated C, the result will contain `{"y": 100, "z": 1000}`  (and `{"x": 900}).

     Note that yacl class A, will fail to evaluate on its own, because
     it references attribute "z" that is not defined in A or its base classes.
     Such class will be usually called abstract, and marked as such using `ABC`
     metaclass.

     You can think of `get(attrib_name)` as a form of virtual dispatch, like
     `self.attrib_name()` from standard Python. But it has some extra
     functionalities when considering multiple inheritance and mixins,
     that cannot be implemented standard Python objects method calls.


    2. When called without `attrib_name`. This is called unqualified lookup.

     Returns (not utilizing any caching in the current yacl version) the value
     of the attribute with the SAME name as currently evaluated attribute,
     but searching for it after the current class in the inheritance chain
     (using standard Python method resolution order aka MRO).
     Calling `get()` without `attrib_name` in the most base class, with no
     superclass defining a attribute in question, will lead to lookup error.
     Indirect recursion back to other attributes (using qualified lookups)
     is allowed, but any recursion back to the same exact attribute and class,
     would cause infinite recursion and is detected and raises an exception.

     Usage of functional style accumulation using some weird tricks is not
     allowed, because for any practical purposes it will be inefficient,
     uses a lot of stack, makes code convoluted, and in practice is just
     a sign of bad design in your yacl code or a bug in your yacl code.

     Example:

     Assume we evaluated class C (the last one).

     ```
     class A(YaclBaseClass):
         x = 5

     class B(A):
         def x():
             return get() * 10   # will return 50

         def y():
             return get('x')     # will return 51.

     class C(B):
         def x():
             return get() + 1    # will return 51
     ```

     You can think of this as similar to `super().attrib_name()` from
     standard Python. However, it works in specific way when using multiple
     inheritance and mixins, that makes it unique.

     Static functions are used instead of normal methods with `self`
     for few reasons: 1) It is shorter to type. 2) Allows us to do both
     values and function with same caller semantic. 3) self would allow
     code to do stateful computations, which could create nondeterministic
     results if the order of evaluation is random or unpredictable,
     and lead to poor separation of concerns and isolation. 4) It would
     not help with desired mixin and multiple inheritance semantics
     we want to accomplish, without using very complex metaclasses.

     The difference to `super()` shows in multiple inheritance and mixins.

     Example:
     ```python3
     class A(YaclBaseClass, ABC):
         x = ["A"]

     class BMix(A):
         def x(): return get() + ["B"]

     class CMix(A):
         def x(): return get() + ["C"]

     class D(A):
         def x(): return get() + ["D"]

     class E(D, CMix, BMix):
         def x(): return get() + ["E"]
     ```

     When evaluating yacl class E, it will produce `{"x": ["A", "B", "C", "D", "E"]}`.


     Standard Python approach would be this:

     ```python3
     class A:
         def x(self):
             return ["A"]

     class BMix(A):
         def x(self): return super().x() + ["B"]

     class CMix(A):
         def x(self): return super().x() + ["C"]

     class D(A):
         def x(self): return super().x() + ["D"]

     class E(CMix, D, BMix):
         def x(self): return super().x() + ["E"]


     print(E().x())
     ```

     Which is significantly more verbose, and in some situations, can
     lead to losing functionality if coded without the care.


     Notes:

     If the attribute value found is a callable, it must be a zero argument
     callable, that will be automatically evaluated once, and its result
     will be used as an actual value. This means:

     ```python3
     class A(YaclBaseClass):
        a = 5

     class B(YaclBaseClass):
        def a():
           return 5

     class C(YaclBaseClass):
        a = lambda: 5

     def halfconstant(x):
         return lambda: x//2

     class D(YaclBaseClass):
        a = halfconstant(10)
     ```

     All behave EXACTLY the same.


     Note: It is perfectly fine to call using keyword arguments for the default value:

     ```python3
     class MyMixin(GenericMixin):
        def foo():
           return get(default_value=5) * 10
     ```
  """
  global _current_full_eval_context

  # The form get() is currently uncached. Otherwise we would need to cache it
  # per class too, as get() does evaluate differently depending on inheritance level.

  if attrib_name is None:
    # Unqualified lookup is only legal while some attribute is being evaluated.
    assert len(_current_full_eval_context.current_evaluated_attrib_name) >= 1
  else:
    assert isinstance(attrib_name, str)
    assert len(attrib_name) >= 1

    if attrib_name in _current_full_eval_context.current_evaluated_attrib_name:
      assert False, f"Recursion detected in get('{attrib_name}') with current get stack of {_current_full_eval_context.current_evaluated_attrib_name}"

    if attrib_name in _current_full_eval_context.memoized_attrib_values:
      # Take value from cache
      attrib_value = _current_full_eval_context.memoized_attrib_values[attrib_name]
      if _DEBUG: _debug_print(f"Taking value from cache: {attrib_value}")
      return attrib_value

    _current_full_eval_context.current_evaluated_attrib_name.append(attrib_name)

  _total_eval_stack_push(attrib_name=attrib_name)
  try:
    attrib_value = _get_uncached(attrib_name, default_value)
  finally:
    # Unwind both evaluation stacks even when _get_uncached raises, so that an
    # exception caught higher up does not leave stale entries behind (stale
    # entries would trigger bogus recursion detection or corrupt later
    # unqualified lookups). Mirrors the try/finally used by _get_uncached.
    _total_eval_stack_pop()
    if attrib_name is not None:
      _current_full_eval_context.current_evaluated_attrib_name.pop()

  if attrib_name is not None:
    _current_full_eval_context.memoized_attrib_values[attrib_name] = attrib_value  # cache

  return attrib_value


@export
@beartype
def has(attrib_name : str, /) -> bool:
    """Tell whether `attrib_name` would resolve at all.  NOT IMPLEMENTED YET.

       A True result is not a guarantee that `get(attrib_name)` will succeed:
       the attribute's own implementation may still raise, or reference other
       attributes that fail to resolve. The intended audience is code that
       walks many optional, dynamically created attributes via iteration.

       The unqualified form `has()` is deliberately rejected (for now):
       whenever `has()` would be useful, the caller is some subclass where
       the full inheritance chain -- and therefore every attribute -- is
       statically known to the developer and the code.
       Note: the unqualified form might become legal in the future for
       GenericMixins, which do not share this property.
    """
    raise NotImplementedError


@export
@beartype
def get_attribs(*, prefix : str = '', strip_prefix : bool = True) -> Union[Iterable[str], list[str], set[str]]:
    """Returns a list of all attribute names starting with `prefix`,
       with the prefix removed (unless strip_prefix is False).

       Listing with an empty prefix is discouraged, and might generate
       a warning in the future.

       This function does not consult the current context item.
    """
    global _current_full_eval_context

    # Internal hooks that must never be reported as attributes.
    hidden_names = {'_enable', '_execute'}

    # Strip nothing when stripping is disabled or the prefix is empty.
    cut = len(prefix) if (strip_prefix and prefix) else 0

    return [
        name[cut:]
        for name in dir(_current_full_eval_context.current_most_derived_class)
        if name not in hidden_names and name.startswith(prefix)
    ]


@export
@beartype
def get_from_context(attrib_name : Union[str, None] = None, default_value: Any = _NoDefault, /) -> Any:
    """Just like `get(attrib_name)` / `get()` but bypasses all inheritance,
       and only looks in the current context item. If key is not found,
       `default_value` is returned, if it is provided,
       otherwise a `KeyError` exception is thrown.

       Note: both parameters are positional-only, so providing a
       `default_value` requires passing `attrib_name` explicitly -- possibly
       as `None` to mean "the currently evaluated attribute's name" (hence
       the `Union[str, None]` annotation, which beartype enforces).
    """
    global _current_full_eval_context

    if attrib_name is None:
      # Fall back to the name of the attribute currently being evaluated.
      assert len(_current_full_eval_context.current_evaluated_attrib_name) >= 1
      attrib_name = _current_full_eval_context.current_evaluated_attrib_name[-1]

    if attrib_name in _current_full_eval_context.current_evaluated_context:
        value = _current_full_eval_context.current_evaluated_context[attrib_name]
        if _DEBUG: _debug_print(f"  Using value from the context: {value}")
        return value

    if default_value is not _NoDefault:
        if _DEBUG: _debug_print(f"  Using default value argument: {default_value}")
        return default_value

    raise KeyError(attrib_name)
    # return _current_full_eval_context.current_evaluated_context[attrib_name]  # This will create the same KeyError exception.


@export
def get_full_context() -> list[dict[str, Any]]:
    """Returns the full context set -- every context item, the currently
       evaluated one included.

       A few example uses:
       - in some replicated storage systems, we might automatically
         compute which dc is master, and which is a slave.
       - we can pass a list of other dcs, to one dc, to create a full mesh of
         connections.
       - we can size a global monitoring dashboard, and its parameters, based
         on the knowledge of the number of monitoring jobs and monitored jobs
         in each cluster.

       The returned object (including all of its subobjects) must be treated
       as strictly read-only; modifying it is illegal, and this might be
       enforced by crashing the interpreter without any ability to recover
       via exception handling!
    """
    global _current_full_eval_context

    full_context = _current_full_eval_context.current_full_context
    return full_context



import time

# Timestamp captured exactly once, at module import time. Exposed through
# start_time() and the start_time_iso* helpers so that repeated calls all
# observe the same stable value.
_start_time_ = time.time()


@export
def start_time() -> float:
    """Return a stable startup timestamp (seconds since the epoch).

    The value is captured once, at module import time, so every
    subsequent call returns exactly the same float."""
    return _start_time_


@export
def start_time_iso() -> str:
    """Same as start_time(), but returns a string in semi-ISO8601 style
    (date and time separated by a space instead of 'T').

    Whether a local time zone is used, or UTC, and whether the time zone
    is included in the string, will be configurable in the future.
    """
    from datetime import datetime

    # Format the captured epoch timestamp; local time for now (see docstring).
    # Previously this returned str(_start_time_), i.e. a raw float -- not any
    # form of ISO8601 at all.
    return datetime.fromtimestamp(_start_time_).isoformat(sep=' ')


@export
def start_time_iso8601() -> str:
    """Same as start_time(), but returns a string in compliant ISO8601 style
    ('T' separator, with an explicit UTC offset).

    Whether a local time zone is used, or UTC, and whether the time zone
    is included in the string, will be configurable in the future.
    """
    from datetime import datetime

    # astimezone() attaches the local UTC offset, making the string a valid,
    # unambiguous ISO8601 timestamp. Previously this returned
    # str(_start_time_), i.e. a raw float -- not ISO8601 at all.
    return datetime.fromtimestamp(_start_time_).astimezone().isoformat()


@beartype
def _nested_evaluate(cls : Type[YaclBaseClass], extra_context : Union[dict[str, Any], None] = None, /) -> Union[dict[str, Any], None]:
    """Evaluate `cls` under a fresh FullEvalContext, layering `extra_context`
    on top of the current context item.

    Returns the evaluation result dict, or None (callers interpret None as
    "the class's _enable returned False"), hence the Optional return
    annotation -- a plain dict annotation would make beartype reject a
    legitimate None result.

    `extra_context` defaults to None (meaning "no extra context") rather
    than to a mutable `{}` literal, which would be shared across calls.
    """
    global _current_full_eval_context

    saved_full_eval_context = _current_full_eval_context
    _current_full_eval_context = FullEvalContext()  # new full eval context
    _current_full_eval_context.current_full_context = saved_full_eval_context.current_full_context

    # Copy the context data and append the extra context.
    new_context_item = saved_full_eval_context.current_evaluated_context | (extra_context or {})

    # We don't set current_evaluated_context here, because yacl_eval._evaluate
    # already does that for us, and expects an empty evaluated_context initially.

    try:
        import yacl_eval

        return yacl_eval._evaluate(cls, new_context_item, execute=False)
    finally:
        # Always restore the caller's context, even if evaluation raised.
        _current_full_eval_context = saved_full_eval_context


#@private
def _fullname(cls: type):
    return cls.__module__ + '.' + cls.__qualname__
