last working state.

2025-12-13 14:12:35 +01:00
parent 1761de8acb
commit 841bc7c805
227 changed files with 694550 additions and 251 deletions


@@ -0,0 +1,181 @@
# genotype.py
import json
import random
import time
from typing import Dict, List, Tuple, Any, Optional
def generate_id() -> float:
return random.random()
def create_neural_weights(vector_length: int) -> List[float]:
return [random.uniform(-2.0, 2.0) for _ in range(vector_length)]
def construct(
morphology_module,
hidden_layer_densities: List[int],
file_name: Optional[str] = None,
*,
add_bias: bool = False,
) -> Dict[str, Any]:
rnd_seed = time.time_ns() & 0xFFFFFFFF
random.seed(rnd_seed)
S = morphology_module.get_InitSensor(morphology_module)
A = morphology_module.get_InitActuator(morphology_module)
sensor = {
"id": S.get("id", generate_id()),
"name": S["name"],
"vector_length": int(S["vector_length"]),
"cx_id": None, # wird später gesetzt
"fanout_ids": [], # wird später gesetzt
# optional:
# "scape": S.get("scape")
}
actuator = {
"id": A.get("id", generate_id()),
"name": A["name"],
"vector_length": int(A["vector_length"]),
"cx_id": None, # wird später gesetzt
"fanin_ids": [], # wird später gesetzt
# optional:
# "scape": A.get("scape")
}
output_vl = actuator["vector_length"]
layer_densities = list(hidden_layer_densities) + [output_vl]
cortex_id = generate_id()
layers = _create_neuro_layers(
cx_id=cortex_id,
sensor=sensor,
actuator=actuator,
layer_densities=layer_densities,
add_bias=add_bias,
)
input_layer = layers[0]
output_layer = layers[-1]
sensor["cx_id"] = cortex_id
sensor["fanout_ids"] = [n["id"] for n in input_layer]
actuator["cx_id"] = cortex_id
actuator["fanin_ids"] = [n["id"] for n in output_layer]
neuron_ids = [n["id"] for layer in layers for n in layer]
cortex = {
"id": cortex_id,
"sensor_ids": [sensor["id"]],
"actuator_ids": [actuator["id"]],
"neuron_ids": neuron_ids,
}
    # 7) Assemble the genotype
genotype = {
"cortex": cortex,
"sensor": sensor,
"actuator": actuator,
"neurons": [n for layer in layers for n in layer],
}
    # 8) Optionally save to disk
if file_name:
save_genotype(file_name, genotype)
return genotype
def _create_neuro_layers(
cx_id: float,
sensor: Dict[str, Any],
actuator: Dict[str, Any],
layer_densities: List[int],
*,
add_bias: bool,
) -> List[List[Dict[str, Any]]]:
layers: List[List[Dict[str, Any]]] = []
input_idps: List[Tuple[float, int]] = [(sensor["id"], sensor["vector_length"])]
for layer_index, layer_density in enumerate(layer_densities):
neuron_ids = [generate_id() for _ in range(layer_density)]
if layer_index < len(layer_densities) - 1:
next_ids = [generate_id() for _ in range(layer_densities[layer_index + 1])]
output_ids = next_ids
else:
output_ids = [actuator["id"]]
this_layer: List[Dict[str, Any]] = []
for _nid in neuron_ids:
proper_input = _create_neural_input(input_idps, add_bias=add_bias)
neuron = {
"id": _nid,
"layer_index": layer_index,
"cx_id": cx_id,
"activation_function": "tanh",
"input_weights": [{"input_id": i, "weights": w} for (i, w) in proper_input],
"output_ids": output_ids[:], # Kopie
}
this_layer.append(neuron)
layers.append(this_layer)
input_idps = [(n["id"], 1) for n in this_layer]
return layers
def _is_bias_tuple(t: Tuple[Any, Any]) -> bool:
key, _ = t
return isinstance(key, str) and key == "bias"
def _create_neural_input(
input_idps: List[Tuple[float, int]],
*,
add_bias: bool,
) -> List[Tuple[Any, List[float]]]:
proper: List[Tuple[Any, List[float]]] = []
for input_id, vl in input_idps:
proper.append((input_id, create_neural_weights(vl)))
if add_bias:
proper.append(("bias", [random.random() - 0.5]))
return proper
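# Illustrative example (weights are random; the values shown are made up):
#   _create_neural_input([(0.42, 2)], add_bias=True)
#   -> [(0.42, [0.71, -1.3]), ("bias", [0.08])]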
def save_genotype(file_name: str, genotype: Dict[str, Any]) -> None:
with open(file_name, "w") as f:
json.dump(genotype, f, indent=2)
def load_from_file(file_name: str) -> Dict[str, Any]:
with open(file_name, "r") as f:
return json.load(f)
def print_genotype(file_name: str) -> None:
g = load_from_file(file_name)
cx = g["cortex"]
print("[CORTEX]", cx)
sids = cx.get("sensor_ids", [])
nids = cx.get("neuron_ids", [])
aids = cx.get("actuator_ids", [])
nid2n = {n["id"]: n for n in g.get("neurons", [])}
sid2s = {g["sensor"]["id"]: g["sensor"]} if "sensor" in g else {s["id"]: s for s in g.get("sensors", [])}
aid2a = {g["actuator"]["id"]: g["actuator"]} if "actuator" in g else {a["id"]: a for a in g.get("actuators", [])}
for sid in sids:
print("[SENSOR]", sid2s.get(sid))
for nid in nids:
print("[NEURON]", nid2n.get(nid))
for aid in aids:
print("[ACTUATOR]", aid2a.get(aid))


@@ -0,0 +1,848 @@
# genotype.py.old
from __future__ import annotations
import asyncio
import json
import random
import time
from typing import Any, Dict, List, Tuple, Optional, Callable
from mathema.core.db import Neo4jDB
# ------------------------------------------------------------
# ID serialization in the style of the book (as strings)
# ------------------------------------------------------------
def erlang_id_cortex(uid: float) -> str:
return f"{{{{origin,{uid}}},cortex}}"
def erlang_id_neuron(layer_idx: int, uid: float) -> str:
return f"{{{{{layer_idx},{uid}}},neuron}}"
def erlang_id_sensor(uid: float) -> str:
return f"{{{uid},sensor}}"
def erlang_id_actuator(uid: float) -> str:
return f"{{{uid},actuator}}"
def now_unique() -> float:
return time.time()
def generate_ids(n: int) -> List[float]:
return [now_unique() + i * 1e-6 for i in range(n)]
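# Example outputs (uid values are illustrative):
#   erlang_id_cortex(1.5)    -> "{{origin,1.5},cortex}"
#   erlang_id_neuron(0, 1.5) -> "{{0,1.5},neuron}"
#   erlang_id_sensor(1.5)    -> "{1.5,sensor}"
#   erlang_id_actuator(1.5)  -> "{1.5,actuator}"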
# ------------------------------------------------------------
# GenotypeBuilder
# ------------------------------------------------------------
class GenotypeBuilder:
"""
Async-Port von Erlangs genotype. Benötigt:
- Neo4jDB
- morphology.py mit get_InitSensors(morph) / get_InitActuators(morph)
- optional: population_monitor mit async create_specie(pop_id, constraint, fingerprint) -> specie_id
"""
def __init__(self, db: Neo4jDB, population_monitor: Optional[Any] = None):
self.db = db
        self.population_monitor = population_monitor  # expects .create_specie(...)
# ====================== Public API ======================
async def construct_Agent(self, specie_id: Any, agent_id: Any, speccon: Dict[str, Any]) -> Dict[str, Any]:
random.seed(time.time())
generation = 0
cx_id, pattern = await self.construct_Cortex(agent_id, generation, speccon)
agent = {
"id": agent_id,
"cx_id": cx_id,
"specie_id": specie_id,
"constraint": speccon,
"generation": generation,
"pattern": pattern,
"evo_hist": [],
# default vals
"population_id": None,
"fingerprint": None,
"fitness": None,
"innovation_factor": 0
}
await self._write_agent(agent)
await self.update_fingerprint(agent_id)
return agent
    async def construct_Cortex(
        self, agent_id: Any, generation: int, speccon: Dict[str, Any]
    ) -> Tuple[Any, List[Tuple[int, List[Any]]]]:
from importlib import import_module
morphology_mod = import_module("morphology")
cx_uid = now_unique()
cx_id = erlang_id_cortex(cx_uid)
morphology_name = speccon["morphology"]
init_sensors = morphology_mod.get_InitSensor(morphology_name)
init_actuators = morphology_mod.get_InitActuator(morphology_name)
sensors = []
for S in init_sensors:
uid = now_unique()
sensors.append({
**S,
"id": erlang_id_sensor(uid),
"cx_id": cx_id,
"generation": generation,
"fanout_ids": S.get("fanout_ids", [])
})
actuators = []
for A in init_actuators:
uid = now_unique()
actuators.append({
**A,
"id": erlang_id_actuator(uid),
"cx_id": cx_id,
"generation": generation,
"fanin_ids": A.get("fanin_ids", [])
})
neuron_ids = await self.construct_InitialNeuroLayer(cx_id, generation, speccon, sensors, actuators)
sensor_ids = [s["id"] for s in sensors]
actuator_ids = [a["id"] for a in actuators]
cortex = {
"id": cx_id,
"agent_id": agent_id,
"neuron_ids": neuron_ids,
"sensor_ids": sensor_ids,
"actuator_ids": actuator_ids
}
await self._write_cortex_and_io(cortex, sensors, actuators)
pattern = [(0, neuron_ids)]
return cx_id, pattern
async def construct_InitialNeuroLayer(
self,
cx_id: Any,
generation: int,
speccon: Dict[str, Any],
sensors: List[Dict[str, Any]],
actuators: List[Dict[str, Any]],
) -> List[Any]:
neuron_ids: List[Any] = []
for A in actuators:
vl = int(A["vector_length"])
n_uids = generate_ids(vl)
n_ids = [erlang_id_neuron(0, u) for u in n_uids]
for n_id in n_ids:
if random.random() >= 0.5:
S = random.choice(sensors)
input_specs = [(S["id"], int(S["vector_length"]))]
S["fanout_ids"] = [n_id] + list(S.get("fanout_ids", []))
else:
input_specs = [(S["id"], int(S["vector_length"])) for S in sensors]
for S in sensors:
S["fanout_ids"] = [n_id] + list(S.get("fanout_ids", []))
await self.construct_Neuron(cx_id, generation, speccon, n_id, input_specs, [A["id"]])
A["fanin_ids"] = n_ids + list(A.get("fanin_ids", []))
neuron_ids.extend(n_ids)
return neuron_ids
    # Replaces the earlier _pack_inputs/_unpack_inputs versions.
    """
    Helper functions, needed because we still use Erlang-style list structures.
    They become unnecessary once we switch to a Neo4j-friendly data model.
    """
def _pack_inputs(self, input_idps: list[dict]) -> tuple[list, list, list]:
ids, wflat, lengths = [], [], []
for e in input_idps:
if "bias" in e:
vals = list(e["bias"])
ids.append("bias")
else:
vals = list(e["weights"])
ids.append(e["id"])
lengths.append(len(vals))
wflat.extend(vals)
return ids, wflat, lengths
def _unpack_inputs(self, input_ids: list, input_w: list, input_w_len: list) -> list[dict]:
out, i = [], 0
for iid, L in zip(input_ids or [], input_w_len or []):
            chunk = list(input_w[i:i + L])
            i += L
if iid == "bias":
out.append({"bias": chunk})
else:
out.append({"id": iid, "weights": chunk})
return out
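    # Illustrative round trip (values are made up):
    #   _pack_inputs([{"id": 0.1, "weights": [0.2, -0.3]}, {"bias": [0.05]}])
    #     -> ([0.1, "bias"], [0.2, -0.3, 0.05], [2, 1])
    #   _unpack_inputs([0.1, "bias"], [0.2, -0.3, 0.05], [2, 1])
    #     -> [{"id": 0.1, "weights": [0.2, -0.3]}, {"bias": [0.05]}]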
def _normalize_scape(self, v: Any) -> Any:
        # only primitive property types are allowed
if isinstance(v, (str, int, float, bool)) or v is None:
return v
if isinstance(v, dict) and "private" in v:
return v["private"] # dein bisheriger Fall: {"private":"xor_sim"}
return str(v)
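    # Examples:
    #   _normalize_scape({"private": "xor_sim"}) -> "xor_sim"
    #   _normalize_scape(["a", "b"])             -> "['a', 'b']" (stringified)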
async def construct_Neuron(
self,
cx_id: Any,
generation: int,
speccon: Dict[str, Any],
n_id: Any,
input_specs: List[Tuple[Any, int]],
output_ids: List[Any],
) -> None:
input_idps = self._create_InputIdPs(input_specs)
        # explicit bias entry (project requirement)
input_idps.append({"bias": [self._rand_weight()]})
neuron = {
"id": n_id,
"generation": generation,
"cx_id": cx_id,
"af": self._generate_NeuronAF(speccon.get("neural_afs", [])),
"input_idps": input_idps,
"output_ids": output_ids,
"ro_ids": self._calculate_ROIds(n_id, output_ids),
}
await self._write_neuron(neuron)
async def update_fingerprint(self, agent_id: Any) -> None:
a = await self._read_agent(agent_id)
if not a:
return
cx = await self._read_cortex(a["cx_id"])
if not cx:
return
        # --- read the IO elements ---
sensors = await self._read_sensors(cx.get("sensor_ids", []))
actuators = await self._read_actuators(cx.get("actuator_ids", []))
        # --- take the pattern from the agent; _read_agent has already deserialized it ---
raw_pat = a.get("pattern", []) or []
gen_pattern: List[Tuple[int, int]] = []
for item in raw_pat:
if isinstance(item, (list, tuple)) and len(item) >= 2:
li, ids_or_cnt = item[0], item[1]
try:
li = int(li)
except Exception:
continue
if isinstance(ids_or_cnt, (list, tuple)):
cnt = len(ids_or_cnt)
elif isinstance(ids_or_cnt, int):
cnt = ids_or_cnt
else:
cnt = 0
gen_pattern.append((li, cnt))
        # --- generalize the evolution history (yields primitive structures) ---
gen_evo = self._generalize_EvoHist(a.get("evo_hist", []))
        # --- reduce the IO descriptors to primitive types ---
        def _vec_len(x):
            # fall back to the flat "vl" property used on the Neo4j nodes (assumed key)
            return int(x.get("vector_length", x.get("vl", 0)) or 0)
gen_s_desc = [(s.get("name"), _vec_len(s), self._normalize_scape(s.get("scape"))) for s in sensors]
gen_a_desc = [(ac.get("name"), _vec_len(ac), self._normalize_scape(ac.get("scape"))) for ac in actuators]
        # --- build the fingerprint and store it as JSON ---
fp_obj = {
"pattern": gen_pattern,
"evo": gen_evo,
"sensors": gen_s_desc,
"actuators": gen_a_desc,
}
fp_json = json.dumps(fp_obj, ensure_ascii=False)
await self._set_agent_fingerprint(agent_id, fp_json)
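    # Illustrative fingerprint JSON (names and values are made up;
    # json.dumps turns the tuples into lists):
    #   {"pattern": [[0, 1]], "evo": [],
    #    "sensors": [["xor_GetInput", 2, "xor_sim"]],
    #    "actuators": [["xor_SendOutput", 1, "xor_sim"]]}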
    # ----------------- Additions ported from Erlang -----------------
async def speciate(self, agent_id: Any) -> None:
"""Port von speciate/1."""
await self.update_fingerprint(agent_id)
A = await self._read_agent(agent_id)
if not A:
return
        # test agent?
if A["id"] == "test":
await self._set_agent_fitness(agent_id, None)
return
        # fetch the parent specie and its population
Parent_S = await self._read_specie(A["specie_id"])
if not Parent_S:
            # without a specie, speciation is not possible
return
P = await self._read_population(Parent_S["population_id"])
if not P:
return
        # find a specie with the same fingerprint in the population
same: Optional[Any] = await self._find_specie_by_fingerprint(P["id"], A["fingerprint"])
if same is None:
            # create a new specie (via population_monitor)
if not self.population_monitor or not hasattr(self.population_monitor, "create_specie"):
                raise RuntimeError(
                    "speciate(): population_monitor.create_specie(pop_id, constraint, fingerprint) is missing")
new_specie_id = await self.population_monitor.create_specie(P["id"], A["constraint"], A["fingerprint"])
S = await self._read_specie(new_specie_id)
if not S:
return
            # update the agent: new specie_id, fitness=undefined
await self._update_agent_fields(agent_id, {"specie_id": new_specie_id, "fitness": None})
            # extend the specie's agent list (prepending as in the book; order does not matter)
await self._append_specie_agent(new_specie_id, agent_id, replace=False)
else:
            # update the existing specie
await self._update_agent_fields(agent_id, {"specie_id": same, "fitness": None})
await self._append_specie_agent(same, agent_id, replace=False)
async def clone_Agent(self, agent_id: Any, clone_agent_id: Optional[Any] = None) -> Any:
"""
Port von clone_Agent/1,2 (vereinfacht transaktional durch Reihenfolge).
- erzeugt neue IDs nach Buchregeln
- kopiert Nodes mit remappten IDs
- schreibt Cortex/Agent-Knoten für den Klon
"""
if clone_agent_id is None:
clone_agent_id = f"{{{now_unique()},agent}}"
A = await self._read_agent(agent_id)
if not A:
raise ValueError(f"agent not found: {agent_id}")
Cx = await self._read_cortex(A["cx_id"])
if not Cx:
raise ValueError(f"cortex not found for agent: {agent_id}")
        # 1) build the ID mapping
idmap: Dict[str, str] = {}
        # bias stays 'bias', so it is not remapped (like the Erlang ETS bias->bias)
idmap["bias"] = "bias"
# Agent
idmap[str(agent_id)] = str(clone_agent_id)
        # remap the cortex ID: {{origin,uid},cortex} -> same layer tag 'origin'
cx_new = erlang_id_cortex(now_unique())
idmap[str(Cx["id"])] = cx_new
        # remap neuron/sensor/actuator IDs
for nid in Cx.get("neuron_ids", []):
# {{L,uid},neuron} -> {{L,new},neuron}
layer = self._parse_layer(nid) or 0
idmap[str(nid)] = erlang_id_neuron(layer, now_unique())
for sid in Cx.get("sensor_ids", []):
idmap[str(sid)] = erlang_id_sensor(now_unique())
for aid in Cx.get("actuator_ids", []):
idmap[str(aid)] = erlang_id_actuator(now_unique())
        # 2) load the original elements
sensors = await self._read_sensors(Cx.get("sensor_ids", []))
neurons = await self._read_neurons(Cx.get("neuron_ids", []))
actuators = await self._read_actuators(Cx.get("actuator_ids", []))
        # 3) write the clones (sensors/actuators/neurons)
        # (remap IDs, cx_id, *_ids, input_idps)
clone_sensors = []
for S in sensors:
clone_sensors.append({
**S,
"id": idmap[str(S["id"])],
"cx_id": idmap[str(S["cx_id"])],
"fanout_ids": [idmap.get(str(x), str(x)) for x in S.get("fanout_ids", [])],
})
clone_actuators = []
for A0 in actuators:
clone_actuators.append({
**A0,
"id": idmap[str(A0["id"])],
"cx_id": idmap[str(A0["cx_id"])],
"fanin_ids": [idmap.get(str(x), str(x)) for x in A0.get("fanin_ids", [])],
})
        # neurons: remap input_idps, output_ids, ro_ids
clone_neurons = []
for N in neurons:
            # reconstruct from the flat properties
orig_idps = self._unpack_inputs(N.get("input_ids"), N.get("input_w"), N.get("input_w_len"))
# IDs remappen
remapped_idps = []
for e in orig_idps:
if "bias" in e:
remapped_idps.append({"bias": e["bias"]})
else:
remapped_idps.append({"id": idmap.get(str(e["id"]), str(e["id"])), "weights": e["weights"]})
            # pack flat again
input_ids, input_w, input_w_len = self._pack_inputs(remapped_idps)
clone_neurons.append({
"id": idmap[str(N["id"])],
"cx_id": idmap[str(N["cx_id"])],
"generation": N.get("generation", 0),
"af": N.get("af", "tanh"),
"input_ids": input_ids,
"input_w": input_w,
"input_w_len": input_w_len,
"output_ids": [idmap.get(str(x), str(x)) for x in N.get("output_ids", [])],
"ro_ids": [idmap.get(str(x), str(x)) for x in N.get("ro_ids", [])],
})
# 4) Cortex & Agent (Klon) schreiben
clone_cortex = {
"id": cx_new,
"agent_id": idmap[str(agent_id)],
"sensor_ids": [c["id"] for c in clone_sensors],
"actuator_ids": [c["id"] for c in clone_actuators],
"neuron_ids": [c["id"] for c in clone_neurons],
}
        # neurons first, then cortex + IO, so the HAS_* edges resolve immediately
for n in clone_neurons:
await self._write_neuron(n)
await self._write_cortex_and_io(clone_cortex, clone_sensors, clone_actuators)
        # agent clone: update only id + cx_id; take the rest from the original
clone_agent = {
**A,
"id": idmap[str(agent_id)],
"cx_id": cx_new,
}
await self._write_agent(clone_agent)
return clone_agent["id"]
async def test(self) -> None:
"""Port von test/0 (vereinfacht, ohne Transaktion)."""
Specie_Id = "test"
Agent_Id = "test"
CloneAgent_Id = "test_clone"
SpecCon = {"morphology": "xor_mimic", "neural_afs": ["tanh", "cos", "gauss", "abs"]}
await self.construct_Agent(Specie_Id, Agent_Id, SpecCon)
await self.clone_Agent(Agent_Id, CloneAgent_Id)
await self.print(Agent_Id)
await self.print(CloneAgent_Id)
await self.delete_Agent(Agent_Id)
await self.delete_Agent(CloneAgent_Id)
async def create_test(self) -> None:
"""Port von create_test/0."""
Specie_Id = "test"
Agent_Id = "test"
SpecCon = {"morphology": "xor_mimic", "neural_afs": ["tanh", "cos", "gauss", "abs"]}
a = await self._read_agent(Agent_Id)
if a is None:
await self.construct_Agent(Specie_Id, Agent_Id, SpecCon)
await self.print(Agent_Id)
else:
await self.delete_Agent(Agent_Id)
await self.construct_Agent(Specie_Id, Agent_Id, SpecCon)
await self.print(Agent_Id)
# ====================== Helper ============================
def _create_InputIdPs(self, input_specs: List[Tuple[Any, int]]) -> List[Dict[str, Any]]:
res: List[Dict[str, Any]] = []
for input_id, vl in input_specs:
weights = [self._rand_weight() for _ in range(int(vl))]
res.append({"id": input_id, "weights": weights})
return res
def _rand_weight(self) -> float:
return random.random() - 0.5
def _generate_NeuronAF(self, afs: List[Any]) -> Any:
if not afs:
return "tanh"
return random.choice(afs)
def _parse_layer(self, neuron_id: str) -> Optional[int]:
try:
if "neuron" not in neuron_id:
return None
inner = neuron_id.split("},neuron")[0]
inner = inner.strip("{}").strip("{}")
parts = inner.split(",")
return int(parts[0])
except Exception:
return None
def _calculate_ROIds(self, self_id: Any, output_ids: List[Any]) -> List[Any]:
my_layer = self._parse_layer(str(self_id)) or 0
acc: List[Any] = []
for oid in output_ids:
s = str(oid)
if "actuator" in s:
continue
li = self._parse_layer(s)
if li is not None and li <= my_layer:
acc.append(oid)
return acc
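    # Example: for a layer-1 neuron, only outputs back to the same or an
    # earlier layer count as recurrent (actuators never do):
    #   _calculate_ROIds("{{1,2.0},neuron}",
    #                    ["{{0,3.0},neuron}", "{4.0,actuator}"])
    #   -> ["{{0,3.0},neuron}"]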
def _generalize_EvoHist(self, evo_hist: List[Any]) -> List[Any]:
def strip_id(id_str: Any) -> Any:
s = str(id_str)
if "neuron" in s:
try:
inner = s.split("},neuron")[0].strip("{}").strip("{}")
layer = int(inner.split(",")[0])
return (layer, "neuron")
except Exception:
return ("?", "neuron")
if "actuator" in s:
return ("actuator",)
if "sensor" in s:
return ("sensor",)
return ("?",)
generalized = []
for item in evo_hist:
if isinstance(item, (list, tuple)):
g = []
for el in item:
if isinstance(el, (list, tuple)) and len(el) == 2:
g.append((el[0], strip_id(el[1])))
else:
g.append(strip_id(el))
generalized.append(tuple(g))
else:
generalized.append(item)
return generalized
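    # Example of the generalization (ids are illustrative):
    #   strip_id("{{0,1.5},neuron}") -> (0, "neuron")
    #   strip_id("{1.5,sensor}")     -> ("sensor",)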
    # =============== Neo4j write/read helpers ===============
async def _write_agent(self, agent: Dict[str, Any]) -> None:
cy = """
MERGE (a:agent {id:$id})
SET a.generation=$generation,
a.population_id=COALESCE($population_id, a.population_id),
a.specie_id=$specie_id,
a.cx_id=$cx_id,
a.fingerprint=COALESCE(a.fingerprint, $fingerprint),
a.constraint_json=$constraint_json,
a.evo_hist=$evo_hist,
a.fitness=COALESCE($fitness, a.fitness),
a.innovation_factor=COALESCE($innovation_factor, 0),
a.pattern=$pattern_json
"""
payload = {
**agent,
"constraint_json": json.dumps(agent.get("constraint", {}), ensure_ascii=False),
"pattern_json": json.dumps(agent.get("pattern", []), ensure_ascii=False),
}
await self.db.run_consume(cy, **payload)
async def _update_agent_fields(self, agent_id: Any, fields: Dict[str, Any]) -> None:
sets = ", ".join([f"a.{k}=${k}" for k in fields.keys()])
cy = f"MATCH (a:agent {{id:$id}}) SET {sets}"
await (await self.db.run_read(cy, id=agent_id, **fields)).consume()
async def _set_agent_fingerprint(self, agent_id: Any, fingerprint: Any) -> None:
cy = "MATCH (a:agent {id:$id}) SET a.fingerprint=$fp"
await (await self.db.run_read(cy, id=agent_id, fp=fingerprint)).consume()
async def _set_agent_fitness(self, agent_id: Any, fitness: Any) -> None:
cy = "MATCH (a:agent {id:$id}) SET a.fitness=$fitness"
await (await self.db.run_read(cy, id=agent_id, fitness=fitness)).consume()
async def _read_agent(self, agent_id: Any) -> Optional[Dict[str, Any]]:
cy = "MATCH (a:agent {id:$id}) RETURN a"
rec = await self.db.read_single(cy, id=agent_id)
if not rec:
return None
a = dict(rec["a"])
        # robust deserialization (json is already imported at module level)
# constraint
cj = a.get("constraint_json")
if isinstance(cj, str):
try:
a["constraint"] = json.loads(cj)
except Exception:
a["constraint"] = {}
elif "constraint" not in a or a["constraint"] is None:
a["constraint"] = {}
        # prefer pattern_json; fall back to a pattern stored as a string
pj = a.get("pattern_json")
if isinstance(pj, str):
try:
a["pattern"] = json.loads(pj)
except Exception:
a["pattern"] = []
else:
p = a.get("pattern")
if isinstance(p, str):
try:
a["pattern"] = json.loads(p)
except Exception:
a["pattern"] = []
elif p is None:
a["pattern"] = []
return a
async def _read_specie(self, specie_id: Any) -> Optional[Dict[str, Any]]:
rec = await self.db.read_single("MATCH (s:specie {id:$id}) RETURN s", id=specie_id)
return dict(rec["s"]) if rec else None
async def _read_population(self, population_id: Any) -> Optional[Dict[str, Any]]:
rec = await self.db.read_single("MATCH (p:population {id:$id}) RETURN p", id=population_id)
return dict(rec["p"]) if rec else None
async def _find_specie_by_fingerprint(self, population_id: Any, fingerprint: Any) -> Optional[Any]:
"""Suche in Population eine Spezies mit exakt gleichem Fingerprint (wie im Buch)."""
rec = await self.db.read_single("""
MATCH (p:population {id:$pid})-[:HAS_SPECIE]->(s:specie)
WHERE s.fingerprint = $fp
RETURN s.id AS sid
LIMIT 1
""", pid=population_id, fp=fingerprint)
return rec["sid"] if rec else None
async def _append_specie_agent(self, specie_id: Any, agent_id: Any, replace: bool = False) -> None:
"""Fügt agent_id in specie.agent_ids ein; erstellt HAS_AGENT-Kante."""
cy = """
MATCH (s:specie {id:$sid})
SET s.agent_ids = CASE WHEN $replace THEN [$aid] ELSE coalesce([$aid] + s.agent_ids, [$aid]) END
"""
await (await self.db.run_read(cy, sid=specie_id, aid=agent_id, replace=replace)).consume()
        # relationship:
await (await self.db.run_read("""
MATCH (s:specie {id:$sid}), (a:agent {id:$aid})
MERGE (s)-[:HAS_AGENT]->(a)
""", sid=specie_id, aid=agent_id)).consume()
async def _write_cortex_and_io(self, cortex: Dict[str, Any], sensors: List[Dict[str, Any]],
actuators: List[Dict[str, Any]]) -> None:
sensors = [{**s, "scape": self._normalize_scape(s.get("scape"))} for s in sensors]
actuators = [{**a, "scape": self._normalize_scape(a.get("scape"))} for a in actuators]
await self.db.run_consume("""
MERGE (c:cortex {id:$id})
SET c.agent_id=$agent_id, c.neuron_ids=$neuron_ids, c.sensor_ids=$sensor_ids, c.actuator_ids=$actuator_ids
""", **cortex)
if sensors:
await self.db.run_consume("""
UNWIND $sensors AS s
MERGE (x:sensor {id:s.id})
SET x.name=s.name,
x.cx_id=s.cx_id,
x.scape=s.scape,
            x.vl=s.vector_length,
x.fanout_ids=COALESCE(s.fanout_ids, []),
x.generation=s.generation
""", sensors=sensors)
if actuators:
await self.db.run_consume("""
UNWIND $actuators AS a
MERGE (x:actuator {id:a.id})
SET x.name=a.name,
x.cx_id=a.cx_id,
x.scape=a.scape,
            x.vl=a.vector_length,
x.fanin_ids=COALESCE(a.fanin_ids, []),
x.generation=a.generation
""", actuators=actuators)
await self.db.run_consume("""
MATCH (c:cortex {id:$cx})
WITH c
UNWIND c.sensor_ids AS sid
MATCH (s:sensor {id:sid})
MERGE (c)-[:HAS_SENSOR]->(s)
""", cx=cortex["id"])
await self.db.run_consume("""
MATCH (c:cortex {id:$cx})
WITH c
UNWIND c.actuator_ids AS aid
MATCH (a:actuator {id:aid})
MERGE (c)-[:HAS_ACTUATOR]->(a)
""", cx=cortex["id"])
await self.db.run_consume("""
MATCH (c:cortex {id:$cx})
WITH c
UNWIND c.neuron_ids AS nid
MATCH (n:neuron {id:nid})
MERGE (c)-[:HAS_NEURON]->(n)
""", cx=cortex["id"])
await self.db.run_consume("""
MATCH (c:cortex {id:$cx})
WITH c
UNWIND c.sensor_ids AS sid
MATCH (s:sensor {id:sid})
UNWIND s.fanout_ids AS nid
MATCH (n:neuron {id:nid})
MERGE (s)-[:FANOUT_TO]->(n)
""", cx=cortex["id"])
await self.db.run_consume("""
MATCH (c:cortex {id:$cx})
WITH c
UNWIND c.actuator_ids AS aid
MATCH (a:actuator {id:aid})
UNWIND a.fanin_ids AS nid
MATCH (n:neuron {id:nid})
MERGE (a)-[:FANIN_FROM]->(n)
""", cx=cortex["id"])
# <<< HIER am Ende hinzufügen: OUTPUT_TO-Kanten für alle Neuronen dieses Cortex >>>
await self.db.run_consume("""
MATCH (c:cortex {id:$cx})
UNWIND c.neuron_ids AS nid
MATCH (n:neuron {id:nid})
UNWIND n.output_ids AS oid
CALL {
WITH oid
OPTIONAL MATCH (m:neuron {id:oid}) RETURN m AS dst
UNION
WITH oid
OPTIONAL MATCH (a:actuator {id:oid}) RETURN a AS dst
}
WITH n, dst WHERE dst IS NOT NULL
MERGE (n)-[:OUTPUT_TO]->(dst)
""", cx=cortex["id"])
async def _write_neuron(self, neuron: Dict[str, Any]) -> None:
        # normalize the input representation
if "input_idps" in neuron:
input_ids, input_w, input_w_len = self._pack_inputs(neuron["input_idps"])
else:
            # already delivered in flat form
input_ids = neuron["input_ids"]
input_w = neuron["input_w"]
input_w_len = neuron["input_w_len"]
await self.db.run_consume("""
MERGE (n:neuron {id:$id})
SET n.generation=$generation,
n.cx_id=$cx_id,
n.af=$af,
n.input_ids=$input_ids,
n.input_w=$input_w,
n.input_w_len=$input_w_len,
n.output_ids=$output_ids,
n.ro_ids=$ro_ids
""",
id=neuron["id"],
generation=neuron["generation"],
cx_id=neuron["cx_id"],
af=neuron["af"],
input_ids=input_ids,
input_w=input_w,
input_w_len=input_w_len,
output_ids=neuron["output_ids"],
ro_ids=neuron["ro_ids"],
)
if neuron["ro_ids"]:
await self.db.run_consume("""
MATCH (n:neuron {id:$nid})
UNWIND $ros AS rid
MATCH (src:neuron {id:rid})
MERGE (n)-[:READS_OUTPUT_OF]->(src)
""", nid=neuron["id"], ros=neuron["ro_ids"])
async def _read_cortex(self, cx_id: Any) -> Optional[Dict[str, Any]]:
rec = await self.db.read_single(
"MATCH (c:cortex {id:$id}) RETURN c",
id=cx_id,
)
return dict(rec["c"]) if rec else None
async def _read_sensors(self, ids: List[Any]) -> List[Dict[str, Any]]:
if not ids:
return []
rows = await self.db.read_all("""
UNWIND $ids AS sid
MATCH (s:sensor {id:sid}) RETURN s
""", ids=ids)
return [dict(r["s"]) for r in rows]
async def _read_actuators(self, ids: List[Any]) -> List[Dict[str, Any]]:
if not ids:
return []
rows = await self.db.read_all("""
UNWIND $ids AS aid
MATCH (a:actuator {id:aid}) RETURN a
""", ids=ids)
return [dict(r["a"]) for r in rows]
async def _read_neurons(self, ids: List[Any]) -> List[Dict[str, Any]]:
if not ids:
return []
rows = await self.db.read_all("""
UNWIND $ids AS nid
MATCH (n:neuron {id:nid}) RETURN n
""", ids=ids)
return [dict(r["n"]) for r in rows]
# -------------- Convenience: delete / print ---------------
async def delete_Agent(self, agent_id: Any) -> None:
a = await self._read_agent(agent_id)
if not a:
return
cx = await self._read_cortex(a["cx_id"])
if cx:
await (await self.db.run_read("""
MATCH (c:cortex {id:$cid})
OPTIONAL MATCH (c)-[:HAS_NEURON]->(n:neuron)
OPTIONAL MATCH (c)-[:HAS_SENSOR]->(s:sensor)
OPTIONAL MATCH (c)-[:HAS_ACTUATOR]->(a:actuator)
DETACH DELETE n, s, a, c
""", cid=cx["id"])).consume()
await (await self.db.run_read("MATCH (a:agent {id:$id}) DETACH DELETE a", id=agent_id)).consume()
async def print(self, agent_id: Any) -> None:
a = await self._read_agent(agent_id)
if not a:
print("agent not found:", agent_id)
return
cx = await self._read_cortex(a["cx_id"])
print("AGENT:", a)
print("CORTEX:", cx)
if not cx:
return
sensors = await self._read_sensors(cx.get("sensor_ids", []))
res = await self.db.read_all("""
UNWIND $ids AS nid
MATCH (n:neuron {id:nid}) RETURN n
""", ids=cx.get("neuron_ids", []))
neurons = [dict(r["n"]) for r in res]
actuators = await self._read_actuators(cx.get("actuator_ids", []))
for s in sensors:
print("SENSOR:", s)
for n in neurons:
print("NEURON:", n)
for ac in actuators:
print("ACTUATOR:", ac)