#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Parse ED-data from DORN files and build event / B-channel histograms.

Reads "ED" lines from the input file, groups hits into events by a time
threshold, optionally filters events by trigger conditions, and writes the
resulting histograms as whitespace-separated CSV files into ./hists/.
"""

import argparse
import sys
from pathlib import Path

import numpy as np
import pandas as pd

from DORNconfiguration import NMAHEPAM_channels, SETH_channels, AHEPAM_channels
from event_filters import prepare_trigger_indices, check_trigger

# ---------------------------
# Argumente
# ---------------------------
parser = argparse.ArgumentParser(description="Parse ED-data for DORN files with triggers")
parser.add_argument("file", type=str)
parser.add_argument("-map", type=str, default="ALL", choices=["ALL", "NMAHEPAM", "SETH", "AHEPAM"])
parser.add_argument("-eventhist", action="store_true")
parser.add_argument("-Bhist", action="store_true")
parser.add_argument("-trigger", nargs="+", default=None)
parser.add_argument("-time", type=int, default=200)
parser.add_argument("-nameadd", type=str, default="")
args = parser.parse_args()

file = args.file
if not Path(file).is_file():
    print("No valid file:", file)
    sys.exit(1)  # BUG FIX: exit non-zero so callers/shell scripts see the failure

filename = Path(file).stem
Path("hists").mkdir(parents=True, exist_ok=True)

# ---------------------------
# Parameter
# ---------------------------
# Histogram ranges and bin resolutions (presumably ADC-derived units — the
# 0.838214 factor comes from the detector calibration; confirm against config).
minX_event, maxX_event, resX_event = -100, 5000, 0.838214 * 4
minX_B, maxX_B, resX_B = -5, 300, 0.838214 / 2


# ---------------------------
# Mapping (OPTIMIERT)
# ---------------------------
def Evaluate_mapping(mapping_name):
    """Build the channel mapping for *mapping_name*.

    Returns a 4-tuple ``(columns, resolver, channels, lookup)``:
      columns  -- histogram column names, one per mapped channel
      resolver -- callable ``(channel, slot) -> column index`` (or None if
                  the hardware channel is not part of the mapping)
      channels -- list of channel config dicts (None for "ALL")
      lookup   -- dict mapping hardware channel number -> column index

    Exits the process on an unknown mapping name.
    """
    if mapping_name == "ALL":
        # Raw hardware layout: 24 channels on each of 2 slots, identity mapping.
        columns = [f"0-{i}" for i in range(24)] + [f"1-{i}" for i in range(24)]
        resolver_dict = {i: i for i in range(len(columns))}
        return columns, lambda ch, sl: ch + sl * 24, None, resolver_dict

    if mapping_name == "NMAHEPAM":
        channels = NMAHEPAM_channels()
    elif mapping_name == "SETH":
        channels = SETH_channels()
    elif mapping_name == "AHEPAM":
        channels = AHEPAM_channels()
    else:
        sys.exit("Unknown mapping")

    # Only channels with a non-empty name participate in the histograms.
    valid = [ch for ch in channels if ch["name"]]
    columns = [ch["name"] for ch in valid]

    # Fast resolver: hardware channel number (ch + slot*24) -> column index.
    hwc_to_idx = {ch["hwc"]: i for i, ch in enumerate(valid)}

    def resolver(ch, sl):
        return hwc_to_idx.get(ch + sl * 24)

    return columns, resolver, valid, hwc_to_idx


# ---------------------------
# Histogram
# ---------------------------
def create_event_histogram(columns, resolver, mapping_channels, time_threshold=200,
                           triggers=None, B_only=False):
    """Scan the input file and accumulate a histogram of event amplitudes.

    columns          -- histogram column names from Evaluate_mapping()
    resolver         -- (channel, slot) -> column index, or None to skip
    mapping_channels -- channel config dicts providing per-channel gain ("u")
                        and threshold ("thr"); may be None (ALL mapping)
    time_threshold   -- maximum time spread (same units as the file's time
                        column) of hits belonging to one event
    triggers         -- optional trigger names forwarded to the event filter
    B_only           -- histogram only the B1..B6 channels plus their sum

    Returns a 2-D ndarray: column 0 is the bin's lower edge, the remaining
    columns are per-channel counts (plus a SUM column in B_only mode).
    """
    # Precomputed name -> column index map instead of repeated list.index().
    col_index = {name: i for i, name in enumerate(columns)}

    if B_only:
        B_channels = ["B1", "B2", "B3", "B4", "B5", "B6"]
        B_index = {b: i for i, b in enumerate(B_channels)}
        bins = int((maxX_B - minX_B) / resX_B)
        hist = np.zeros((bins + 1, len(B_channels) + 2))  # value + B1..B6 + SUM
        hist[:, 0] = np.linspace(minX_B, maxX_B, bins + 1)
    else:
        bins = int((maxX_event - minX_event) / resX_event)
        hist = np.zeros((bins + 1, len(columns) + 1))  # value + one per channel
        hist[:, 0] = np.linspace(minX_event, maxX_event, bins + 1)

    # Per-channel calibration factor and trigger threshold (default gain 1.0).
    u_dict = {ch["name"]: ch["u"] for ch in (mapping_channels or [])}
    thr_dict = {ch["name"]: ch["thr"] for ch in (mapping_channels or [])}

    trigchans = None
    if triggers:
        from event_filters import NMAHEPAM_triggers
        trigchans = prepare_trigger_indices(triggers, columns, NMAHEPAM_triggers)

    current_event = []
    event_start = None

    with open(file, "r", encoding="utf-8", errors="ignore") as f:
        for line in f:
            if not line.startswith("ED"):
                continue
            parts = line.split()
            try:
                time = int(float(parts[1]))
                sli = int(parts[2])
                cha = int(parts[3])
                raw = float(parts[-1]) / 0x20000
            except (ValueError, IndexError):
                # BUG FIX: was a bare except; skip only malformed ED lines
                continue

            idx = resolver(cha, sli)
            if idx is None:
                continue

            name = columns[idx]
            val = raw * u_dict.get(name, 1.0)

            if event_start is None:
                event_start = time
            # A hit further than time_threshold from the event's first hit
            # closes the current event and starts a new one.
            if abs(time - event_start) > time_threshold:
                process_event(current_event, hist, B_only, trigchans, columns,
                              thr_dict, col_index, B_index if B_only else None)
                current_event = []
                event_start = time

            current_event.append((name, val))

    # Flush the last (still open) event.
    if current_event:
        process_event(current_event, hist, B_only, trigchans, columns,
                      thr_dict, col_index, B_index if B_only else None)

    return hist


# ---------------------------
# Event Processing (neu)
# ---------------------------
def process_event(event, hist, B_only, trigchans, columns, thr_dict, col_index, B_index):
    """Fill one event's (name, value) hits into *hist*, honoring triggers.

    In B_only mode only the B1..B6 channels are binned, plus their sum into
    the last column; otherwise every hit goes into its channel's column.
    """
    event_dict = dict(event)

    # Drop the whole event if an active trigger condition is not met.
    if (trigchans is not None) and not check_trigger(event_dict, columns, trigchans, thr_dict):
        return

    if B_only:
        # B-Channels + Sum
        sum_val = 0
        for b, i in B_index.items():
            val = event_dict.get(b, 0)
            sum_val += val
            if minX_B <= val <= maxX_B:
                x = int((val - minX_B) / resX_B)
                hist[x, i + 1] += 1
        if minX_B <= sum_val <= maxX_B:
            x = int((sum_val - minX_B) / resX_B)
            hist[x, -1] += 1
    else:
        for name, val in event:
            if minX_event <= val <= maxX_event:
                x = int((val - minX_event) / resX_event)
                hist[x, col_index[name] + 1] += 1


# ---------------------------
# Save
# ---------------------------
def save_hist(hist, columns, suffix):
    """Write *hist* as space-separated CSV to hists/<stem>[_<nameadd>].<suffix>."""
    add = f"_{args.nameadd}" if args.nameadd else ""
    df = pd.DataFrame(hist, columns=["value"] + columns)
    # BUG FIX: use the input file's stem (computed above but previously unused)
    # instead of the literal "(unknown)" placeholder in the output path.
    fname = f"hists/{filename}{add}.{suffix}"
    df.to_csv(fname, sep=" ", index=False)
    print(fname, "created")


# ---------------------------
# MAIN
# ---------------------------
def main():
    """Dispatch the requested histogram jobs based on the CLI flags."""
    columns, resolver, channels, _ = Evaluate_mapping(args.map)

    if args.eventhist:
        hist = create_event_histogram(columns, resolver, channels,
                                      time_threshold=args.time,
                                      triggers=args.trigger)
        save_hist(hist, columns, "eventhist")

    if args.Bhist:
        hist = create_event_histogram(columns, resolver, channels,
                                      time_threshold=args.time,
                                      triggers=args.trigger,
                                      B_only=True)
        save_hist(hist, ["B1", "B2", "B3", "B4", "B5", "B6", "SUM"], "Bhist")


if __name__ == "__main__":
    main()