compton 4 лет назад
Родитель
Commit
89c2671faa

+ 14 - 1
README.md

@@ -1 +1,14 @@
-The repository for energy measurements
+Репозиторий с данными усреднений комптоновских энергий
+
+### Структура
+
+* **./tables** – содержит таблицы с усреднёнными энергиями для энергетических точек 
+по сезонам
+* **./src** – содержит исходный код
+* [cmd.inp.nsk.su/~compton/pictures](https://cmd.inp.nsk.su/~compton/pictures) – 
+картинки с усреднениями
+
+### Примечания
+
+1. В ветке `master` хранятся проверенные данные, в ветке `dev` более свежая, но менее 
+надёжная информация.

+ 53 - 0
src/README.md

@@ -0,0 +1,53 @@
+В этой папке лежит исходный код скриптов
+
+### `compton_filter.py`
+код для фильтрации данных комптоновских измерений из slowdb и переноса их в базу данных калибровок
+
+#### Подробности
+Беру данные из slowdb, соответствующие измерению комптоновских энергий (`g_id==43`), очищаю (`dt > 0`), вычисляю времена начала и окончания измерения.
+Дополнительно убираю перекрывающиеся измерения (оставляю только самые новые).
+Записываю оставшиеся строки в базу данных калибровок в **Compton_run**
+
+#### Пример использования
+
+`python compton_filter.py --season cmd3_2021_2 --config database.ini --update`
+
+* означает отфильтровать таблицу **cmd3_2021_2** из slowdb и обновить новыми значениями (если есть) базу данных калибровок
+* **database.ini** файл содержит параметры аутентификации для slowdb и бд калибровок, пример
+```
+[postgresql]
+host=xxx
+database=xxx
+user=xxx
+password=xxx
+
+[clbrDB]
+host=xxx
+database=xxx
+user=xxx
+password=xxx
+```
+
+* требуется `python3` и библиотека `psycopg2==2.9.1` для работы
+
+### `compton_combiner.py`
+код для объединения данных заходов и измерений комптоновской энергии из бд калибровок + вычисление усреднённых значений в точках по энергии
+
+#### Подробности
+Вычитываю раны из базы данных заходов и комптоновские измерения из базы данных калибровок.
+Суммирую светимость по заходам, соответствующим каждому комптоновскому измерению. Суммарные светимости, набранные в каждом комптоновском измерении становятся весами этих измерений.
+С помощью метода максимального правдоподобия (likelihood) усредняю комптоновские измерения с учётом весов, соответствующих светимостям.
+
+#### Пример использования
+
+`python compton_combiner.py -s NNBAR2021 -c database.ini --csv --clbrdb --pics_folder ./pics`
+
+* означает вычислить средние энергии для сезона **NNBAR2021**, использовать конфиг файл с данными аутентификации **database.ini**, сохранить полученные результаты в csv таблицу, обновить данные в базе данных калибровок, сохранить картинки по точкам в директории **./pics**
+* в дополнение к предыдущему, **database.ini** должен содержать аутентификацию для базы данных заходов, пример
+```
+[cmdruns]
+host=xxx
+database=xxx
+user=xxx
+password=xxx
+```

+ 6 - 0
src/bad_comptons.txt

@@ -0,0 +1,6 @@
+1622188639 #incorrect laser line
+1622187130 #incorrect laser line
+1622185633 #incorrect laser line
+1622184134 #incorrect laser line
+1622182626 #incorrect laser line
+1622181118 #incorrect laser line

+ 472 - 0
src/compton_combiner.py

@@ -0,0 +1,472 @@
+"""Script to combine compton measurements with runs and process these data
+"""
+
+import argparse
+from configparser import ConfigParser
+from datetime import datetime, timedelta, timezone
+import sqlite3
+from typing import Union, Tuple, Optional
+
+from compton_filter import CalibrdbHandler
+from iminuit import Minuit
+import matplotlib.dates as mdates
+import matplotlib.pyplot as plt
+from mysql.connector import connect, Error
+import numpy as np
+import pandas as pd
+from tqdm import tqdm
+
# Season boundaries: SEASONS['start_run'][i] is the first run of SEASONS['name'][i].
# 'start_run' deliberately holds one more entry than 'name' (a trailing None), so
# that start_run[idx+1] can always be used as the upper bound of season idx;
# None means "open-ended" for the newest season.
SEASONS = {
    'name': ['HIGH2017', 'RHO2018', 'HIGH2019', 'LOW2020', 'HIGH2020', 'HIGH2021', 'NNBAR2021'],
    'start_run': [36872, 48938, 70014, 85224, 89973, 98116, 107342, None],
}
+
class RunsDBHandler():
    """Handler of the runs database (MySQL): selects runs with their times,
    energies and luminosities for further combination with compton measurements.
    """

    def __init__(self, host: str = 'cmddb', database: str = 'online', user: str = None, password: str = None):
        """
        Parameters
        ----------
        host : str
            host name (default is "cmddb")
        database : str
            database name (default is "online")
        user : str
            username (default is None)
        password : str
            password (default is None)
        """
        self.conn = connect(host = host, database = database, user = user, password = password)
        self.cur = self.conn.cursor()
        # All datetimes in this session are in Novosibirsk time (UTC+7).
        self.cur.execute("SET time_zone = '+07:00';")
        
    @property
    def fields(self) -> list:
        """Returns a list of available columns in the RunsDB
        """
        
        self.cur.execute("""DESCRIBE Runlog""")
        return self.cur.fetchall()
        
    def load_tables(self, range: Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]], energy_point: Optional[float] = None, select_bad_runs: bool = False):
        """Selects runs from the Runlog table
        
        Parameters
        ----------
        range : Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]]
            selection range (NOTE(review): this parameter shadows the `range` builtin)
            int range defines an interval in runs (bounds inclusive)
            datetime range defines a time interval (NSK: +7:00 time)
        energy_point : Optional[float]
            energy point name, MeV (default is None)
        select_bad_runs : bool
            select runs with labels except (Y) (default is False)

        Returns
        -------
        Tuple[list, list]
            selected rows (run, starttime, stoptime, energy, luminosity)
            and the list of the field names
        """
        
        cond = ""
        if isinstance(range[0], int):
            # Run-number selection: integer bounds are embedded directly in the query.
            cond = f" AND run >= {range[0]} "
            if range[1] is not None:
                cond += f" AND run <= {range[1]} "
        elif isinstance(range[0], datetime):
            # Time selection goes through placeholders; an open upper bound
            # shortens the parameter tuple to a single value.
            cond = f" AND starttime >= %s "
            if range[1] is not None:
                cond += " AND stoptime <= %s"
            else:
                range = (range[0], )
                
        energy_cond = ""
        if energy_point is not None:
            # NOTE(review): f-string interpolation; value comes from internal callers.
            energy_cond = f" AND energy = {energy_point}"
            
        # quality = "Y" marks good runs; select_bad_runs inverts the filter.
        quality_cond = ' quality = "Y" '
        if select_bad_runs:
            quality_cond = ' quality <> "Y" '
            
        # Basic sanity filters: positive luminosity, consistent times, non-empty runs.
        sql_query = f"""
        SELECT 
            run, 
            starttime, 
            stoptime, 
            energy, 
            luminosity
        FROM Runlog 
        WHERE 
            {quality_cond}
            {cond}
            {energy_cond}
            AND luminosity > 0
            AND stoptime > starttime
            AND nevent > 0
        ORDER BY run DESC"""
        
        # Only the datetime variant carries query parameters.
        if isinstance(range[0], datetime):
            self.cur.execute(sql_query, range)
        else:
            self.cur.execute(sql_query)
        
        field_names = [i[0] for i in self.cur.description]
        res = self.cur.fetchall()
        return res, field_names
        
    def __del__(self):
        # Close the MySQL connection when the handler is garbage collected.
        self.conn.close()
+        
class Combiner():
    """Combines a dataframe with runs and a dataframe with compton measurements together
    """
    
    def __init__(self, runsdb: Tuple[list, list], clbrdb: Tuple[list, list]):
        """
        Parameters
        ----------
        runsdb : Tuple[list, list]
            table of runs (rows and field names)
        clbrdb : Tuple[list, list]
            table of compton measurements (rows and field names)            
        """
        
        rdb_rows, r_fld = runsdb
        cdb_rows, c_fld = clbrdb
        
        # In-memory SQLite database. PARSE_DECLTYPES|PARSE_COLNAMES makes columns
        # declared/aliased as "timestamp" come back as datetime objects.
        self.conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
        self.cur = self.conn.cursor()
        self.cur.execute(f"CREATE table runs (run, elabel, starttime timestamp, stoptime timestamp, luminosity)")
        self.cur.execute(f"CREATE table compton (begintime timestamp, endtime timestamp, e_mean, e_std, spread_mean, spread_std)")
        
        # Reorder incoming rows by field name so the INSERTs below match the schemas.
        run_row_generator = map(lambda x: (x[r_fld.index("run")], x[r_fld.index("energy")],
                                           x[r_fld.index("starttime")], x[r_fld.index("stoptime")],
                                           x[r_fld.index("luminosity")]), rdb_rows)
        # The compton "data" field is an array: [e_mean, e_std, spread_mean, spread_std].
        c_data_idx = c_fld.index("data")
        compton_row_generator = map(lambda x: (x[c_fld.index("begintime")], x[c_fld.index("endtime")], 
                                               float(x[c_data_idx][0]), float(x[c_data_idx][1]), 
                                               float(x[c_data_idx][2]), float(x[c_data_idx][3])), cdb_rows)
        
        self.cur.executemany(f"""INSERT into runs VALUES ({','.join(['?']*5)})""", run_row_generator)
        self.cur.executemany(f"""INSERT into compton VALUES ({','.join(['?']*6)})""", compton_row_generator)
        
        self.__create_combined_table()        
        
    def __create_combined_table(self):
        # Cross join of runs and compton measurements kept only when the two time
        # intervals intersect in any way (either endpoint inside the other interval).
        create_combined_query = """
        CREATE TABLE combined_table AS
        SELECT 
            runs.run AS run,
            runs.elabel AS elabel,
            runs.starttime as "run_start [timestamp]", 
            runs.stoptime AS "run_stop [timestamp]", 
            compton.begintime AS "compton_start [timestamp]", 
            compton.endtime AS "compton_stop [timestamp]",
            runs.luminosity, compton.e_mean, compton.e_std, compton.spread_mean, compton.spread_std
        FROM runs, compton
        WHERE 
            (runs.starttime BETWEEN compton.begintime AND compton.endtime) 
            OR (runs.stoptime BETWEEN compton.begintime AND compton.endtime)
            OR (compton.begintime BETWEEN runs.starttime AND runs.stoptime)
            OR (compton.endtime BETWEEN runs.starttime AND runs.stoptime);
        """
        self.cur.execute(create_combined_query)
        return
        
    def combined_table(self) -> pd.DataFrame:
        """Returns combined dataframe

        For every run only the compton measurement with the largest overlap
        fraction (common_duration/run_duration) is kept.
        """
        
        sql_query = """
        SELECT * FROM combined_table;
        """
        df = pd.read_sql(sql_query, self.conn)
        # Overlap of the run interval with the compton interval.
        df['common_duration'] = df[['run_stop', 'compton_stop']].min(axis=1) - df[['run_start', 'compton_start']].max(axis=1)
        df['run_duration'] = df['run_stop'] - df['run_start']
        df['run_in_measurement'] = df['common_duration']/df['run_duration']
        # drop_duplicates keeps the first (i.e. largest-overlap) row per run.
        df = df.sort_values(by='run_in_measurement', ascending=False).drop_duplicates(subset='run').sort_values(by='run')
        df = df.drop(['run_duration', 'common_duration', 'run_start', 'run_stop'], axis=1)
        return df
        
    def __del__(self):
        # Close the in-memory SQLite connection.
        self.conn.close()
+        
+class Likelihood():
+    """
+    Likelihood function
+    """
+    
+    def __init__(self, means: np.array, sigmas: np.array, weights: np.array):
+        """
+        Parameters
+        ----------
+        means : np.array
+            array of means, [MeV]
+        sigmas : np.array
+            array of standard deviations, [MeV]
+        weights : np.array
+            array of luminosities
+        """
+        
+        self.means = means
+        self.sigmas = sigmas
+        self.weights = weights/weights.mean()
+        
+    def __call__(self, mean: float, sigma: float):
+        """
+        Calls likelihood calculation
+        
+        Parameters
+        ----------
+        mean : float
+            expected mean
+        sigma : float
+            expected standard deviation
+        """
+        
+        sigma_total = np.sqrt(sigma**2 + self.sigmas**2)
+        ln_L = -np.sum( self.weights*( ((mean - self.means)**2)/(2*(sigma_total**2)) + np.log(sigma_total) ) )
+        return -ln_L
+    
def __estimate_point_with_closest(comb_df: pd.DataFrame, runs_df: pd.DataFrame, compton_df: pd.DataFrame):
    """Estimates the energy point parameters from the two compton measurements
    nearest in time, used when no compton measurement overlaps the point's runs.

    Parameters
    ----------
    comb_df : pd.DataFrame
        single-row table of the point (uses run_first, run_last, elabel)
    runs_df : pd.DataFrame
        table of the runs (uses run, starttime, stoptime)
    compton_df : pd.DataFrame
        table of the compton measurements (uses begintime, endtime, data)

    Returns
    -------
    Tuple[dict, pd.DataFrame]
        averaged parameters of the point and an empty DataFrame (no direct
        measurements were used)
    """
    # Time window of the point: start of its first run to stop of its last run.
    min_run_time = runs_df[runs_df.run == comb_df.iloc[0].at['run_first']].iloc[0].at['starttime']
    max_run_time = runs_df[runs_df.run == comb_df.iloc[0].at['run_last']].iloc[0].at['stoptime']

    # Nearest compton measurement ending before / beginning after the window.
    nearest_row_before = compton_df.iloc[pd.Index(compton_df.endtime).get_loc(min_run_time, 'nearest')]
    nearest_row_after = compton_df.iloc[pd.Index(compton_df.begintime).get_loc(max_run_time, 'nearest')]

    # Regularization: keep the statistical errors (data[1] = energy std,
    # data[3] = spread std) away from zero for the quadrature combination below.
    # Bug fix: the previous code assigned data[3] into data[1] of the "before"
    # row and regularized data[3] of the "after" row twice, leaving its data[1]
    # unregularized.
    nearest_row_before['data'][1] = max(nearest_row_before['data'][1], 1e-3)
    nearest_row_before['data'][3] = max(nearest_row_before['data'][3], 1e-3)
    nearest_row_after['data'][1] = max(nearest_row_after['data'][1], 1e-3)
    nearest_row_after['data'][3] = max(nearest_row_after['data'][3], 1e-3)

    # Unweighted average of the two measurements; statistical errors combined
    # as independent, their spread taken as the systematic error.
    mean_energy = (nearest_row_before['data'][0] + nearest_row_after['data'][0])/2
    mean_spread = (nearest_row_before['data'][2] + nearest_row_after['data'][2])/2
    std_energy = np.sqrt(1/(1/(nearest_row_before['data'][1])**2 + 1/(nearest_row_after['data'][1])**2))
    std_spread = np.sqrt(1/(1/(nearest_row_before['data'][3])**2 + 1/(nearest_row_after['data'][3])**2))
    sys_energy = np.std([nearest_row_before['data'][0], nearest_row_after['data'][0]])

    return {
        'energy_point': comb_df.elabel.min(),
        'first_run': comb_df.run_first.min(),
        'last_run': comb_df.run_last.max(), 
        'mean_energy': mean_energy, 
        'mean_energy_stat_err': std_energy, 
        'mean_energy_sys_err': sys_energy, 
        'mean_spread': mean_spread,
        'mean_spread_stat_err': std_spread, 
        'used_lum': 0, 
        'comment': 'indirect measurement #2',
    }, pd.DataFrame([])
+    
def calculate_point(comb_df: pd.DataFrame, runs_df: pd.DataFrame, compton_df: pd.DataFrame, rdb) -> Tuple[dict, pd.DataFrame]:
    """Calculates parameters of the energy (mean, std, spread) in this dataFrame
    
    Parameters
    ----------
    comb_df : pd.DataFrame
        table of the measurements linked with runs
    runs_df : pd.DataFrame
        table of the runs
    compton_df : pd.DataFrame
        table of the comptons
    rdb : RunsDBHandler
        handler of the runs database (used to extend the point's time window
        with bad-quality runs when there is no direct compton measurement)
    
    Returns
    -------
    Tuple[dict, pd.DataFrame]
        average parameters on this DataFrame and the table of the
        measurements actually used in the averaging
    """
        
    if (len(comb_df) == 1) and pd.isnull(comb_df.iloc[0].at['compton_start']):
        # no direct measurements of the compton during data runs
                
        min_Yruntime = runs_df[runs_df.run == comb_df.iloc[0].at['run_first']].iloc[0].at['starttime']
        max_Yruntime = runs_df[runs_df.run == comb_df.iloc[0].at['run_last']].iloc[0].at['stoptime']
        dlt0 = timedelta(days=1)
        # asymmetric time window because the energy can be stable only after the runs
        runs_df_with_bads = rdb.load_tables((min_Yruntime, max_Yruntime + dlt0), energy_point = comb_df.iloc[0].at['elabel'], select_bad_runs = True)
        
        if len(runs_df_with_bads[0]) == 0:
            # no bad-quality runs either: fall back to the nearest measurements in time
            return __estimate_point_with_closest(comb_df, runs_df, compton_df)
        
        runs_df_with_bads_df = pd.DataFrame(runs_df_with_bads[0], columns = runs_df_with_bads[1])
        min_run_time, max_run_time = min(min_Yruntime, runs_df_with_bads_df.starttime.min()), max(max_Yruntime, runs_df_with_bads_df.stoptime.max())
        
        # compton measurements intersecting the extended time window
        compton_meas = compton_df.query('((begintime>=@min_run_time)&(begintime<=@max_run_time))|((endtime>=@min_run_time)&(endtime<=@max_run_time))').copy()
        res_df = pd.DataFrame(list(map(lambda x: {
            'compton_start': x[1]['begintime'],
            'compton_stop': x[1]['endtime'],
            'e_mean': float(x[1]['data'][0]),
            'e_std': float(x[1]['data'][1]),
            'spread_mean': float(x[1]['data'][2]),
            'spread_std': float(x[1]['data'][3]),
        }, compton_meas.iterrows())))
        # keep only measurements within 5 MeV of the nominal point energy
        res_df = res_df.query(f'abs(e_mean -{comb_df.iloc[0].at["elabel"]})<5')
        
        if len(res_df) == 0:
            return __estimate_point_with_closest(comb_df, runs_df, compton_df)
                        
        return {
            'energy_point': comb_df.elabel.min(),
            'first_run': comb_df.run_first.min(),
            'last_run': comb_df.run_last.max(), 
            'mean_energy': res_df.e_mean.mean(), 
            'mean_energy_stat_err': np.sqrt(1/np.sum(1/(res_df.e_std)**2)),
            'mean_energy_sys_err': np.abs(comb_df.iloc[0].at['elabel'] - res_df.e_mean.mean()), 
            'mean_spread': res_df.spread_mean.mean(),
            'mean_spread_stat_err':np.sqrt(1/np.sum(1/(res_df.spread_std)**2)),
            'used_lum': 0, 
            'comment': 'indirect measurement #1',
        }, res_df
        
    
    df = comb_df.copy()
    # regularize too small spread errors
    df.spread_std = np.where(df.spread_std < 1e-4, 1e-4, df.spread_std)
    
    df = df[df.e_std > 0]
    # luminosity-weighted mean, used only for the outlier rejection below
    mean_energy = np.sum(df.e_mean*df.luminosity/(df.e_std**2))/np.sum(df.luminosity/(df.e_std**2))
    
    # drop measurements more than 5 sigma away from the weighted mean
    # (removed a redundant self-assignment of good_criterion here)
    good_criterion = np.abs((df.e_mean - mean_energy)/np.sqrt(df.e_mean.std()**2 + df.e_std**2)) < 5
    df = df[good_criterion]
    
    # likelihood fit of the mean energy and an extra (systematic) sigma;
    # errordef = 0.5 corresponds to a negative log-likelihood cost function
    m = Minuit(Likelihood(df.e_mean, df.e_std, df.luminosity), mean=df.e_mean.mean(), sigma=df.e_mean.std())
    m.errordef = 0.5
    m.limits['sigma'] = (0, None)
    m.migrad()
    sys_err = m.values['sigma']
    mean_en = m.values['mean']
    
    # luminosity-weighted average of the spread and its statistical error
    mean_spread = np.sum(df.spread_mean*df.luminosity/(df.spread_std**2))/np.sum(df.luminosity/(df.spread_std**2))
    std_spread = np.sqrt(1/np.sum((df.luminosity/df.luminosity.mean())/df.spread_std**2))
    
    res_dict = {
        'energy_point': comb_df.elabel.min(), 
        'first_run': comb_df.run_first.min(),
        'last_run': comb_df.run_last.max(), 
        'mean_energy': mean_en, 
        'mean_energy_stat_err': m.errors['mean'], 
        'mean_energy_sys_err': sys_err, 
        'mean_spread': mean_spread,
        'mean_spread_stat_err': std_spread, 
        'used_lum': df.luminosity.sum()/comb_df.luminosity_total.sum(), 
        'comment': '',
    }
    return res_dict, df
+
def process_combined(combined_df: pd.DataFrame, runs_df: pd.DataFrame, compton_df: pd.DataFrame, pics_folder: Optional[str] = None, rdb: Optional[RunsDBHandler] = None) -> pd.DataFrame:
    """Groups the combined runs/compton table into energy points, averages each
    point with calculate_point and optionally saves a picture per point.

    Parameters
    ----------
    combined_df : pd.DataFrame
        result of Combiner.combined_table()
    runs_df : pd.DataFrame
        table of the runs
    compton_df : pd.DataFrame
        table of the compton measurements
    pics_folder : Optional[str]
        directory for per-point pictures; None disables plotting (default is None)
    rdb : Optional[RunsDBHandler]
        runs database handler forwarded to calculate_point (default is None)

    Returns
    -------
    pd.DataFrame
        one row per energy point with the averaged parameters
    """
    
    if pics_folder is not None:
        # Non-interactive plotting with concise date labels on the time axis.
        plt.ioff()
        plt.style.use('ggplot')
        locator = mdates.AutoDateLocator(minticks=5)
        formatter = mdates.ConciseDateFormatter(locator)
        formatter.formats = ['%y', '%b', '%d', '%H:%M', '%H:%M', '%S.%f', ]
        formatter.zero_formats = [''] + formatter.formats[:-1]
        formatter.zero_formats[3] = '%d-%b'
        formatter.offset_formats = ['', '%Y', '%b %Y', '%d %b %Y', '%d %b %Y', '%d %b %Y %H:%M', ]
    
    # Outer merge keeps runs without any compton measurement (their compton
    # fields stay NaN and their luminosity contribution is counted as zero).
    runs_df = runs_df.rename({'luminosity': 'luminosity_full', 'energy': 'elabel'}, axis=1)
    combined_df = pd.merge(combined_df.drop(['elabel'], axis=1), runs_df[['run', 'elabel', 'luminosity_full']], how='outer')
    combined_df = combined_df.sort_values(by='run')
    combined_df['luminosity'] = combined_df['luminosity'].fillna(0)
    
    # A new point starts whenever the nominal energy changes between consecutive runs.
    combined_df['point_idx'] = np.cumsum(~np.isclose(combined_df.elabel, combined_df.elabel.shift(1), atol=1e-4))
    # One row per (point, compton measurement); the *_test columns duplicate the
    # aggregates with max instead of min and allow consistency checks downstream.
    combined_df = combined_df.groupby(['point_idx', 'compton_start'], dropna=False).agg(
        elabel=('elabel', 'min'), elabel_test=('elabel', 'max'),
        run_first=('run', 'min'), run_last=('run', 'max'),
        luminosity=('luminosity', 'sum'), luminosity_total=('luminosity_full', 'sum'),
        compton_stop=('compton_stop', 'min'), compton_stop_test=('compton_stop', 'max'),
        e_mean=('e_mean', 'min'), e_mean_test=('e_mean', 'max'),
        e_std=('e_std', 'min'), e_std_test=('e_std', 'max'),
        spread_mean=('spread_mean', 'min'), spread_mean_test=('spread_mean', 'max'),
        spread_std=('spread_std', 'min'), spread_std_test=('spread_std', 'max'),
    ).reset_index().set_index('point_idx')
    # return combined_df
    
    result_df = pd.DataFrame(columns=['energy_point', 'first_run', 'last_run', 'mean_energy', 'mean_energy_stat_err', 'mean_energy_sys_err', 'mean_spread', 'mean_spread_stat_err', 'used_lum', 'comment'])
    
    for i, table in tqdm(combined_df.groupby('point_idx', dropna=False)):
        res_dict, good_df = calculate_point(table, runs_df, compton_df, rdb)
        # NOTE(review): DataFrame.append is deprecated (removed in pandas 2.0) —
        # TODO migrate to pd.concat.
        result_df = result_df.append(res_dict, ignore_index=True)
        
        if pics_folder is not None:
            plt_table = good_df.dropna()
            if len(plt_table) == 0:
                continue
            
            total_error = np.sqrt(res_dict["mean_energy_stat_err"]**2 + res_dict["mean_energy_sys_err"]**2)
            # Measurements are drawn at the middle of their time interval.
            half_timedelta = (plt_table.compton_stop - plt_table.compton_start)/2 
            time = plt_table.compton_start + half_timedelta
            # NOTE(review): dlt0 is unused here.
            dlt0, total_time = timedelta(days=1), plt_table.compton_stop.max() - plt_table.compton_stop.min()
            timelim = [plt_table.compton_stop.min() - 0.05*total_time, plt_table.compton_stop.max() + 0.05*total_time]
            
            fig, ax = plt.subplots(1, 1, dpi=120, tight_layout=True)
            ax.errorbar(time, plt_table.e_mean, xerr=half_timedelta, yerr=plt_table.e_std, fmt='.')
            # Mean with the total (stat + sys in quadrature) error band.
            ax.axhline(res_dict['mean_energy'], color='black', zorder=3, label='Mean')
            ax.fill_between(timelim, 
                            [res_dict['mean_energy'] - total_error]*2, 
                            [res_dict['mean_energy'] + total_error]*2, color='green', zorder=1, alpha=0.4)
            ax.tick_params(axis='x', labelrotation=45)
            ax.xaxis.set_major_locator(locator)
            ax.xaxis.set_major_formatter(formatter)
            ax.set(title=f'{res_dict["energy_point"]}, E = {res_dict["mean_energy"]:.3f} ± {res_dict["mean_energy_stat_err"]:.3f} ± {res_dict["mean_energy_sys_err"]:.3f} MeV', xlabel='Time, NSK', ylabel='Energy, [MeV]', xlim=timelim)
            plt.savefig(f'{pics_folder}/{res_dict["first_run"]}_{res_dict["energy_point"]}.png', transparent=True)
            plt.close()
    
    return result_df
+
def final_table_to_clbrdb(df: pd.DataFrame, clbrdb: CalibrdbHandler, runs_df: pd.DataFrame, season: str):
    """Write good values from the averaged table into clbrdb

    Parameters
    ----------
    df : pd.DataFrame
        averaged energy points (result of process_combined)
    clbrdb : CalibrdbHandler
        handler of the calibration database
    runs_df : pd.DataFrame
        table of the runs (provides starttime/stoptime of the boundary runs)
    season : str
        season name, stored as the comment of the inserted rows
    """
    
    # A point is "good" if it was measured directly (empty comment) or, for
    # indirect measurements, lies within 5 MeV of the nominal point energy.
    good_values = (df.comment=='')|((df.comment!='')&((df.mean_energy.astype(float) - df.energy_point).abs()<5))
    df_clbrdb = df.loc[good_values].drop(['comment', 'used_lum'], axis=1)
    
    # Attach the start time of the first run and the stop time of the last run.
    df_clbrdb = pd.merge(df_clbrdb, runs_df[['run', 'starttime']], how='left', left_on='first_run', right_on='run').drop(['run'], axis=1)
    df_clbrdb = pd.merge(df_clbrdb, runs_df[['run', 'stoptime']], how='left', left_on='last_run', right_on='run').drop(['run'], axis=1)
    
    # writetime is set to the stop time of the point's last run.
    df_clbrdb = df_clbrdb.assign(writetime=lambda df: df['stoptime'])
    # Column order must match the Compton_run_avg calibration layout.
    df_clbrdb = df_clbrdb[['writetime', 'starttime', 'stoptime',
                           'energy_point', 'first_run', 'last_run', 'mean_energy', 
                           'mean_energy_stat_err', 'mean_energy_sys_err', 'mean_spread', 'mean_spread_stat_err']].values.tolist()
    clbrdb.insert(df_clbrdb, 'Misc', 'RunHeader', 'Compton_run_avg', 'Default', comment = season)
    clbrdb.commit()
+
# python scripts/compton_combiner.py -s NNBAR2021 -c database.ini --csv --clbrdb
def main():
    """Command line entry point: averages compton measurements per energy point
    for a season and optionally saves a csv table, pictures and updates clbrdb.
    """
    parser = argparse.ArgumentParser(description = 'Mean compton energy measurements from clbrdb')
    parser.add_argument('-s', '--season', help = 'Name of the season')
    parser.add_argument('-c', '--config', help = 'Config file containing information for access to databases')
    parser.add_argument('--csv', action = 'store_true', help = 'Save csv file with data or not')
    parser.add_argument('--clbrdb', action = 'store_true', help = 'Update Compton_run_avg clbrdb or not')
    parser.add_argument('--pics_folder', help = 'Path to the directory for saving the pictures')
    parser.add_argument('--only_last', action = 'store_true', help = 'Compute values of the last (in Compton_run_avg clbrdb) and new points only')
    
    args = parser.parse_args()
    # logging.info(f"Arguments: season: {args.season}, config {args.config}")

    # NOTE(review): `parser` is reused here for the config reader, shadowing the
    # argparse parser above.
    parser = ConfigParser()
    parser.read(args.config);
    
    # Database handlers built from the [cmdruns] and [clbrDB] config sections.
    rdb = RunsDBHandler(**parser['cmdruns'])
    clbrdb = CalibrdbHandler(**parser['clbrDB'])
    
    # Run range of the season: from its first run up to the next season's first
    # run (None = open-ended for the newest season).
    idx = SEASONS['name'].index(args.season)
    runs_range = (SEASONS['start_run'][idx], SEASONS['start_run'][idx+1])
    
    if args.only_last:
        # Recompute only from the begin time of the last averaged point onwards.
        res_avg = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run_avg', num_last_rows = 1)
        if len(res_avg[0]) != 0:
            begintime = res_avg[0][0][res_avg[1].index("begintime")]
            runs_range = (begintime, None)
    
    res_rdb = rdb.load_tables(runs_range)
    runs_df = pd.DataFrame(res_rdb[0], columns=res_rdb[1])
    
    # Compton measurements are selected in a slightly wider time window than the runs.
    tdlt0 = timedelta(days=2)
    time_range = (runs_df.starttime.min() - tdlt0, runs_df.stoptime.max() + tdlt0)
    
    res_clbrdb = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run', num_last_rows = None, timerange = time_range)
    
    # Join runs with compton measurements and average per energy point.
    cb = Combiner(res_rdb, res_clbrdb)
    comb_df = cb.combined_table()
    
    compton_df = pd.DataFrame(res_clbrdb[0], columns=res_clbrdb[1])
    
    cdf = process_combined(comb_df, runs_df, compton_df, args.pics_folder, rdb)
    
    if args.csv:
        cdf.to_csv(f'{args.season}.csv', index=False, float_format='%g')
    
    if args.clbrdb:
        final_table_to_clbrdb(cdf, clbrdb, runs_df, args.season)
    return 

if __name__ == "__main__":
    main()

+ 401 - 0
src/compton_filter.py

@@ -0,0 +1,401 @@
+"""
+Script to fill calibration database with filtering slowdb compton measurements
+"""
+
+import argparse
+from configparser import ConfigParser
+from datetime import datetime, timedelta, timezone
+import sys
+from typing import Tuple, List, Dict, Union, Optional
+import warnings
+import logging
+
+import psycopg2
+from psycopg2.extras import execute_values
+
class PostgreSQLHandler():
    """Thin wrapper around a psycopg2 connection, shared by the slowdb and
    calibration database handlers.
    """

    def __init__(self, host: str = 'cmddb', database: str = 'slowdb', user: str = None, password: str = None):
        """Opens the connection and creates a cursor.

        Parameters
        ----------
        host : str
            host name (default is "cmddb")
        database : str
            database name (default is "slowdb")
        user : str
            username (default is None)
        password : str
            password (default is None)
        """

        connection = psycopg2.connect(host = host, database = database, user = user, password = password)
        self.conn = connection
        self.cur = connection.cursor()
        logging.info("PostgreSQL Hander created")

    @property
    def list_tables(self) -> List[str]:
        """Returns list of existed tables in the compton measurements slowDB

        Returns
        -------
        List[str]
            list of tables
        """

        logging.info("Get list of the slowdb tables")
        self.cur.execute("""
        SELECT table_name FROM information_schema.tables
        WHERE table_schema = 'public'
        """)
        # Each fetched row is a 1-tuple; unpack the table name from it.
        return [row[0] for row in self.cur.fetchall()]
+
class SlowdbComptonHandler(PostgreSQLHandler):
    """A class for processing and filtering of compton measurements from slowdb
    """
    
    def __is_overlapped_row(self, start_time_next: datetime, stop_time_prev: datetime):
        # True when the (already kept, more recent) measurement starts more than
        # 2 seconds before the older measurement stops, i.e. the two intervals
        # overlap beyond the tolerated gap.
        gap = timedelta(seconds=2)
        if(start_time_next < stop_time_prev):
            logging.debug(f'time gap {abs(start_time_next - stop_time_prev)}')
        return start_time_next < stop_time_prev - gap
    
    def __drop_overlapping_rows_list(self, table: list) -> list:
        """Removes rows with overlapping time intervals from the table
        
        When two measurements overlap, the newer one is kept.
        
        Parameters
        ----------
        table : list
            the table MUST BE ORDERED BY TIME where 5th column is start_time, 6th column is end_time
            
        Returns
        -------
        list
            clear table
        """
        
        if len(table) == 0:
            logging.info("Empty list. No overlapping rows")
            return table
        
        logging.info("Drop overlapping rows in list representation")
        table = table[::-1] # sort table by time from last to past
        min_time = table[0][6] # stop time of the newest row; it is never dropped
        overlapped_idxs = list()
        
        # min_time tracks the earliest start time among the kept (newer) rows;
        # an older row is dropped if its interval overlaps that boundary.
        for idx, row in enumerate(table):
            start_time, stop_time = row[5], row[6]
            if self.__is_overlapped_row(min_time, stop_time):
                overlapped_idxs.append(idx)
            else:
                min_time = start_time
        
        # Pop from the largest index down so earlier indices stay valid.
        for index in sorted(overlapped_idxs, reverse=True): # strict condition of the backward loop
            table.pop(index)
        
        return table[::-1]
    
    def load_tables(self, tables: List[str], daterange: Optional[datetime] = None):
        """Returns tables containing compton energy measurements
        
        Parameters
        ----------
        tables : List[str]
            names of tables in the slowdb compton measurements database 
            (full list of available tables can be seen with the property tables)
        daterange : Optional[datetime]
            minimum time for selection (should contain timezone)
        
        Returns
        -------
        Union[pd.DataFrame, list]
            table containing compton energy measurements with fields:
            write_time - time when the row was written (contains timezone)
            mean_energy - compton mean of the energy measurement [MeV]
            std_energy - compton std of the energy measurement [MeV]
            mean_spread - compton mean of the spread measurement [MeV]
            std_spread - compton std of the spread measurement [MeV]
            start_time - beginning time of the compton measurement (contains timezone)
            end_time - end time of the compton measurement (contains timezone)
        """
        
        # Optional lower time bound passed through a named query parameter.
        time_condition = f"AND time>(%(date)s)" if daterange is not None else ""
        
        # g_id=43 selects compton energy measurements; dt>0 drops broken rows.
        # values_array[7]/[8] are duration/offset in seconds used to reconstruct
        # the measurement interval (as aliased in the SELECT below).
        sql_query = lambda table: f"""SELECT 
            time AS time, 
            CAST(values_array[1] AS numeric) AS mean_energy, 
            CAST(values_array[2] AS numeric) AS std_energy, 
            ROUND(CAST(values_array[5]/1000 AS numeric), 6) AS mean_spread,
            ROUND(CAST(values_array[6]/1000 AS numeric), 6) AS std_spread, 
            date_trunc('second', time + (values_array[8] * interval '1 second')) AS start_time, 
            date_trunc('second', time + (values_array[8] * interval '1 second') + (values_array[7] * interval '1 second')) AS stop_time
            FROM {table} WHERE g_id=43 AND dt>0 {time_condition}"""
        
        # One SELECT per requested table, merged and globally ordered by time.
        full_sql_query = '\nUNION ALL\n'.join([sql_query(table) for table in tables]) + '\nORDER BY time;'
        
        logging.debug(f"Full sql query {full_sql_query}")
        
        self.cur.execute(full_sql_query, {'date': daterange})
        table = self.cur.fetchall()
        table = self.__drop_overlapping_rows_list(table)
        return table
+    
+class CalibrdbHandler(PostgreSQLHandler):
+    """A class for processing of calibration database
+    """
+    
+    def select_table(self, system: str, algo: str, name: str, version: str = 'Default') -> int:
+        """Selects the table from database
+        
+        Parameters
+        ----------
+        system : str
+            name of the system
+        algo : str
+            name of the algorithm
+        name : str
+            name of the calibration
+        version : str
+            name of the calibration version (default is Default)
+            
+        Returns
+        -------
+        sid : int
+            value corresponding the table
+        """
+        
+        self.cur.execute(f"""SELECT * FROM clbrset 
+        WHERE system='{system}' AND algo='{algo}' AND name='{name}' AND version='{version}'""")
+        result = self.cur.fetchall()
+        logging.debug(f"selected clbrset: {result}")
+        if len(result) > 1:
+            logging.warning('Multiple equal calibration sets. clbrset DB problem')
+            return result[0]
+        sid = result[0][0]
+        return sid
+    
    def load_table(self, system: str, algo: str, name: str, version: str = 'Default', 
                   num_last_rows: Optional[int] = None, timerange: Optional[Tuple[datetime, datetime]] = None, 
                   return_timezone: bool = False) -> Tuple[list, list]:
        """Loads the calibration table
        
        Parameters
        ----------
        system : str
            name of the system
        algo : str
            name of the algorithm
        name : str
            name of the calibration
        version : str
            name of the calibration version (default is Default)
        num_last_rows : Optional[int]
            the number of last rows of the table (default is None, i.e. all rows)
        timerange : Optional[Tuple[datetime, datetime]]
            time range condition on the selection of the table (default is None)
        return_timezone : bool
            return timezone in output datetimes as a field or not (default is False)
        
        Returns
        -------
        Tuple[list, list]
            the calibration table and name of fields
        """
        
        # Resolve the calibration set id first; rows are then selected by sid.
        sid = self.select_table(system, algo, name, version)
        time_condition = "AND begintime BETWEEN %s AND %s" if timerange is not None else ""
        # 'ALMST' shifts the returned datetimes to the local (Novosibirsk) zone.
        tzone = "AT TIME ZONE 'ALMST'" if return_timezone else ''
        sql_query = f"""SELECT 
        cid, sid, createdby, 
        time {tzone} AS time, 
        begintime {tzone} AS begintime, 
        endtime {tzone} AS endtime, 
        comment, parameters, data
        FROM clbrdata WHERE sid={sid} {time_condition} ORDER BY time DESC """
        if num_last_rows is not None:
            sql_query += f"LIMIT {num_last_rows}"
            
        # Query parameters are only needed when the time condition is present.
        if timerange is None:
            self.cur.execute(sql_query)
        else:
            self.cur.execute(sql_query, timerange)
        fields_name = [i[0] for i in self.cur.description]
        table = self.cur.fetchall()
        return table, fields_name
+        
+    def update(self, new_rows: list, system: str = "Misc", algo: str = "RunHeader", 
+               name: str = "Compton_run", version: str = 'Default', handle_last_time_row: bool = False):
+        """Writes new_rows in clbrdb
+        
+        Parameters
+        ----------
+        new_rows : list
+            list of the data for writing
+        handle_last_time_row : bool
+            (DANGEROUS PLACE - keep default False or don't commit changes if you don't know what you want)
+            update current values or not: replace all values in interval from min(begintime in new_rows) to max(endtime in new_rows)
+        """
+        
+        if len(new_rows) == 0:
+            return
+        
+        sid = self.select_table(system, algo, name, version)
+        
+        new_rows = list(map(lambda x: (sid, 'lxeuser', x[0], x[5], x[6], [x[1], x[2], x[3], x[4]]), new_rows))
+        
+        if handle_last_time_row:
+            min_new_time, max_new_time = min(map(lambda x: x[3], new_rows)), max(map(lambda x: x[4], new_rows))
+            self.delete_rows(sid = sid, createdby = 'lxeuser', time = (min_new_time, max_new_time))
+        
+        insert_query = """INSERT INTO clbrdata (sid, createdby, time, begintime, endtime, data) VALUES %s;"""
+        execute_values(self.cur, insert_query, new_rows, fetch=False)
+        logging.info(f"Inserted {len(new_rows)} new rows")
+        return
+    
+    def insert(self, new_rows: list, system: str, algo: str, name: str, version: str, 
+               update: bool = True, comment: Optional[str] = None):
+        """Insert new_rows in the table
+        
+        Parameters
+        ----------
+        new_rows : list
+            list of new rows in the follwing format
+        update : bool
+            update current calibration
+        comment : Optional[str]
+            common comment field
+        """
+        
+        sid = self.select_table(system, algo, name, version)
+        
+        if update:
+            update_query = f"""UPDATE clbrdata 
+            SET data = %(data)s, createdby = %(createdby)s, time = %(time)s, begintime = %(begintime)s, endtime = %(endtime)s
+            WHERE sid = %(sid)s AND comment = %(comment)s
+            """
+            for x in new_rows:
+                season_point = (comment if comment is not None else '') + '_' + str(x[3])
+                dict_row = {
+                    'sid': sid,
+                    'createdby': 'lxeuser',
+                    'time': x[0],
+                    'begintime': x[1],
+                    'endtime': x[2],
+                    'comment': season_point,
+                    'data': x[3:],
+                }
+                self.cur.execute(update_query, dict_row)
+        
+        insert_query = """INSERT INTO clbrdata (sid, createdby, time, begintime, endtime, comment, data) VALUES %s"""
+        comment_creator = lambda x: f'{comment if comment is not None else ""}_{str(x[3])}'
+        insert_rows = list(map(lambda x: (sid, 'lxeuser', x[0], x[1], x[2], comment_creator(x), x[3:]), new_rows))        
+        execute_values(self.cur, insert_query, insert_rows, fetch=False)        
+        
+        drop_query = f"""
+            DELETE FROM clbrdata a
+            USING clbrdata b
+            WHERE
+                a.sid = {sid}
+                AND a.cid > b.cid
+                AND a.sid = b.sid
+                AND a.comment = b.comment
+        """
+        self.cur.execute(drop_query)
+        
+        logging.info(f"Inserted {len(insert_rows)} rows into table: {system}/{algo}/{name}/{version}")
+        return
+    
+    def clear_table(self, sid: int, createdby: str):
+        delete_query = f"""DELETE FROM clbrdata WHERE sid = %s AND createdby = %s"""
+        logging.info(f"Clear ({sid}, {createdby}) table")
+        self.cur.execute(delete_query, (sid, createdby))
+        return
+        
+    def delete_row(self, sid: int, createdby: str, time: datetime):
+        delete_query = f"""DELETE FROM clbrdata 
+        WHERE sid = %s AND createdby = %s AND time = %s
+        """
+        self.cur.execute(delete_query, (sid, createdby, time))
+        logging.info(f"Deleted ({sid}, {createdby}, {time}) row")
+        return
+    
+    def delete_rows(self, sid: int, createdby: str, time: Tuple[datetime, datetime]):
+        delete_query = f"""DELETE FROM clbrdata 
+        WHERE sid = %s AND createdby = %s AND endtime > %s AND begintime < %s
+        """
+        self.cur.execute(delete_query, (sid, createdby, time[0], time[1]))
+        logging.info(f"Deleted ({sid}, {createdby} from {time[0]} to {time[1]}) rows")
+        return
+    
+    def remove_duplicates(self, system: str = "Misc", algo: str = "RunHeader", name: str = "Compton_run", version: str = 'Default', keep: str = 'last'):
+        sid = self.select_table(system, algo, name, version)
+        
+        keep_rule = ''
+        if keep == 'last':
+            keep_rule = '<'
+        elif keep == 'first':
+            keep_rule = '>'
+        else:
+            raise ValueError("keep argument must be 'last' or 'first'")
+        
+        remove_query = f"""
+            DELETE FROM clbrdata a
+            USING clbrdata b
+            WHERE
+                a.sid = {sid}
+                AND a.cid {keep_rule} b.cid
+                AND a.sid = b.sid
+                AND a.time = b.time                
+        """
+        self.cur.execute(remove_query)
+        pass
+    
+    def commit(self):
+        logging.info("Changes commited")
+        self.conn.commit()
+        return
+
+    def rollback(self):
+        logging.info("Changes aborted")
+        self.conn.rollback()
+        return
+    
+    def __del__(self):
+        logging.info("del clbr class")
+        self.cur.close()
+        self.conn.close()
+    
+    
def main():
    """Entry point: filter compton measurements from slowdb into the clbrdb.

    Command-line arguments:
      --season  name of the compton measurement table in slowdb
      --config  ini file with [postgresql] (slowdb) and [clbrDB] sections
      --update  write only values newer than the last stored row
    """
    log_format = '[%(asctime)s] %(levelname)s: %(message)s'
    logging.basicConfig(stream=sys.stdout, format=log_format, level=logging.INFO) #"filename=compton_filter.log"
    logging.info("Program started")

    arg_parser = argparse.ArgumentParser(description = 'Filter compton energy measurements from slowdb')
    arg_parser.add_argument('--season', help = 'Name of compton measurement table from slowdb')
    arg_parser.add_argument('--config', help = 'Config file containing information for access to databases')
    arg_parser.add_argument('--update', action = 'store_true', help = 'Writes only newest values into the db')

    args = arg_parser.parse_args()
    logging.info(f"Arguments: season: {args.season}, config {args.config}, update {args.update}")

    # Distinct name for the ini parser: the original rebound the argparse
    # `parser` variable, which is error-prone shadowing.
    config = ConfigParser()
    config.read(args.config)
    logging.info("Config parsed")

    clbrdb = CalibrdbHandler(**config['clbrDB'])
    last_written_row, _ = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run', num_last_rows = 1, return_timezone = True)
    # Resume from the newest stored timestamp only in --update mode.
    last_time = last_written_row[0][3] if (len(last_written_row) > 0) and (args.update) else None

    compton_slowdb = SlowdbComptonHandler(**config['postgresql'])
    res = compton_slowdb.load_tables([args.season], last_time)

    clbrdb.update(res, handle_last_time_row = args.update)
    clbrdb.commit()
    del clbrdb

# python scripts/compton_filter.py --season cmd3_2021_2 --config database.ini --update
if __name__ == "__main__":
    main()

+ 1 - 0
src/requirements.txt

@@ -0,0 +1 @@
+psycopg2==2.9.1

+ 38 - 0
tables/HIGH2017.csv

@@ -0,0 +1,38 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+512,36959,36962,857.195,0.000707107,0.000,0.000001,0.000707107,0,indirect measurement
+509.8,37005,37169,857.195,0.000707107,0.000,0.000001,0.000707107,0,indirect measurement
+850,37860,38509,857.195,0.000707107,0.000,0.000001,0.000707107,0,indirect measurement
+860,38526,38748,857.2377768747314,0.0169664,0.0630423757159152,8.912669596789605e-07,2.18218e-05,0.16869081677882097,
+870,38763,39022,867.0599570622977,0.0402774,0.20732438190032706,9.844808782249558e-07,1.71499e-05,0.4293961844075083,
+880,39042,39490,877.0523051924943,0.0123194,0.10995316426915773,1.2443387019616e-06,1.10737e-05,0.8574215723562806,
+890,39499,39879,889.2428720425526,0.0615789,0.4975069640163396,0.7605982888261509,0.00815704,0.990717568779759,
+900,39896,40173,899.6921822392736,0.0125385,0.05856441378423972,0.7629474123904398,0.0156462,0.9727518676836153,
+910,40184,40368,909.8971431150144,0.0121049,0.05558674296494111,0.8127729074266764,0.0135976,0.8903876290530194,
+920,40369,40529,920.1365113277699,0.0109011,0.040918334742958765,0.8205102225523018,0.0141036,0.9916543378884753,
+930,40552,40753,930.6391601938043,0.0154079,0.10325262042317272,0.8441432931031049,0.0121552,0.9926453249073202,
+936,40770,41784,935.5780309078953,0.0328014,0.3670015553775144,0.82855268022445,0.00768418,0.6963624052883751,
+942,41812,42142,942.0457240166681,0.0234608,0.2018589142637129,0.8211121903229162,0.0101685,0.9300325120274808,
+950,42150,42387,950.513673347404,0.020721,0.14358918874208426,0.8372280511664074,0.0101192,0.9967084428053775,
+960,42406,42628,960.7855722833872,0.0244285,0.18018781554348492,0.8830119883984535,0.0122455,0.9937280644999358,
+971,42651,42866,971.4223121828722,0.0131886,0.05178745857861493,0.9185000525472704,0.0145335,0.975389909337006,
+981,42875,43017,982.092922063329,0.0197584,0.09340062460820016,0.9153281452472092,0.0161165,0.9999935789652622,
+990,43038,43169,991.3811962058936,0.0198856,0.11089625131003608,0.9326955908154064,0.0151576,0.9829335972483972,
+1003.5,43198,43642,1003.453320271171,0.0120479,0.10403567535959383,0.9521987294945025,0.00898405,0.7674886894270225,
+940.2,43643,43658,940.3893303799133,0.211969,0.416569297399646,0.8605814584331459,0.0515171,0.7133951356900441,
+938.9,43659,44029,938.8830047614865,0.0116144,0.09194395130387738,0.8243114288696693,0.00923443,0.9612771543812014,
+939.6,44030,44258,939.626395366178,0.0120608,0.07659102335755653,0.8545764419798395,0.0107268,0.957744825778933,
+940.2,44259,44453,940.2028130213994,0.0153443,0.11618815215648297,0.8336481777222675,0.0107248,0.9848859451498384,
+938.3,44474,44693,938.3014214691486,0.0158833,0.11005905828989941,0.8489487607062363,0.011142,0.705482252998243,
+937.5,44699,44922,937.5668567345843,0.025102,0.22465286429480044,0.8379347416282455,0.012459,0.9555340998091107,
+940.8,44932,45154,940.7563611673784,0.018035,0.15455498090720138,0.5088800555639666,0.00744603,0.9619361321034805,
+937.5,45155,45244,937.4003737176,0.0235761,0.12049465392492985,0.8542786985298952,0.015401,1.0,
+840,45356,45487,839.689068383504,0.0434892,0.31835538386365886,0.6594874087659622,0.0123268,0.8461665647284214,
+825,45497,45639,825.170939565047,0.0118625,0.08851294359045407,0.6602539009001271,0.0106779,0.9594825732686445,
+800,45640,45749,800.9686962807405,0.00725546,0.03395850454894159,0.6351886540218856,0.0113695,0.9469136556438106,
+775,45761,45867,774.9254167216259,0.0405641,0.26462890177314596,0.5795732381821767,0.0108079,0.6780459270819538,
+750,45870,46001,749.640919770665,0.0851918,0.5078605517835951,0.5483723198071216,0.0169032,0.5216975889602978,
+725,46005,46198,725.0510427144044,0.00737768,0.0517749598104805,0.030786464479270845,0.00222668,0.9879454344754861,
+700,46199,46341,700.6405464142084,0.0105896,0.059842972987602244,0.008120512933646525,0.000602217,0.7420281778384943,
+675,46343,46554,675.005323283144,0.0202381,0.19007015106394554,0.01230001346668989,0.000817726,0.912225302620249,
+650,46555,46811,649.8423959922646,0.0235123,0.20551676109380373,0.5160649786138212,0.0107911,0.9073360906256065,
+641,46812,47681,641.1018475428701,0.0122883,0.21109043492232069,0.0008309130037564666,4.10236e-05,0.9715629194351897,

+ 45 - 0
tables/HIGH2019.csv

@@ -0,0 +1,45 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+550.1,70029,70259,549.638,0.0663007,0.280388,0.274935,0.0279948,1,
+550,70260,70907,549.363,0.019394,0.0890673,0.258351,0.0182396,0.814542,
+537.5,70914,71177,537.436,0.00566752,0.00349823,0.356101,0.00939482,0.965038,
+562.5,71178,71533,562.494,0.0172484,0.139389,0.31619,0.0100748,0.85604,
+575,71544,71907,574.892,0.016154,0.140403,0.314098,0.0089282,0.980715,
+587.5,71908,72265,587.724,0.0058743,0.0331639,0.394022,0.00705043,0.991091,
+600,72268,72541,599.877,0.00745426,3.86235e-05,0.348355,0.00877072,0.987794,
+612.5,72545,72786,612.511,0.0127695,0.0814827,0.475934,0.0113512,0.981632,
+625,72787,73026,624.595,0.0107073,0.0655232,0.389257,0.00870368,0.97194,
+637.5,73035,73373,637.042,0.00715196,0.0274563,0.373373,0.0092595,0.989287,
+650,73374,73635,649.951,0.012057,0.0486828,0.386896,0.0137696,0.882919,
+662.5,73636,73904,662.288,0.00814357,0.0292113,0.431835,0.0111521,0.941118,
+675,73926,74168,674.786,0.007297,0.000168465,0.435948,0.0118251,0.982564,
+687.5,74169,74365,687.11,0.00879096,0.0297741,0.388357,0.0121959,0.81745,
+700,74366,74526,699.867,0.00837703,0.0145344,0.419534,0.0125211,0.789167,
+712.5,74528,74699,712.566,0.0126203,0.0582831,0.437665,0.0119425,0.860905,
+725,74704,74953,725.481,0.102983,0.766797,0.485621,0.0158389,0.895494,
+737.5,74974,75458,737.34,0.0125654,0.055601,0.446419,0.0143668,0.772609,
+750,75459,75729,750.029,0.0291795,0.228219,0.472031,0.0116743,0.991013,
+762,75730,76023,762.8,0.0114512,0.0350202,0.502847,0.0186054,0.978708,
+775,76033,76235,774.98,0.0169313,0.0396594,0.554082,0.0306657,0.980432,
+787.5,76243,76441,787.437,0.0260712,0.117679,0.564323,0.0373623,0.957005,
+800,76450,76722,799.845,0.0142841,0.0553969,0.522679,0.0210216,0.974891,
+812.5,76725,77112,812.632,0.0181982,0.132172,0.52459,0.0165079,0.982579,
+825,77131,77412,824.44,0.0179109,0.061278,0.566135,0.028869,0.972461,
+837.5,77414,77628,837.512,0.0154919,0.00164148,0.600292,0.0252827,1,
+850,77631,77818,850.234,0.0193719,0.090351,0.592528,0.0286106,0.972198,
+862.5,77854,78065,861.65,0.0492665,0.147801,0.749081,0.0980504,0.556681,
+875,78066,78269,874.767,0.0550212,0.172086,0.815923,0.0585761,0.959227,
+887.5,78274,78504,887.543,0.0310995,0.0371546,0.69612,0.0545942,0.463575,
+900,78506,78721,900.447,0.0289717,0.160659,0.633954,0.0346361,0.99304,
+912.5,78722,78930,913.285,0.028541,0.190395,0.684577,0.0263561,0.999964,
+925,78931,79200,925.622,0.0180181,0.0613705,0.690751,0.023061,1,
+936,79212,79624,936.305,0.0210551,0.127126,0.680784,0.0199239,0.976089,
+950,79625,80067,950.15,0.0139195,0.0846748,0.706137,0.0165572,0.983522,
+962.5,80068,80504,962.686,0.0276941,0.260275,0.703307,0.0228235,0.884825,
+978,80506,80602,977.158,0.251228,0.615237,0.833497,0.118106,0.25124,
+975,80603,81312,972.913,0.0162675,0.150318,0.748374,0.013539,0.960856,
+987.5,81313,81772,987.811,0.0675838,0.690008,0.771285,0.0177661,0.836185,
+955,81775,82096,956.031,0.0206554,0.177912,0.717321,0.014822,0.992361,
+951.1,82106,82527,951.049,0.0227074,0.186633,0.717008,0.018655,0.939457,
+945,82528,82891,944.817,0.0203412,0.0881124,0.712099,0.0260311,0.80758,
+641,82925,84499,640.926,0.00729434,0.129079,0.387114,0.00372279,0.924474,
+612.5,84504,84890,612.62,0.0161365,0.13155,0.43257,0.00580136,0.844596,

+ 6 - 0
tables/HIGH2020.csv

@@ -0,0 +1,6 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+935,90096,91877,934.703,0.0129016,0.216133,0.642976,0.00460083,0.95654,
+945,91878,93144,945.457,0.00991441,0.140275,0.67747,0.00488972,0.96865,
+950,93145,94248,950.29,0.0125024,0.192124,0.674069,0.00466723,0.9527,
+960,94254,95389,960.251,0.00784121,0.106528,0.698359,0.00509014,0.997763,
+970,95392,98116,970.583,0.0134545,0.187506,0.727239,0.00535968,0.951041,

+ 5 - 0
tables/HIGH2021.csv

@@ -0,0 +1,5 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+970,98116,99536,968.802,0.0165831,0.300855,0.822091,0.00705078,0.909945,
+980,99550,101917,980.252,0.0195106,0.429634,0.800201,0.00693799,0.945638,
+990,101920,103585,990.293,0.0264978,0.53045,0.818369,0.00698623,0.961646,
+1003.5,103608,106704,1003.5,0.00550833,0.128077,0.853587,0.00478313,0.973243,

+ 14 - 0
tables/LOW2020.csv

@@ -0,0 +1,14 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+300,85245,85423,300.606,0.00641231,0.0329717,0.108259,0.00520971,0.91427,
+290,85428,85652,290.251,0.00228283,4.95932e-05,0.116755,0.00433146,0.928954,
+280,85653,85962,280.126,0.00296568,0.0106434,0.124375,0.00470075,0.905473,
+270,85963,86253,270.084,0.00641153,0.0404854,0.099187,0.00566547,0.969203,
+260,86255,86672,260.313,0.0064703,0.0380788,0.174982,0.0060302,0.955329,
+250,86692,87003,248.921,0.0128619,1.07857,0.103695,0.0251274,0,indirect measurement #1
+240,87013,87287,240.446,0.00548711,0.0219266,0.138808,0.0088042,0.944286,
+230,87292,87532,230.235,0.00641154,0.0207778,0.125523,0.012595,0.77478,
+220,87537,87877,220.235,0.00753515,0.0260705,0.086855,0.0185095,0.818374,
+210,87881,88232,210.047,0.00589668,0.0231134,0.142367,0.0136008,0.728434,
+200,88233,88721,200.028,0.00577876,0.0377638,0.118357,0.0122888,0.888611,
+190,88729,89507,190.493,0.025811,0.128297,0.0833015,0.0201117,0.552124,
+180,89515,89843,180.175,0.00786639,0.0363723,0.185217,0.0239178,0.909541,

+ 4 - 0
tables/NNBAR2021.csv

@@ -0,0 +1,4 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+935,107490,108528,934.982,0.00722275,0.0914814,0.760591,0.00610962,0.996312,
+937.5,108538,109600,937.505,0.00646455,0.0660102,0.778134,0.00704537,0.994043,
+938.3,109615,110497,938.349,0.0100365,0.125928,0.754424,0.00633318,0.993779,

+ 18 - 0
tables/README.md

@@ -0,0 +1,18 @@
+В этой папке лежат файлы с результатами усреднений комптоновских измерений для 
+энергетических точек по сезонам.
+
+Структура csv таблицы:
+
+  * `energy_point` &ndash; название энергетической точки
+  * `first_run`, `last_run` &ndash; номера первого и последнего заходов, относящихся к данной энергетической точке
+  * `mean_energy` [MeV] &ndash; средневзвешенное значение комптоновских измерений энергии в данной точке
+  * `mean_energy_stat_err` [MeV] &ndash; статистическая ошибка в определении средней 
+энергии
+  * `mean_energy_sys_err` [MeV] &ndash; ошибка в определении средней энергии, связанная с разбросом средних энергий в разных комптоновских измерениях
+  * `mean_spread` [MeV] &ndash; средний разброс энергии по комптоновским измерениям
+  * `mean_spread_stat_err` [MeV] &ndash; статистическая ошибка в определении разброса энергий
+  * `used_lum` &ndash; доля светимости в точке, по которой удалось вычислить значения выше
+  * `comment` &ndash; комментарий (indirect measurement означает, что во время набора данных не было комптоновских измерений, поэтому значения оценивались по ближайшим комптоновским измерениям) 
+
+### Картинки
+картинки по точкам можно посмотреть на 
+[cmd.inp.nsk.su/~compton/pictures/](https://cmd.inp.nsk.su/~compton/pictures/)

+ 115 - 0
tables/RHO2018.csv

@@ -0,0 +1,115 @@
+energy_point,first_run,last_run,mean_energy,mean_energy_stat_err,mean_energy_sys_err,mean_spread,mean_spread_stat_err,used_lum,comment
+500,48945,49069,500.144,0.0260786,0.0393352,0.000239511,7.74073e-05,0.0953117,
+510,49070,49326,509.965,0.00603785,0.0295413,0.000186786,3.49826e-05,0.961457,
+500,49332,49371,499.901,0.0215006,0.0547759,0.000523511,5.58716e-05,0.905625,
+495,49372,49417,495.144,0.0319425,0.116181,0.000704623,0.000111993,0.897183,
+490,49418,49454,490.216,0.0207531,0.0368385,0.000588635,7.49284e-05,0.838545,
+485,49455,49525,485.248,0.0134813,0.0404236,0.000571811,3.7876e-05,0.929357,
+480,49530,49583,480.099,0.0206117,0.000253414,0.000577191,0.000100483,0.821404,
+475,49584,49672,474.923,0.0604198,0.1115,0.000557435,6.70181e-05,0.11702,
+470,49673,49723,470.117,0.0331187,0.114216,0.000154428,8.0256e-05,0.841292,
+465,49724,49797,465.255,0.00804977,0.00842335,0.27146,0.0147318,0.914422,
+460,49798,49860,460.364,0.00573452,2.50035e-05,0.268977,0.00935169,0.931375,
+455,49861,49909,455.086,0.00532421,0.00333177,0.24831,0.00918629,1,
+450,49910,49985,449.931,0.00581539,9.43312e-05,0.28265,0.0104235,1,
+445,49986,50040,444.909,0.00839996,0.00711218,0.0331676,0.00516524,1,
+440,50041,50089,440.103,0.0096356,0.0304674,0.247837,0.0104507,1,
+435,50090,50163,435.156,0.00457922,3.51178e-05,0.274097,0.00718675,0.965562,
+430,50164,50219,430.142,0.00615526,0.0122473,0.242905,0.0107584,0.973424,
+425,50220,50288,425.282,0.00621832,6.582e-05,0.250286,0.0131989,0.957687,
+420,50296,50344,420.03,0.00526541,0.000300497,0.2633,0.00974408,1,
+415,50345,50405,414.943,0.00970288,0.0286488,0.290036,0.0105052,0.75158,
+410,50406,50470,410.157,0.00864265,0.02904,0.245605,0.00854222,0.941655,
+405,50477,50535,405.305,0.00490703,0.00363857,0.226352,0.0101944,1,
+400,50542,50625,400.355,0.010664,0.0472261,0.236772,0.00990441,0.89385,
+399,50626,50660,398.986,0.00852924,8.47223e-05,0.230884,0.0183237,0.946361,
+398,50662,50713,398.057,0.00896271,0.0206826,0.242111,0.0163762,1,
+397,50714,50762,397.059,0.0119493,0.0356662,0.12183,0.00646653,1,
+396,50765,50812,396.079,0.00749244,0.0143637,0.248785,0.014194,0.96163,
+395,50813,50857,395.09,0.00949762,0.0278998,0.257464,0.0141877,0.950891,
+394,50858,50945,394.045,0.00485238,0.000131581,0.261941,0.00998457,1,
+393,50946,51028,393.077,0.00510637,0.00589278,0.250664,0.010247,1,
+392.5,51029,51105,392.531,0.0141172,0.0468011,0.239846,0.008311,0.654484,
+392,51106,51178,392.024,0.00493546,8.44291e-05,0.256436,0.00814911,0.479595,
+391.5,51179,51235,391.438,0.00829504,0.031291,0.247603,0.00672782,0.957737,
+391,51243,51320,390.972,0.0044467,0.0144519,0.220865,0.00682263,0.966713,
+390.5,51321,51415,390.508,0.00557435,0.0245358,0.211423,0.00695454,0.926714,
+390,51417,51472,390.027,0.00949123,0.0321351,0.256475,0.00900011,1,
+389.5,51473,51546,389.55,0.00757175,0.0238786,0.250213,0.00878439,0.864022,
+389,51547,51615,389.048,0.00671223,0.0217917,0.248339,0.00915724,0.904769,
+388.5,51616,51671,388.503,0.0114513,0.0363645,0.283674,0.0119368,0.968652,
+388,51672,51738,388.041,0.00473721,5.53719e-05,0.240657,0.00998262,0.929628,
+387.5,51739,51794,387.57,0.00530088,0.00588495,0.232771,0.0114369,0.924798,
+387,51795,51858,386.926,0.00768716,0.0254016,0.2413,0.0109364,0.940899,
+386,51859,51939,385.956,0.00509487,0.0175767,0.248399,0.00861338,0.972709,
+384,51940,52043,383.927,0.00830925,0.0312469,0.340486,0.00890649,0.81049,
+382,52044,52135,381.988,0.00487954,1.72965e-05,0.214353,0.0116719,0.859147,
+380,52136,52217,380.039,0.00560125,0.00760631,0.231447,0.0107367,0.840849,
+378,52218,52266,378.063,0.00170804,1.14433e-05,0.151742,0.0103385,0.904964,
+376,52267,52336,376.014,0.00704189,0.0153622,0.244712,0.0123848,0.644815,
+374,52337,52404,374.035,0.00503214,0.00764206,0.131805,0.00433703,0.971379,
+372,52405,52482,372.048,0.00585771,0.000114459,0.000287896,0.000123646,0.787101,
+370,52483,52534,369.987,0.00655208,0.00459809,0.192979,0.0144316,0.901959,
+368,52535,52766,367.912,0.0552728,0.204046,0.14924,0.0153852,0.547875,
+366,52768,52872,366.043,0.00610471,0.0177866,0.224486,0.0106309,0.970404,
+364,52874,52920,364.114,0.00189033,1.29926e-05,0.174563,0.0158281,0.976119,
+362,52921,52972,362.126,0.00949279,0.0249448,0.193938,0.0123343,0.974019,
+360,52973,53016,360.169,0.00818882,0.0190195,0.190001,0.0134369,0.952193,
+355,53018,53088,355.2,0.00459494,9.65488e-05,0.214824,0.00954534,0.964508,
+350,53089,53268,350.04,0.00586362,0.0328489,0.00025529,0.000132122,0.84817,
+345,53270,53417,345.096,0.00519682,0.0213099,0.209941,0.00673091,0.968757,
+340,53418,53510,340.099,0.0065723,0.00080052,0.000276593,9.12863e-05,0.987847,
+335,53514,53571,335.112,0.015831,0.0449148,0.205466,0.0144162,0.931061,
+330,53573,53670,330.174,0.0130361,0.000108389,0.160667,0.033001,0.273711,
+325,53672,53752,325.351,0.0229785,0.0747676,0.2183,0.00862256,0.24101,
+320,53753,53853,320.015,0.00383937,0.0166544,-0.000225756,6.77947e-05,0.891769,
+315,53854,53926,315.061,0.00470642,0.0115102,0.193227,0.0107325,0.839201,
+310,53931,54050,310.099,0.00365711,0.00854301,0.170494,0.00837891,0.872218,
+305,54051,54212,305.023,0.002721,0.00942049,0.000248592,4.51705e-05,0.900496,
+300,54213,54357,300.065,0.00314151,0.00762229,8.3077e-05,9.66731e-05,0.651113,
+295,54358,54435,295.075,0.00594495,0.0215788,0.000644166,4.7051e-05,0.874837,
+290,54437,54636,290.046,0.00405989,0.0227608,0.000150508,3.69662e-05,0.782923,
+285,54646,54781,285.077,0.00248285,0.00527213,0.000142773,2.94817e-05,0.872061,
+280,54784,54873,280.117,0.00382663,0.00947287,0.000218142,5.95967e-05,0.736618,
+274,54874,55739,273.883,0.00816359,0.124995,0.000203556,4.9428e-05,0.934691,
+345,55742,55807,344.849,0.00559847,0.00950317,0.226731,0.00960413,0.97035,
+350,55808,55864,349.876,0.00690237,3.74539e-05,0.258613,0.00994397,1,
+355,55871,55927,355.006,0.00522873,0.00723489,0.225227,0.00974424,0.89313,
+360,55928,56604,359.991,0.00183108,0.0137816,0.263769,0.00285386,0.903557,
+362,56605,56801,361.917,0.00322101,0.00508922,0.00377307,0.000688244,0.91619,
+364,56802,56842,363.888,0.00980191,0.0205997,0.252519,0.0114587,1,
+366,56843,56878,366.026,0.00719299,0.000123635,0.296246,0.0131157,0.934441,
+368,56881,56937,368.096,0.00619664,0.0120174,0.28544,0.00818785,0.916029,
+375,56938,58401,375.035,0.00129892,0.0184541,0.273629,0.00156284,0.987357,
+455,58402,58740,454.722,0.0140888,0.096667,0.33354,0.00869757,0.984618,
+465,58741,59121,464.867,0.0052504,0.0236399,0.0151718,0.00132601,0.933954,
+492,59126,59353,492.156,0.0208972,0.0960113,0.319064,0.00978686,0.948351,
+501,59358,59482,500.969,0.0065235,8.18709e-05,0.304386,0.00841466,0.937453,
+503,59483,59859,503.014,0.00434228,6.91568e-05,0.328086,0.00590828,0.988004,
+505,59872,60039,504.834,0.00589338,0.000174959,0.322438,0.0075964,0.967137,
+508,60040,60153,507.91,0.00682083,0.00864562,0.348244,0.00920077,0.935678,
+508.5,60154,60498,508.43,0.00968832,0.0505388,0.00132405,0.000476166,0.983852,
+509,60501,60741,508.984,0.00842494,5.73358e-05,0.325295,0.0138562,1,
+509.5,60742,61379,509.58,0.0040533,0.0181789,0.000818296,0.000242456,0.965065,
+510,61380,61857,510.003,0.00558801,0.0299611,0.361244,0.00582829,0.961663,
+510.5,61859,62076,510.498,0.00643359,0.00452802,0.36088,0.00856945,0.955015,
+511,62079,62311,511.076,0.00569824,0.00021046,0.338628,0.00703809,0.966491,
+511.5,62316,62432,511.493,0.00711271,0.0103626,0.360404,0.0101193,1,
+514,62438,62530,513.898,0.00926634,0.0209615,0.336616,0.0128447,1,
+517,62531,62642,516.941,0.00825726,0.00460242,0.3445,0.0116452,1,
+520,62643,62740,519.941,0.0118082,0.0267359,0.365273,0.0142929,1,
+525,62741,62829,524.941,0.0147226,9.89338e-05,0.379235,0.0198808,0.765222,
+530,62830,62908,530.044,0.01774,0.0488015,0.345866,0.0190425,0.993672,
+550,62909,63137,550.025,0.0089993,0.000153024,0.364789,0.0110716,0.968073,
+575,63138,63363,574.84,0.0141894,0.0502533,0.362344,0.0130809,0.667065,
+600,63364,63713,599.63,0.0116655,0.0771709,0.00206265,0.000619406,0.899776,
+460,63714,63983,460.311,0.0415775,0.295595,0.319782,0.00542116,0.843029,
+380,64017,64297,380.253,0.029548,0.308822,0.293221,0.00414334,0.961146,
+387.5,64298,64526,387.538,0.0062277,0.0200727,0.302868,0.0108342,0.715111,
+389,64527,64813,389.008,0.00425306,0.0224994,0.305103,0.00497574,0.859865,
+390.5,64814,65640,390.538,0.00215066,0.0221182,0.314895,0.00191266,0.956519,
+391.5,65641,66733,391.408,0.00136681,0.00970632,0.324171,0.00163052,0.969349,
+392.5,66741,67650,392.533,0.00547645,0.0731895,0.320238,0.00202801,0.953672,
+394,67651,67952,393.901,0.0181466,0.151346,0.0098515,0.000668337,0.892092,
+396,67956,68086,395.954,0.00351119,2.06964e-05,0.312287,0.00492272,0.913018,
+340,68112,69071,339.875,0.00553352,0.0913012,0.248007,0.00187836,0.924611,