Browse Source

Add new feature. Update calibration db with Muchnoi reanalysis files

Author: compton — 2 years ago
Commit: e84d413028 (with parent)
2 changed files with 83 additions and 11 deletions:
  1. compton_combiner.py — 9 additions, 1 deletion
  2. compton_filter.py — 74 additions, 10 deletions

compton_combiner.py (+9 −1)

@@ -6,6 +6,7 @@ from configparser import ConfigParser
 from datetime import datetime, timedelta, timezone
 from datetime import datetime, timedelta, timezone
 import logging
 import logging
 import os
 import os
+import sys
 import sqlite3
 import sqlite3
 from typing import Union, Tuple, Optional
 from typing import Union, Tuple, Optional
 
 
@@ -39,6 +40,8 @@ class RunsDBHandler():
         
         
     def load_tables(self, range: Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]], energy_point: Optional[float] = None, select_bad_runs: bool = False):
     def load_tables(self, range: Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]], energy_point: Optional[float] = None, select_bad_runs: bool = False):
         """
         """
+        Returns a slice of the table with following fields: run, starttime, stoptime, energy, luminosity
+            
         Parameters
         Parameters
         ----------
         ----------
         range : Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]]
         range : Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]]
@@ -537,6 +540,10 @@ def save_csv(df: pd.DataFrame, filepath: str, update_current: bool = True):
 
 
 # python scripts/compton_combiner.py -s NNBAR2021 -c database.ini --csv_dir . --clbrdb
 # python scripts/compton_combiner.py -s NNBAR2021 -c database.ini --csv_dir . --clbrdb
 def main():
 def main():
+    log_format = '[%(asctime)s] %(levelname)s: %(message)s'
+    logging.basicConfig(stream=sys.stdout, format=log_format, level=logging.INFO) #"filename=compton_combiner.log"
+    logging.info("compton_combiner is started")
+    
     parser = argparse.ArgumentParser(description = 'Mean compton energy measurements from clbrdb')
     parser = argparse.ArgumentParser(description = 'Mean compton energy measurements from clbrdb')
     parser.add_argument('-s', '--season', help = 'Name of the season')
     parser.add_argument('-s', '--season', help = 'Name of the season')
     parser.add_argument('-c', '--config', help = 'Config file containing information for access to databases')
     parser.add_argument('-c', '--config', help = 'Config file containing information for access to databases')
@@ -548,7 +555,8 @@ def main():
     parser.add_argument('--old_averaging', action = 'store_true', help = 'Use old incomplete <E> = \frac{\sum{L_i E_i}{\sum{L_i}} averaging')
     parser.add_argument('--old_averaging', action = 'store_true', help = 'Use old incomplete <E> = \frac{\sum{L_i E_i}{\sum{L_i}} averaging')
     
     
     args = parser.parse_args()
     args = parser.parse_args()
-    # logging.info(f"Arguments: season: {args.season}, config {args.config}")
+    logging.info(f"""Arguments: season {args.season}, config {args.config}, csv_dir {args.csv_dir}, save_to_clbrdb {args.clbrdb},
+        pics_folder {args.pics_folder}, detailed_csv_folder {args.energy_point_csv_folder}, only_last {args.only_last}, old_average: {args.old_averaging}""")
 
 
     parser = ConfigParser()
     parser = ConfigParser()
     parser.read(args.config);
     parser.read(args.config);

compton_filter.py (+74 −10)

@@ -10,8 +10,13 @@ from typing import Tuple, List, Dict, Union, Optional
 import warnings
 import warnings
 import logging
 import logging
 
 
-import psycopg2
-from psycopg2.extras import execute_values
+try:
+    import psycopg2
+    from psycopg2.extras import execute_values
+except ImportError:
+    sys.path = list(filter(lambda x: "python2.7" not in x, sys.path))
+    import psycopg2
+    from psycopg2.extras import execute_values
 
 
 class PostgreSQLHandler():
 class PostgreSQLHandler():
     """A common class for processing postgresql databases
     """A common class for processing postgresql databases
@@ -33,7 +38,7 @@ class PostgreSQLHandler():
         
         
         self.conn = psycopg2.connect(host = host, database = database, user = user, password = password)
         self.conn = psycopg2.connect(host = host, database = database, user = user, password = password)
         self.cur = self.conn.cursor()
         self.cur = self.conn.cursor()
-        logging.info("PostgreSQL Hander created")
+        logging.info("PostgreSQL Handler created")
         
         
     @property
     @property
     def list_tables(self) -> List[str]:
     def list_tables(self) -> List[str]:
@@ -229,7 +234,7 @@ class CalibrdbHandler(PostgreSQLHandler):
         
         
     def update(self, new_rows: list, system: str = "Misc", algo: str = "RunHeader", 
     def update(self, new_rows: list, system: str = "Misc", algo: str = "RunHeader", 
                name: str = "Compton_run", version: str = 'Default', handle_last_time_row: bool = False):
                name: str = "Compton_run", version: str = 'Default', handle_last_time_row: bool = False):
-        """Writes new_rows in clbrdb
+        """Writes new_rows in clbrdb (for raw compton measurements)
         
         
         Parameters
         Parameters
         ----------
         ----------
@@ -240,7 +245,9 @@ class CalibrdbHandler(PostgreSQLHandler):
             update current values or not: replace all values in interval from min(begintime in new_rows) to max(endtime in new_rows)
             update current values or not: replace all values in interval from min(begintime in new_rows) to max(endtime in new_rows)
         """
         """
         
         
+        logging.info(f"Update {system}/{algo}/{name} is running...")
         if len(new_rows) == 0:
         if len(new_rows) == 0:
+            logging.info("Success. Nothing new.")
             return
             return
         
         
         sid = self.select_table(system, algo, name, version)
         sid = self.select_table(system, algo, name, version)
@@ -253,12 +260,12 @@ class CalibrdbHandler(PostgreSQLHandler):
         
         
         insert_query = """INSERT INTO clbrdata (sid, createdby, time, begintime, endtime, data) VALUES %s;"""
         insert_query = """INSERT INTO clbrdata (sid, createdby, time, begintime, endtime, data) VALUES %s;"""
         execute_values(self.cur, insert_query, new_rows, fetch=False)
         execute_values(self.cur, insert_query, new_rows, fetch=False)
-        logging.info(f"Inserted {len(new_rows)} new rows")
+        logging.info(f"Success. Inserted {len(new_rows)} new rows")
         return
         return
     
     
     def insert(self, new_rows: list, system: str, algo: str, name: str, version: str, 
     def insert(self, new_rows: list, system: str, algo: str, name: str, version: str, 
                update: bool = True, comment: Optional[str] = None):
                update: bool = True, comment: Optional[str] = None):
-        """Insert new_rows in the table
+        """Insert new_rows in the table (for average by energy points)
         
         
         Parameters
         Parameters
         ----------
         ----------
@@ -369,7 +376,57 @@ class CalibrdbHandler(PostgreSQLHandler):
         logging.info("del clbr class")
         logging.info("del clbr class")
         self.cur.close()
         self.cur.close()
         self.conn.close()
         self.conn.close()
+
+def processing_from_file(path):
+    """Processes text files (with names like 'vepp2k.edge.txt') that represent compton reanalyses by N.Muchnoi
+    
+    Parameters
+    ----------
+    path : Union[str, list]
+        path to the file/files, can be mask if str 
+    
+    Returns
+    -------
+    List[Tuple[datetime, Decimal, Decimal, Decimal, Decimal, datetime, datetime]]
+        list of tuples representing a single compton measurement with fields:
+        writetime, energy_mean, energy_err, spread_mean, spread_err, starttime, stoptime
+    """
+    
+    logging.info("Reading from files is running...")
+    from glob import glob
+    from decimal import Decimal
+    from datetime import datetime, timedelta, timezone
+    if isinstance(path, str):
+        files = glob(path)
+    else:
+        files = path
+    logging.info(f"Handle {len(files)} files")
+    rows = []
+    current_timezone = timedelta(hours=7)
+    # colnames = ('t', 'dt', 'E',  'dE', 'S', 'dS', 'B', 'dB', 'I', 'dI') # columns names in the text files
+    
+    def preprocess_row(row):
+        row = row.strip().split()
+
+        timestamp_mean, dt = int(row[0]), int(row[1])
+        timetamp_to_date = lambda timestamp: (datetime.utcfromtimestamp(timestamp) + current_timezone).astimezone(timezone(current_timezone))
+        t_start = timetamp_to_date(timestamp_mean - dt)
+        t_stop = timetamp_to_date(timestamp_mean + dt)
+        
+        t_write = t_stop
+
+        e_mean, de = Decimal(row[2]), Decimal(row[3])
+        s_mean, ds = Decimal(row[4])*Decimal('0.001'), Decimal(row[5])*Decimal('0.001') # keV to MeV
+
+        return (t_write, e_mean, de, s_mean, ds, t_start, t_stop)
     
     
+    for file in files:
+        with open(file, 'r') as f:
+            for row in f:
+                if not(row.startswith('#')):
+                    rows.append(preprocess_row(row))
+    logging.info(f"Success files reading. {len(rows)} in result.")
+    return rows
     
     
 def main():
 def main():
     log_format = '[%(asctime)s] %(levelname)s: %(message)s'
     log_format = '[%(asctime)s] %(levelname)s: %(message)s'
@@ -377,12 +434,16 @@ def main():
     logging.info("Program started")
     logging.info("Program started")
     
     
     parser = argparse.ArgumentParser(description = 'Filter compton energy measurements from slowdb')
     parser = argparse.ArgumentParser(description = 'Filter compton energy measurements from slowdb')
-    parser.add_argument('--season', help = 'Name of compton measurement table from slowdb')
     parser.add_argument('--config', help = 'Config file containing information for access to databases')
     parser.add_argument('--config', help = 'Config file containing information for access to databases')
+    parser.add_argument('--season', help = 'Name of compton measurement table from slowdb')
     parser.add_argument('--update', action = 'store_true', help = 'Writes only newest values into the db')
     parser.add_argument('--update', action = 'store_true', help = 'Writes only newest values into the db')
+    parser.add_argument('--files', nargs='*', help = """Mask to the path to the files like vepp2k.edge.txt. It has a higher priority than season. 
+                Update flag will be set as True if using this option.""")
     
     
     args = parser.parse_args()
     args = parser.parse_args()
-    logging.info(f"Arguments: season: {args.season}, config {args.config}, update {args.update}")
+    if args.files is not None:
+        args.update = True
+    logging.info(f"Arguments: config {args.config}, season: {args.season}, update {args.update}, files {args.files}")
 
 
     parser = ConfigParser()
     parser = ConfigParser()
     parser.read(args.config);
     parser.read(args.config);
@@ -392,8 +453,11 @@ def main():
     last_written_row, _ = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run', num_last_rows = 1, return_timezone = True)
     last_written_row, _ = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run', num_last_rows = 1, return_timezone = True)
     last_time = last_written_row[0][3] if (len(last_written_row) > 0) and (args.update) else None
     last_time = last_written_row[0][3] if (len(last_written_row) > 0) and (args.update) else None
     
     
-    compton_slowdb = SlowdbComptonHandler(**parser['postgresql'])
-    res = compton_slowdb.load_tables([args.season], last_time)
+    if args.files is None:
+        compton_slowdb = SlowdbComptonHandler(**parser['postgresql'])
+        res = compton_slowdb.load_tables([args.season], last_time)
+    else:
+        res = processing_from_file(args.files)
     
     
     clbrdb.update(res, handle_last_time_row = args.update)
     clbrdb.update(res, handle_last_time_row = args.update)
     clbrdb.commit()
     clbrdb.commit()