
Set NSK timezones

compton, 3 years ago
commit 2f5ed4d9a5
3 changed files with 86 additions and 29 deletions
  1. bad_comptons.txt (+6 -7)
  2. compton_combiner.py (+15 -2)
  3. compton_filter.py (+65 -20)

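For context, NSK here means Novosibirsk local time, UTC+07:00. A minimal standard-library sketch of the convention this commit switches to (the NSK constant below is illustrative only and is not part of the code):

    from datetime import datetime, timedelta, timezone

    # Novosibirsk (NSK) local time is UTC+07:00; illustrative constant only
    NSK = timezone(timedelta(hours=7), 'NSK')

    utc_dt = datetime(2021, 5, 28, 2, 0, tzinfo=timezone.utc)
    print(utc_dt.astimezone(NSK))  # 2021-05-28 09:00:00+07:00
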
+ 6 - 7
bad_comptons.txt

@@ -1,7 +1,6 @@
-1622157426 #incorrect laser line
-1622155918 #incorrect laser line
-1622158934 #incorrect laser line
-1622160433 #incorrect laser line
-1622161930 #incorrect laser line
-1622163439 #incorrect laser line
-1583279091 #disrepancy with energy label
+1622188639 #incorrect laser line
+1622187130 #incorrect laser line
+1622185633 #incorrect laser line
+1622184134 #incorrect laser line
+1622182626 #incorrect laser line
+1622181118 #incorrect laser line

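The six replaced identifiers appear to be the same entries shifted forward by exactly seven hours (25200 s), consistent with switching these keys from UTC-based to NSK-based times; the remaining removed line, flagged with an energy-label discrepancy, is simply dropped. A quick illustrative check:

    old_ids = [1622157426, 1622155918, 1622158934, 1622160433, 1622161930, 1622163439]
    new_ids = [1622188639, 1622187130, 1622185633, 1622184134, 1622182626, 1622181118]

    SEVEN_HOURS = 7 * 3600  # 25200 s, the UTC+07:00 offset
    assert sorted(new_ids) == sorted(i + SEVEN_HOURS for i in old_ids)
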
+ 15 - 2
compton_combiner.py

@@ -3,7 +3,7 @@
 
 import argparse
 from configparser import ConfigParser
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 import sqlite3
 from typing import Union, Tuple, Optional
 
@@ -25,7 +25,7 @@ class RunsDBHandler():
     def __init__(self, host: str = 'cmddb', database: str = 'online', user: str = None, password: str = None):
         self.conn = connect(host = host, database = database, user = user, password = password)
         self.cur = self.conn.cursor()
-        self.cur.execute("SET time_zone = 'UTC';")
+        self.cur.execute("SET time_zone = '+07:00';")
         
     @property
     def fields(self) -> list:
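
With the session time zone set to '+07:00', MySQL converts TIMESTAMP values to NSK local time on retrieval instead of UTC. A hedged sketch, assuming a mysql.connector-style DB-API driver and a hypothetical table and column (neither is part of this commit):

    from mysql.connector import connect  # assumption: the driver behind RunsDBHandler

    conn = connect(host='cmddb', database='online', user='<user>', password='<password>')
    cur = conn.cursor()
    cur.execute("SET time_zone = '+07:00';")            # session-level NSK offset
    cur.execute("SELECT starttime FROM rundb LIMIT 1")  # hypothetical table/column
    print(cur.fetchone())  # TIMESTAMP values are now rendered in UTC+07:00 rather than UTC
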
@@ -36,6 +36,14 @@ class RunsDBHandler():
         return self.cur.fetchall()
         
     def load_tables(self, range: Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]]):
+        """
+        Parameters
+        ----------
+        range : Union[Tuple[int, Optional[int]], Tuple[datetime, datetime]]
+            selection range
+            int range defines an interval in runs
+            datetime range defines a time interval (NSK local time, UTC+07:00)
+        """
         
         cond = ""
         if isinstance(range[0], int):
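
An illustrative call matching the docstring above; the dates and run numbers are placeholders, and the constructor arguments follow the defaults shown earlier in this file:

    from datetime import datetime

    # RunsDBHandler as defined in compton_combiner.py; credentials are placeholders
    rdb = RunsDBHandler(host='cmddb', database='online', user='<user>', password='<password>')

    # a datetime pair is interpreted as NSK (+07:00) local time after the time_zone change above
    rdb.load_tables((datetime(2021, 5, 28, 9, 0), datetime(2021, 5, 29, 9, 0)))

    # an int pair selects an interval in run numbers instead (values are hypothetical)
    rdb.load_tables((55000, 55100))
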
@@ -368,6 +376,11 @@ def main():
     
     cdf = process_combined(comb_df, runs_df, compton_df, './pics')
     cdf.to_csv(f'{args.season}.csv', index=False, float_format='%g')
+    
+    # df_to_clbrdb = cdf.loc[(cdf.comment=='')|((cdf.comment!='')&((cdf.mean_energy.astype(float) - cdf.energy_point).abs()<5))].drop(['comment', 'used_lum'],
+    #                                                                                                                   axis=1).values.tolist()
+    # clbrdb.insert([(args.season, df_to_clbrdb)], 'Misc', 'RunHeader', 'Compton_run_avg', 'Default')
+    # clbrdb.commit()
     return 
 
 if __name__ == "__main__":

+ 65 - 20
compton_filter.py

@@ -4,7 +4,7 @@ Script to fill calibration database with filtering slowdb compton measurements
 
 import argparse
 from configparser import ConfigParser
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Tuple, List, Dict, Union, Optional
 import warnings
 import logging
@@ -120,32 +120,31 @@ class SlowdbComptonHandler(PostgreSQLHandler):
             names of tables in the slowdb compton measurements database 
             (full list of available tables can be seen with the property tables)
         daterange : Optional[datetime]
-            minimum time for selection (UTC)
+            minimum time for selection (should contain timezone)
         
         Returns
         -------
         Union[pd.DataFrame, list]
             table containing compton energy measurements with fields:
-            write_time - time when the row was written (UTC)
+            write_time - time when the row was written (contains timezone)
             mean_energy - compton mean of the energy measurement [MeV]
             std_energy - compton std of the energy measurement [MeV]
             mean_spread - compton mean of the spread measurement [MeV]
             std_spread - compton std of the spread measurement [MeV]
-            start_time - beginning time of the compton measurement (UTC)
-            end_time - end time of the compton measurement (UTC)
+            start_time - beginning time of the compton measurement (contains timezone)
+            end_time - end time of the compton measurement (contains timezone)
         """
         
-        time_condition = f"AND time>(%(date)s AT TIME ZONE 'UTC')" if daterange is not None else ""
+        time_condition = f"AND time>(%(date)s)" if daterange is not None else ""
         
         sql_query = lambda table: f"""SELECT 
-            time AT TIME ZONE 'UTC' AS time, 
+            time AS time, 
             CAST(values_array[1] AS numeric) AS mean_energy, 
             CAST(values_array[2] AS numeric) AS std_energy, 
             ROUND(CAST(values_array[5]/1000 AS numeric), 6) AS mean_spread,
             ROUND(CAST(values_array[6]/1000 AS numeric), 6) AS std_spread, 
-            date_trunc('second', time AT TIME ZONE 'UTC' + (values_array[8] * interval '1 second')) AS start_time, 
-            date_trunc('second', time AT TIME ZONE 'UTC' + 
-            (values_array[8] * interval '1 second') + (values_array[7] * interval '1 second')) AS stop_time
+            date_trunc('second', time + (values_array[8] * interval '1 second')) AS start_time, 
+            date_trunc('second', time + (values_array[8] * interval '1 second') + (values_array[7] * interval '1 second')) AS stop_time
             FROM {table} WHERE g_id=43 AND dt>0 {time_condition}"""
         
         full_sql_query = '\nUNION ALL\n'.join([sql_query(table) for table in tables]) + '\nORDER BY time;'
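
Because the query no longer applies AT TIME ZONE 'UTC', the daterange passed in should itself carry a timezone, as the updated docstring says. An illustrative call; the cut-off date is a placeholder and the connection parameters come from the config file as in main():

    from datetime import datetime, timedelta, timezone

    NSK = timezone(timedelta(hours=7))  # illustrative +07:00 constant

    # parser: ConfigParser already loaded from the --config file, as in main()
    slowdb = SlowdbComptonHandler(**parser['postgresql'])
    since = datetime(2021, 5, 28, 9, 0, tzinfo=NSK)  # timezone-aware lower bound (placeholder)
    slowdb.load_tables(['cmd3_2021_2'], since)
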
@@ -193,7 +192,7 @@ class CalibrdbHandler(PostgreSQLHandler):
         sid = result[0][0]
         return sid
     
-    def load_table(self, system: str, algo: str, name: str, version: str = 'Default', num_last_rows: Optional[int] = None, timerange: Optional[Tuple[datetime, datetime]] = None) -> Tuple[list, list]:
+    def load_table(self, system: str, algo: str, name: str, version: str = 'Default', num_last_rows: Optional[int] = None, timerange: Optional[Tuple[datetime, datetime]] = None, return_timezone: bool = False) -> Tuple[list, list]:
         """Loads the calibration table
         
         Parameters
@@ -209,7 +208,9 @@ class CalibrdbHandler(PostgreSQLHandler):
         num_last_rows : Optional[int]
             the number of last rows of the table
         timerange : Optional[Tuple[datetime, datetime]]
-            time range (UTC) condition on the selection of the table (default is None)
+            time range condition on the selection of the table (default is None)
+        return_timezone : bool
+            return timezone in output datetimes as a field or not (default is False)
         
         Returns
         -------
@@ -219,7 +220,14 @@ class CalibrdbHandler(PostgreSQLHandler):
         
         sid = self.select_table(system, algo, name, version)
         time_condition = "AND begintime BETWEEN %s AND %s" if timerange is not None else ""
-        sql_query = f"""SELECT * FROM clbrdata WHERE sid={sid} {time_condition} ORDER BY time DESC """
+        tzone = "AT TIME ZONE 'ALMST'" if return_timezone else ''
+        sql_query = f"""SELECT 
+        cid, sid, createdby, 
+        time {tzone} AS time, 
+        begintime {tzone} AS begintime, 
+        endtime {tzone} AS endtime, 
+        comment, parameters, data
+        FROM clbrdata WHERE sid={sid} {time_condition} ORDER BY time DESC """
         if num_last_rows is not None:
             sql_query += f"LIMIT {num_last_rows}"
             
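An illustrative use of the new return_timezone flag; with it set, the time columns are passed through AT TIME ZONE 'ALMST' (an abbreviation for UTC+07:00) instead of being returned as stored:

    # parser: ConfigParser already loaded from the --config file, as in main()
    clbrdb = CalibrdbHandler(**parser['clbrDB'])
    rows, fields = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run',
                                     num_last_rows=1, return_timezone=True)
    # each row holds cid, sid, createdby, time, begintime, endtime, comment, parameters, data;
    # with return_timezone=True the time columns are converted through AT TIME ZONE 'ALMST'
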
@@ -238,9 +246,10 @@ class CalibrdbHandler(PostgreSQLHandler):
         sid = self.select_table(system, algo, name, version)
         
         new_rows = list(map(lambda x: (sid, 'lxeuser', x[0], x[5], x[6], [x[1], x[2], x[3], x[4]]), new_rows))
+        # print(new_rows[0])
         
         if handle_last_time_row:
-            last_written_row, _ = self.load_table(system, algo, name, version, num_last_rows = 1)
+            last_written_row, _ = self.load_table(system, algo, name, version, num_last_rows = 1, return_timezone = True)
             if len(last_written_row) > 0:
                 if last_written_row[0][5] > new_rows[0][3]:
                     logging.info('Removing of overlapping written row')
@@ -250,6 +259,41 @@ class CalibrdbHandler(PostgreSQLHandler):
         execute_values(self.cur, insert_query, new_rows, fetch=False)
         logging.info(f"Inserted {len(new_rows)} new rows")
         return
+    
+    def insert(self, new_rows: List[Tuple[str, list]], system: str, algo: str, name: str, version: str, update: bool = True):
+        """Insert new_rows in the table
+        
+        Parameters
+        ----------
+        new_rows : List[Tuple[str, list]]
+            list of new rows (tuples) in the following format (comment: str, data: list)
+        update : bool
+            if True, close out existing open rows with the same comment (season) before inserting (default is True)
+        """
+        
+        sid = self.select_table(system, algo, name, version)
+        
+        time_now, dlt0 = datetime.utcnow(), timedelta(days=5000)
+        if update:
+            update_query = f"""UPDATE clbrdata SET endtime = %s
+            WHERE comment = %s AND endtime > %s AND sid = {sid} 
+            """
+            for row in new_rows:
+                season = row[0]
+                self.cur.execute(update_query, (time_now, season, time_now))
+        
+        insert_query = """INSERT INTO clbrdata (sid, createdby, time, begintime, endtime, comment, data) VALUES %s;"""
+        insert_rows = list(map(lambda x: (sid, 'lxeuser', time_now, time_now, time_now + dlt0, x[0], x[1]), new_rows))
+        
+        execute_values(self.cur, insert_query, insert_rows, fetch=False)        
+        logging.info(f"Inserted {len(insert_rows)} rows into table: {system}/{algo}/{name}/{version}")
+        return
+    
+    def clear_table(self, sid: int, createdby: str):
+        delete_query = f"""DELETE FROM clbrdata WHERE sid = %s AND createdby = %s"""
+        logging.info(f"Clear ({sid}, {createdby}) table")
+        self.cur.execute(delete_query, (sid, createdby))
+        return
         
     def delete_row(self, sid: int, createdby: str, time: datetime):
         delete_query = f"""DELETE FROM clbrdata 
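
The new insert() mirrors the commented-out call added to compton_combiner.py above: rows are (comment, data) tuples keyed by season. An illustrative sketch; the data payload is hypothetical (the real one is built from process_combined() in compton_combiner.py):

    # parser: ConfigParser already loaded from the --config file, as in main()
    clbrdb = CalibrdbHandler(**parser['clbrDB'])

    # hypothetical averaged payload for one season
    season_rows = [('cmd3_2021_2', [[55001, 935.274, 0.032], [55002, 936.105, 0.041]])]

    clbrdb.insert(season_rows, 'Misc', 'RunHeader', 'Compton_run_avg', 'Default', update=True)
    clbrdb.commit()
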
@@ -297,25 +341,26 @@ def main():
     parser = argparse.ArgumentParser(description = 'Filter compton energy measurements from slowdb')
     parser.add_argument('--season', help = 'Name of compton measurement table from slowdb')
     parser.add_argument('--config', help = 'Config file containing information for access to databases')
+    parser.add_argument('--update', action = 'store_true', help = 'Only update table with new values')
     
     args = parser.parse_args()
-    logging.info(f"Arguments: season: {args.season}, config {args.config}")
+    logging.info(f"Arguments: season: {args.season}, config {args.config}, update {args.update}")
 
     parser = ConfigParser()
     parser.read(args.config);
     logging.info("Config parsed")
     
     clbrdb = CalibrdbHandler(**parser['clbrDB'])
-    last_written_row, _ = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run', num_last_rows = 1)
-    last_time = last_written_row[0][3] if len(last_written_row) > 0 else None
+    last_written_row, _ = clbrdb.load_table('Misc', 'RunHeader', 'Compton_run', num_last_rows = 1, return_timezone = True)
+    last_time = last_written_row[0][3] if (len(last_written_row) > 0) and (args.update) else None
     
     compton_slowdb = SlowdbComptonHandler(**parser['postgresql'])
-    res = compton_slowdb.load_tables(['cmd3_2021_2'], last_time)
+    res = compton_slowdb.load_tables([args.season], last_time)
     
-    clbrdb.update(res)
+    clbrdb.update(res, handle_last_time_row = args.update)
     clbrdb.commit()
     del clbrdb
     
-# python scripts/compton_filter.py --season cmd3_2021_2 --config database.ini
+# python scripts/compton_filter.py --season cmd3_2021_2 --config database.ini --update
 if __name__ == "__main__":
     main()
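
For reference, the --config file is a standard ConfigParser INI whose section names match main(); the keys below are hypothetical placeholders and must match the handler constructors:

    from configparser import ConfigParser

    parser = ConfigParser()
    parser.read_string("""
    [postgresql]
    host = <slowdb-host>
    database = <slowdb-name>
    user = <user>
    password = <password>

    [clbrDB]
    host = <clbrdb-host>
    database = <clbrdb-name>
    user = <user>
    password = <password>
    """)
    print(dict(parser['clbrDB']))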