
Sybase dependencies in sot org repos


Packages

This list gives an initial disposition for each package. Entries that are blank or marked ?? have not yet been assessed.

  • Ska.DBI: modify to allow reading from CXC sybase databases (e.g. OCAT, APstat), perhaps just calling a standalone perl script on the fly to interface with the server.
    Slow, but likely adequate for existing use cases (a sketch of this approach is shown after this list).
    Fixed.
  • aca_dark_cal: no longer used
  • aca_lts_eval: superseded by yoshi
  • aca_required_temp: superseded by aca_lts_eval
  • aca_status_flags: legacy analysis
  • acq_stat_reports: deprecate in favor of mica acq stats and stop job
  • cmd_states: remove sybase table processing and references; no flight dependencies. Note that the update job currently updates the HDF5 file as part of the sybase processing; this should be moved to happen as part of the sqlite processing.
  • fid_drift_mon: a fair bit of work; an old perl script creates the fid_stats table
  • guide_stat_reports: deprecate in favor of mica guide stats and stop job
  • gyro_bias: legacy analysis
  • jobwatch: stop watching sybase tables
  • kadi: no longer uses timelines
  • mica: a fair bit of work here, but hopefully the Ska.DBI fix will make this transparent
  • mp_parse_db: ??
  • obc_bad_stat_warm_pix: ??
  • obsvis_fov: ??
  • perigee_health_plots: updated to remove sybase, see https://github.com/sot/perigee_health_plots/pull/14
  • periscope_drift_reports
  • periscope_tilt
  • pftank2t_check: not used or maintained
  • pog_aspect
  • schedule_view
  • star_stat_db: ??
  • starcheck: migrated to mica acq and guide stats.
  • state_of_aca: legacy, leave as is
  • telem_archive: migrate to sqlite
  • timelines: don't care
  • track_periscope: ??
  • validate_states: drop checking sybase states
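
To illustrate the Ska.DBI item above: one way to read from the CXC sybase servers without a native Python Sybase driver is to shell out to a standalone perl script and parse its output. This is only a sketch of that idea, not necessarily how the actual fix was done; the script name sybase_query.pl, its command-line flags, and the JSON output format are all hypothetical stand-ins.

```python
import json
import subprocess


def sybase_fetchall(query, server='sqlsao', database='axafocat', user='aca_ops',
                    script='sybase_query.pl'):
    """Fetch rows from a CXC Sybase server by running a standalone perl script.

    The script name, flags, and JSON output contract here are hypothetical;
    a real helper would match whatever wrapper script is actually installed.
    Slow (one subprocess per query), but adequate for read-only OCAT/APstat use.
    """
    cmd = [script,
           '--server', server,
           '--database', database,
           '--user', user,
           '--query', query]
    out = subprocess.run(cmd, capture_output=True, text=True, check=True)
    # Expect a single JSON list of row dictionaries on stdout,
    # analogous to what Ska.DBI fetchall returns.
    return json.loads(out.stdout)
```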

Table migration to sqlite

The astromon package has a script sybase2sqlite.py and example usage notes in NOTES.migrate_sqlite that should be quite helpful. A minimal sketch of the same approach is shown below, followed by a grep of the sybase DBI connections across the repos.
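
For orientation, here is a rough sketch of the kind of table copy that script performs, assuming the destination sqlite table has already been created with a compatible schema and that Ska.DBI's insert/commit helpers can be used for the writes. The table name and sqlite file name are placeholders.

```python
import Ska.DBI

TABLE = 'xray_src'  # placeholder: substitute the actual table being migrated

# Source: read-only connection to the Sybase 'aca' server
dbsy = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
# Destination: local sqlite file (placeholder name)
dbs3 = Ska.DBI.DBI(dbi='sqlite', server='migrated.db3')

# Copy every row; assumes the sqlite table already exists with a matching
# schema (sybase2sqlite.py derives the schema from the Sybase table itself).
for row in dbsy.fetchall('select * from {}'.format(TABLE)):
    dbs3.insert(row, TABLE)

dbs3.commit()
dbsy.conn.close()
```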

./starcheck/starcheck/data/bad_stars.py-
./starcheck/starcheck/data/bad_stars.py:dbh = Ska.DBI.DBI(dbi='sybase', user='aca_read')
./starcheck/starcheck/data/bad_stars.py-agasc_bad = open('manual_bad_stars').read()
./starcheck/starcheck/data/bad_stars.py-
./starcheck/starcheck/data/bad_stars.py-
./starcheck/starcheck/data/bad_stars.py-def sausage_list(gui, bad_acq, file='test_agasc.bad',
./starcheck/starcheck/data/bad_stars.py-                 gui_n=3, gui_med=.50, acq_n=6, acq_fail=.80):
--
./timelines/make_new_tables.py-
./timelines/make_new_tables.py:    syb = Ska.DBI.DBI(dbi='sybase', user='aca_read', server='sybase', database='aca',
./timelines/make_new_tables.py-                      numpy=False, verbose=opt.verbose)
./timelines/make_new_tables.py-    db = Ska.DBI.DBI(dbi=opt.dbi, server=opt.server, user=opt.user, database=opt.database,
./timelines/make_new_tables.py-                     numpy=False, verbose=opt.verbose)
./timelines/make_new_tables.py-
./timelines/make_new_tables.py-    for truncate in (
--
./timelines/timelines_test.py-
./timelines/timelines_test.py:    db = Ska.DBI.DBI(dbi='sybase', user='aca_read', database='aca', numpy=True)
./timelines/timelines_test.py-    # get the modification times of the likely directories
./timelines/timelines_test.py-    sumfile_modtimes = []
./timelines/timelines_test.py-    for load in loads:
./timelines/timelines_test.py-        tstart_date = DateTime(load['TStart (GMT)'])
./timelines/timelines_test.py-        cl = load['LOADSEG.NAME']
--
./timelines/timelines_test.py-        if self.dbi == 'sybase':
./timelines/timelines_test.py:            self.dbh = Ska.DBI.DBI(dbi='sybase', server='sybase',
./timelines/timelines_test.py-                                   user='aca_test', database='aca_tstdb')
./timelines/timelines_test.py-        else:
./timelines/timelines_test.py-            self.dbh = Ska.DBI.DBI(dbi='sqlite', server=self.dbfile)
./timelines/timelines_test.py-        return self.dbh
./timelines/timelines_test.py-
--
./timelines/timelines_test.py-        if self.first_npnt_state is None:
./timelines/timelines_test.py:            acadb = Ska.DBI.DBI(dbi='sybase', user='aca_read', database='aca',
./timelines/timelines_test.py-                                numpy=True, verbose=verbose)
./timelines/timelines_test.py-            es_query = """select * from cmd_states
./timelines/timelines_test.py-                          where datestop > '%s' and datestart < '%s'
./timelines/timelines_test.py-                          order by datestart asc""" % (first_timeline['datestart'], last_timeline['datestop'])
./timelines/timelines_test.py-            db_states = acadb.fetchall(es_query)
--
./timelines/timelines_test.py-
./timelines/timelines_test.py:    dbh = Ska.DBI.DBI(dbi='sybase', user='aca_read', database='aca', numpy=True, verbose=False)
./timelines/timelines_test.py-
./timelines/timelines_test.py-    good = [dict(load={'datestart': '2010:052:01:59:26.450',
./timelines/timelines_test.py-                         'datestop': '2010:052:12:20:06.101',
./timelines/timelines_test.py-                         'fixed_by_hand': 0,
./timelines/timelines_test.py-                         'id': 383104832,
--
./timelines/timelines_test.py-
./timelines/timelines_test.py:    dbh = Ska.DBI.DBI(dbi='sybase', user='aca_read', database='aca', numpy=True, verbose=False)
./timelines/timelines_test.py-
./timelines/timelines_test.py-    good = [dict(built_load={'file': 'C044_2301.sum',
./timelines/timelines_test.py-                               'first_cmd_time': '2010:052:01:59:26.450',
./timelines/timelines_test.py-                               'last_cmd_time': '2010:052:12:20:06.101',
./timelines/timelines_test.py-                               'load_scs': 128,
--
./timelines/timelines_test.py-
./timelines/timelines_test.py:    dbh = Ska.DBI.DBI(dbi='sybase', user='aca_read', database='aca', numpy=True, verbose=False)
./timelines/timelines_test.py-
./timelines/timelines_test.py-    good = [dict(load= {'datestart': '2010:052:01:59:26.450',
./timelines/timelines_test.py-                          'datestop': '2010:052:12:20:06.101',
./timelines/timelines_test.py-                          'fixed_by_hand': 0,
./timelines/timelines_test.py-                          'id': 383104832,
--
./timelines/update/make_old_tables.py-
./timelines/update/make_old_tables.py:    syb = Ska.DBI.DBI(dbi='sybase', user='aca_read', server='sybase', database='aca',
./timelines/update/make_old_tables.py-                      numpy=False, verbose=opt.verbose)
./timelines/update/make_old_tables.py-    db = Ska.DBI.DBI(dbi=opt.dbi, server=opt.server, user=opt.user, database=opt.database,
./timelines/update/make_old_tables.py-                     numpy=False, verbose=opt.verbose)
./timelines/update/make_old_tables.py-
./timelines/update/make_old_tables.py-    for truncate in (
--
./timelines/update/procedure.py-import Ska.DBI
./timelines/update/procedure.py:dbh = Ska.DBI.DBI(dbi='sybase', server='sybase', database='aca_tstdb', user='aca_test')
./timelines/update/procedure.py-
./timelines/update/procedure.py-#bash("./make_old_tables.py --verbose 2 --dbi sybase --server sybase --user aca_test --database aca_tstdb")
./timelines/update/procedure.py-
./timelines/update/procedure.py-#bash("sh dump_cmd_tables.sh")
./timelines/update/procedure.py-
--
./cmd_states/Chandra/cmd_states/cmd_states.py-    if db is None:
./cmd_states/Chandra/cmd_states/cmd_states.py:        db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read',
./cmd_states/Chandra/cmd_states/cmd_states.py-                         database='aca')
./cmd_states/Chandra/cmd_states/cmd_states.py-
./cmd_states/Chandra/cmd_states/cmd_states.py-    if date is not None:
./cmd_states/Chandra/cmd_states/cmd_states.py-        date = DateTime(date).date
./cmd_states/Chandra/cmd_states/cmd_states.py-
--
./cmd_states/Chandra/cmd_states/get_cmd_states.py-      # Get same states from Sybase (25 times slower)
./cmd_states/Chandra/cmd_states/get_cmd_states.py:      >>> states2 = fetch_states('2011:100', '2011:101', vals=['obsid', 'simpos'], dbi='sybase')
./cmd_states/Chandra/cmd_states/get_cmd_states.py-      >>> states2 == states
./cmd_states/Chandra/cmd_states/get_cmd_states.py-      array([ True,  True,  True], dtype=bool)
./cmd_states/Chandra/cmd_states/get_cmd_states.py-
./cmd_states/Chandra/cmd_states/get_cmd_states.py-    :param start: start date (default=Now-10 days)
./cmd_states/Chandra/cmd_states/get_cmd_states.py-    :param stop: stop date (default=None)
--
./cmd_states/make_new_tl_ls.py-
./cmd_states/make_new_tl_ls.py:    syb = Ska.DBI.DBI(dbi='sybase', user='aca_read', database='aca',
./cmd_states/make_new_tl_ls.py-                      numpy=False, verbose=True)
./cmd_states/make_new_tl_ls.py-    db = Ska.DBI.DBI(dbi='sqlite', server=opt.server, numpy=False,
./cmd_states/make_new_tl_ls.py-                     verbose=False)
./cmd_states/make_new_tl_ls.py-
./cmd_states/make_new_tl_ls.py-    for drop in ('VIEW timeline_loads', 'TABLE timelines',
--
./cmd_states/tests/cmp_telem.py-# db = Ska.DBI.DBI(dbi='sqlite', server='db_base.db3')
./cmd_states/tests/cmp_telem.py:db = Ska.DBI.DBI(dbi='sybase')
./cmd_states/tests/cmp_telem.py-
./cmd_states/tests/cmp_telem.py-datestart = '%d:365' % (year-1)
./cmd_states/tests/cmp_telem.py-datestop = '%d:001' % (year+1)
./cmd_states/tests/cmp_telem.py-
./cmd_states/tests/cmp_telem.py-if 'states' not in globals():
--
./cmd_states/tests/otg_telem.py-
./cmd_states/tests/otg_telem.py:# db = Ska.DBI.DBI(dbi='sybase')
./cmd_states/tests/otg_telem.py-
./cmd_states/tests/otg_telem.py-datestart = DateTime('2008:360').date
./cmd_states/tests/otg_telem.py-datestop = DateTime('2010:220').date
./cmd_states/tests/otg_telem.py-
./cmd_states/tests/otg_telem.py-if 1 or 'states' not in globals():
--
./cmd_states/tests/update_tables.py-db = Ska.DBI.DBI(dbi='sqlite', server='test.db3', verbose=False)
./cmd_states/tests/update_tables.py:# db = Ska.DBI.DBI(dbi='sybase')
./cmd_states/tests/update_tables.py-
./cmd_states/tests/update_tables.py-if drop:
./cmd_states/tests/update_tables.py-    tables = ('cmd_states', 'cmds', 'cmd_intpars', 'cmd_fltpars')
./cmd_states/tests/update_tables.py-    for table in reversed(tables):
./cmd_states/tests/update_tables.py-        try:
--
./mica/mica/archive/asp_l1_proc.py-
./mica/mica/archive/asp_l1_proc.py:    apstat_db = dict(dbi='sybase',
./mica/mica/archive/asp_l1_proc.py-                     server='sqlsao',
./mica/mica/archive/asp_l1_proc.py-                     database='axafapstat')
./mica/mica/archive/asp_l1_proc.py-    proc_db_file = os.path.join(MICA_ARCHIVE, 'asp1', 'processing_asp_l1.db3')
./mica/mica/archive/asp_l1_proc.py-    if not os.path.exists(proc_db_file) or os.stat(proc_db_file).st_size == 0:
./mica/mica/archive/asp_l1_proc.py-        if not os.path.exists(config['data_root']):
--
./mica/mica/archive/obsid_archive.py-        self._arc5 = Ska.arc5gl.Arc5gl()
./mica/mica/archive/obsid_archive.py:        self._apstat = dict(dbi='sybase', server='sqlsao',
./mica/mica/archive/obsid_archive.py-                            database='axafapstat')
./mica/mica/archive/obsid_archive.py:        self._aca_db = dict(dbi='sybase', server='sybase',
./mica/mica/archive/obsid_archive.py-                                   user='aca_read')
./mica/mica/archive/obsid_archive.py-        config = self.config
./mica/mica/archive/obsid_archive.py-        db_file = os.path.join(os.path.abspath(config['data_root']),
./mica/mica/archive/obsid_archive.py-                               'archfiles.db3')
./mica/mica/archive/obsid_archive.py-        if not os.path.exists(db_file) or os.stat(db_file).st_size == 0:
--
./mica/mica/archive/obsid_archive.py-                    # see if it is discarded
./mica/mica/archive/obsid_archive.py:                    with Ska.DBI.DBI(dbi='sybase', server='sqlsao', database='axafocat') as db:
./mica/mica/archive/obsid_archive.py-                        target_status = db.fetchone(
./mica/mica/archive/obsid_archive.py-                            "select status from target where obsid = {}".format(obs['obsid']))
./mica/mica/archive/obsid_archive.py-                        if target_status['status'] == 'discarded':
./mica/mica/archive/obsid_archive.py-                            logger.info("Skipping {}, obsid 'discarded'".format(
./mica/mica/archive/obsid_archive.py-                                    obs['obsid']))
--
./mica/mica/catalog/catalog.py-
./mica/mica/catalog/catalog.py:#tdb = Ska.DBI(dbi='sybase', server='sybase',
./mica/mica/catalog/catalog.py-#              user='aca_read')
./mica/mica/catalog/catalog.py-
./mica/mica/catalog/catalog.py-
./mica/mica/catalog/catalog.py-def plot(obsid, mp_dir=None):
./mica/mica/catalog/catalog.py-    sc = get_starcheck_catalog(obsid, mp_dir)
--
./mica/mica/report/report.py-
./mica/mica/report/report.py:    with Ska.DBI.DBI(dbi='sybase', server='sqlsao', user='aca_ops', database='axafocat') as ocat_db:
./mica/mica/report/report.py-        ocat_info = ocat_db.fetchone("""select * from target inner join prop_info on
./mica/mica/report/report.py-                                    target.proposal_id = prop_info.proposal_id
./mica/mica/report/report.py-                                    and target.obsid = {}""".format(obsid))
./mica/mica/report/report.py-        # If this target didn't have a proposal, just get whatever is there
./mica/mica/report/report.py-        if ocat_info is None:
--
./mica/mica/report/report.py-def official_vv(obsid):
./mica/mica/report/report.py:    vv_db = Ska.DBI.DBI(dbi='sybase', server='sqlsao', user='jeanconn', database='axafvv')
./mica/mica/report/report.py-    vv = vv_db.fetchone("""select vvid from vvreport where obsid = {obsid}
./mica/mica/report/report.py-                           and creation_date = (
./mica/mica/report/report.py-                              select max(creation_date) from vvreport where obsid = {obsid})
./mica/mica/report/report.py-                        """.format(obsid=obsid))
./mica/mica/report/report.py-    if vv is not None:
--
./mica/mica/report/report.py-def official_vv_notes(obsid, summary):
./mica/mica/report/report.py:    vv_db = Ska.DBI.DBI(dbi='sybase', server='sqlsao', user='jeanconn', database='axafvv',
./mica/mica/report/report.py-                        numpy=False)
./mica/mica/report/report.py-    all_vv = vv_db.fetchall("""select * from vvreport where obsid = {obsid}
./mica/mica/report/report.py-                        """.format(obsid=obsid))
./mica/mica/report/report.py-    if not len(all_vv):
./mica/mica/report/report.py-        return None
--
./mica/mica/report/report.py-def get_aiprops(obsid):
./mica/mica/report/report.py:    ACA_DB = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./mica/mica/report/report.py-    aiprops = ACA_DB.fetchall(
./mica/mica/report/report.py-        "select * from aiprops where obsid = {} order by tstart".format(
./mica/mica/report/report.py-            obsid))
./mica/mica/report/report.py-    return aiprops
./mica/mica/report/report.py-
--
./mica/mica/vv/core.py-        import Ska.DBI
./mica/mica/vv/core.py:        apstat = Ska.DBI.DBI(dbi='sybase',
./mica/mica/vv/core.py-                             server='sqlsao',
./mica/mica/vv/core.py-                             database='axafapstat')
./mica/mica/vv/core.py-        # take these from the first aspect solution file header
./mica/mica/vv/core.py-        aspect_1 = apstat.fetchall("""SELECT * FROM aspect_1
./mica/mica/vv/core.py-                                      WHERE obsid = {obsid}
--
./mica/mica/vv/core.py-        obi = int(self.asol_header['OBI_NUM'])
./mica/mica/vv/core.py:        ocat_db = Ska.DBI.DBI(dbi='sybase', server='sqlsao', database='axafocat')
./mica/mica/vv/core.py-        stars = ocat_db.fetchall("select * from stars where "
./mica/mica/vv/core.py-                                 "obsid = {} and obi = {} "
./mica/mica/vv/core.py-                                 "and type != 0".format(obsid, obi))
./mica/mica/vv/core.py-        ocat_db.conn.close()
./mica/mica/vv/core.py-        if len(np.unique(stars['obi'])) > 1:
--
./mica/mica/web/star_hist.py-    """
./mica/mica/web/star_hist.py:    with DBI(dbi='sybase', server='sybase', user='aca_read') as db:
./mica/mica/web/star_hist.py-        gui = db.fetchall(
./mica/mica/web/star_hist.py-            'select * from trak_stats_data where type != "FID" and id = {}'.format(
./mica/mica/web/star_hist.py-                agasc_id))
./mica/mica/web/star_hist.py-    if not len(gui):
./mica/mica/web/star_hist.py-        return []
--
./Ska.DBI/Ska/DBI/DBI.py-      db = DBI(dbi='sqlite', server=dbfile, numpy=False, verbose=True)
./Ska.DBI/Ska/DBI/DBI.py:      db = DBI(dbi='sybase', server='sybase', user='aca_ops', database='aca')
./Ska.DBI/Ska/DBI/DBI.py:      db = DBI(dbi='sybase')   # Use defaults (same as above)
./Ska.DBI/Ska/DBI/DBI.py-
./Ska.DBI/Ska/DBI/DBI.py-    :param dbi:  Database interface name (sqlite, sybase)
./Ska.DBI/Ska/DBI/DBI.py-    :param server: Server name (or file name for sqlite)
./Ska.DBI/Ska/DBI/DBI.py-    :param user: User name (optional)
./Ska.DBI/Ska/DBI/DBI.py-    :param passwd: Password (optional).  Read from aspect authorization if required and not supplied.
--
./Ska.DBI/Ska/DBI/tests/test_dbi.py-class TestSybaseWithNumpy(DBI_BaseTests):
./Ska.DBI/Ska/DBI/tests/test_dbi.py:    db_config = dict(dbi='sybase', server='sybase', user='aca_test',
./Ska.DBI/Ska/DBI/tests/test_dbi.py-                     database='aca_tstdb', numpy=True)
./Ska.DBI/Ska/DBI/tests/test_dbi.py-
./Ska.DBI/Ska/DBI/tests/test_dbi.py-
./Ska.DBI/Ska/DBI/tests/test_dbi.py-@pytest.mark.skipif('not HAS_SYBASE', reason='No Sybase support for Python 3')
./Ska.DBI/Ska/DBI/tests/test_dbi.py-class TestSybaseWithoutNumpy(DBI_BaseTests):
./Ska.DBI/Ska/DBI/tests/test_dbi.py:    db_config = dict(dbi='sybase', server='sybase', user='aca_test',
./Ska.DBI/Ska/DBI/tests/test_dbi.py-                     database='aca_tstdb', numpy=False)
./Ska.DBI/Ska/DBI/tests/test_dbi.py-
./Ska.DBI/Ska/DBI/tests/test_dbi.py-
./Ska.DBI/Ska/DBI/tests/test_dbi.py-def test_context_manager():
./Ska.DBI/Ska/DBI/tests/test_dbi.py-    with DBI(dbi='sqlite', server=':memory:') as db:
--
./jobwatch/skawatch.py-            query='SELECT MAX({timekey}) AS maxtime FROM {table}',
./jobwatch/skawatch.py:            dbi='sybase', server='sybase', user='aca_read', database='aca')
./jobwatch/skawatch.py-
./jobwatch/skawatch.py-
./jobwatch/skawatch.py-class SkaSqliteDbWatch(DbWatch):
./jobwatch/skawatch.py-    def __init__(self, task, maxage=1, dbfile=None, table=None, timekey='tstart'):
./jobwatch/skawatch.py-        super(SkaSqliteDbWatch, self).__init__(
--
./schedule_view/get_schedules.py-
./schedule_view/get_schedules.py:sqlaca = DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./schedule_view/get_schedules.py-
./schedule_view/get_schedules.py-mp_sched_path = '/proj/web-icxc/htdocs/mp/schedules'
./schedule_view/get_schedules.py-mp_mplogs_path = '/proj/web-icxc/htdocs/mp/mplogs'
./schedule_view/get_schedules.py-file_nav = {'mplogs_url': 'file://%s' % mp_mplogs_path,
./schedule_view/get_schedules.py-            'mp_sched_path': mp_sched_path,
--
./kadi/kadi/update_cmds.py-    # Get timeline_loads including and after start
./kadi/kadi/update_cmds.py:    db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./kadi/kadi/update_cmds.py-    timeline_loads = db.fetchall("""SELECT * from timeline_loads
./kadi/kadi/update_cmds.py-                                    WHERE datestop > '{}' AND datestart < '{}'
./kadi/kadi/update_cmds.py-                                    ORDER BY id"""
./kadi/kadi/update_cmds.py-                                 .format(start.date, stop.date))
./kadi/kadi/update_cmds.py-    db.conn.close()
--
./pftank2t_check/pftank2t_check.py-    logger.info('Connecting to database to get cmd_states')
./pftank2t_check/pftank2t_check.py:    db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read',
./pftank2t_check/pftank2t_check.py-                     database='aca')
./pftank2t_check/pftank2t_check.py-
./pftank2t_check/pftank2t_check.py-    tnow = DateTime(opt.run_start).secs
./pftank2t_check/pftank2t_check.py-    if opt.oflsdir is not None:
./pftank2t_check/pftank2t_check.py-        # Get tstart, tstop, commands from backstop file in opt.oflsdir
--
./perigee_health_plots/pass_plots.py-    pass_time_file = 'pass_times.txt'
./perigee_health_plots/pass_plots.py:    aca_db = DBI(dbi='sybase', server='sybase',
./perigee_health_plots/pass_plots.py-                 user='aca_read', database='aca')
./perigee_health_plots/pass_plots.py-    obsids = aca_db.fetchall("""SELECT obsid,obsid_datestart,obsid_datestop
./perigee_health_plots/pass_plots.py-                                from observations
./perigee_health_plots/pass_plots.py-                                where obsid_datestart > '%s'
./perigee_health_plots/pass_plots.py-                                and obsid_datestart < '%s'"""
--
./track_periscope/get_max_processing.py-
./track_periscope/get_max_processing.py:sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read',
./track_periscope/get_max_processing.py-                     numpy=True, database='aca' )
./track_periscope/get_max_processing.py-
./track_periscope/get_max_processing.py:axafapstat = Ska.DBI.DBI(dbi='sybase', server='sqlocc', user='aca_ops',
./track_periscope/get_max_processing.py-                         database='axafapstat')
./track_periscope/get_max_processing.py-
./track_periscope/get_max_processing.py-
./track_periscope/get_max_processing.py-
./track_periscope/get_max_processing.py-class ObiError(Exception):
--
./track_periscope/get_missing.py-
./track_periscope/get_missing.py:sqlaca = DBI(dbi='sybase', server='sybase', verbose=True)
./track_periscope/get_missing.py-
./track_periscope/get_missing.py-all_obs = sqlaca.fetchall("""select obsid,obi
./track_periscope/get_missing.py-from observations
./track_periscope/get_missing.py-where obsid < 40000
./track_periscope/get_missing.py-and kalman_datestart > '2000:001:00:00:00.000'""")
--
./track_periscope/per_obs_periscope_tilt.py-
./track_periscope/per_obs_periscope_tilt.py:    sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_ops',
./track_periscope/per_obs_periscope_tilt.py-                         numpy=True, database='aca')
./track_periscope/per_obs_periscope_tilt.py-
./track_periscope/per_obs_periscope_tilt.py-    all_obs = sqlaca.fetchall(
./track_periscope/per_obs_periscope_tilt.py-        """select obsid,obi
./track_periscope/per_obs_periscope_tilt.py-           from observations
--
./guide_stat_reports/get_binned_data.py-
./guide_stat_reports/get_binned_data.py:dbh = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./guide_stat_reports/get_binned_data.py-
./guide_stat_reports/get_binned_data.py-stars = dbh.fetchall("""select kalman_tstart as tstart, n_samples,
./guide_stat_reports/get_binned_data.py-not_tracking_samples, obc_bad_status_samples
./guide_stat_reports/get_binned_data.py-from trak_stats_data
./guide_stat_reports/get_binned_data.py-where type != 'FID' and color is not null and
--
./guide_stat_reports/get_month_bin_data.py-#csec_year = 86400 * 365.25
./guide_stat_reports/get_month_bin_data.py:dbh = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./guide_stat_reports/get_month_bin_data.py-stars = dbh.fetchall("""select kalman_tstart as tstart, n_samples,
./guide_stat_reports/get_month_bin_data.py-                        not_tracking_samples, obc_bad_status_samples
./guide_stat_reports/get_month_bin_data.py-                        from trak_stats_data
./guide_stat_reports/get_month_bin_data.py-                        where type != 'FID' and color is not null and
./guide_stat_reports/get_month_bin_data.py-                        kalman_tstart >= %f and kalman_tstart < %f"""
--
./guide_stat_reports/gui_stat_reports.py-    
./guide_stat_reports/gui_stat_reports.py:    sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca', numpy=True)
./guide_stat_reports/gui_stat_reports.py-    min_time = DateTime('2003:001:00:00:00.000')
./guide_stat_reports/gui_stat_reports.py-
./guide_stat_reports/gui_stat_reports.py-    data_table = 'trak_stats_data'
./guide_stat_reports/gui_stat_reports.py-
./guide_stat_reports/gui_stat_reports.py-
--
./guide_stat_reports/make_binned_fit.py-
./guide_stat_reports/make_binned_fit.py:#dbh = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./guide_stat_reports/make_binned_fit.py-
./guide_stat_reports/make_binned_fit.py-#stars = dbh.fetchall("""select kalman_tstart as tstart, n_samples,
./guide_stat_reports/make_binned_fit.py-#not_tracking_samples, obc_bad_status_samples
./guide_stat_reports/make_binned_fit.py-#from trak_stats_data
./guide_stat_reports/make_binned_fit.py-#where type != 'FID' and color is not null and
--
./guide_stat_reports/make_likelihood_fit.py-
./guide_stat_reports/make_likelihood_fit.py:dbh = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./guide_stat_reports/make_likelihood_fit.py-if 'stars' not in globals():
./guide_stat_reports/make_likelihood_fit.py-    stars = dbh.fetchall("""select obsid, slot, type, color, kalman_datestart, kalman_tstart as tstart, n_samples,
./guide_stat_reports/make_likelihood_fit.py-    not_tracking_samples, obc_bad_status_samples
./guide_stat_reports/make_likelihood_fit.py-    from trak_stats_data
./guide_stat_reports/make_likelihood_fit.py-    where type != 'FID' and color is not null and
--
./pog_aspect/acq_gui_stats.py-from Chandra.Time import DateTime
./pog_aspect/acq_gui_stats.py:aca_db = DBI(dbi='sybase', server='sybase', user='aca_read')
./pog_aspect/acq_gui_stats.py-import numpy as np
./pog_aspect/acq_gui_stats.py-# load a recently made table of the acq stats database
./pog_aspect/acq_gui_stats.py-#acq = np.load('/proj/sot/ska/data/acq_stat_reports/acq_stats_with_temp_and_warmpix.npy')
./pog_aspect/acq_gui_stats.py-#gui = np.load('/proj/sot/ska/data/gui_stat_reports/guide_stats_with_temp_and_warmpix.npy')
./pog_aspect/acq_gui_stats.py-
--
./periscope_drift_reports/reports.py-
./periscope_drift_reports/reports.py:sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca', numpy=True)
./periscope_drift_reports/reports.py-
./periscope_drift_reports/reports.py-
./periscope_drift_reports/reports.py:axafapstat = Ska.DBI.DBI(dbi='sybase', server='sqlocc', user='aca_ops',
./periscope_drift_reports/reports.py-                         database='axafapstat')
./periscope_drift_reports/reports.py-
./periscope_drift_reports/reports.py-
./periscope_drift_reports/reports.py-GRADIENTS = dict(OOBAGRD3=dict(yag=6.98145650e-04,
./periscope_drift_reports/reports.py-                               zag=9.51578351e-05,
--
./acq_stat_reports/acq_stat_reports.py-    
./acq_stat_reports/acq_stat_reports.py:    sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca', numpy=True)
./acq_stat_reports/acq_stat_reports.py-    min_acq_time = DateTime('2000:001:00:00:00.000')
./acq_stat_reports/acq_stat_reports.py-
./acq_stat_reports/acq_stat_reports.py-    all_acq = sqlaca.fetchall('select * from acq_stats_data where tstart >= %f'
./acq_stat_reports/acq_stat_reports.py-                              % min_acq_time.secs )
./acq_stat_reports/acq_stat_reports.py-
--
./acq_stat_reports/make_likelihood_fit.py-
./acq_stat_reports/make_likelihood_fit.py:dbh = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./acq_stat_reports/make_likelihood_fit.py-if 'stars' not in globals():
./acq_stat_reports/make_likelihood_fit.py-    stars = dbh.fetchall("""select obsid, slot, type, color, obc_id, mag, mag_obs, tstart, tstop
./acq_stat_reports/make_likelihood_fit.py-    from acq_stats_data
./acq_stat_reports/make_likelihood_fit.py-    where color is not null and
./acq_stat_reports/make_likelihood_fit.py-    tstart >= %f and tstart < %f
--
./mp_parse_db/get_missing.py-logger.info("Retrieving all timelines")
./mp_parse_db/get_missing.py:#acadb_read = DBI(dbi='sybase', server='sybase', 
./mp_parse_db/get_missing.py-#                 user='aca_read', database='aca', numpy='True')
./mp_parse_db/get_missing.py:acadb = DBI(dbi='sybase', server='sybase', user='aca_read', verbose=True)
./mp_parse_db/get_missing.py-#acadb = DBI(dbi='sqlite', server='db.db3', verbose=True)
./mp_parse_db/get_missing.py-
./mp_parse_db/get_missing.py-logger.info("Retrieving all observations from obidet_0_5...")
./mp_parse_db/get_missing.py:axafapstat = DBI(dbi='sybase', server = 'sqlocc',
./mp_parse_db/get_missing.py-                 user='aca_ops', database='axafapstat', numpy='True')
./mp_parse_db/get_missing.py-all_ap = axafapstat.fetchall("""select obsid,obi,tstart,tstop,convert(char(19),ap_date,100) as ap_date
./mp_parse_db/get_missing.py-                                from obidet_0_5 as x
./mp_parse_db/get_missing.py-                                where x.ap_date =
./mp_parse_db/get_missing.py-                                (select max(o.ap_date) from obidet_0_5 as o
--
./mp_parse_db/get_missing.py-logger.info("Retrieving all rows from mp_load_info...")
./mp_parse_db/get_missing.py:#acadb_write = DBI(dbi='sybase', server='sybase',
./mp_parse_db/get_missing.py-#                  user='aca_ops', database='aca_tstdb', numpy='True')
./mp_parse_db/get_missing.py-all_loads = acadb.fetchall("""select obsid,obi,convert(char(19), last_ap_date,100) as last_ap_date from mp_load_info""")
./mp_parse_db/get_missing.py-all_loads_set = set((x['obsid'], x['obi'], str(x['last_ap_date'])) for x in all_loads)
./mp_parse_db/get_missing.py-    
./mp_parse_db/get_missing.py-logger.info("Determining set to be updated in mp_load_info")
--
./mp_parse_db/make_new_tables.py-
./mp_parse_db/make_new_tables.py:    syb = Ska.DBI.DBI(dbi='sybase', database='aca',numpy=False, verbose=opt.verbose)
./mp_parse_db/make_new_tables.py-    db = Ska.DBI.DBI(dbi=opt.dbi, server=opt.server, numpy=False, verbose=opt.verbose)
./mp_parse_db/make_new_tables.py-
./mp_parse_db/make_new_tables.py-    for drop in ('TABLE mp_load_info',
./mp_parse_db/make_new_tables.py-                 'VIEW timeline_loads',
./mp_parse_db/make_new_tables.py-                 'TABLE timelines', 'TABLE load_segments',
--
./mp_parse_db/mp_parse.py-def path_via_dir_search( obs ):
./mp_parse_db/mp_parse.py:    ocat = DBI(dbi='sybase', server = 'sqlocc',
./mp_parse_db/mp_parse.py-               user='aca_ops', database='axafocat', numpy='True')
./mp_parse_db/mp_parse.py-    soe = ocat.fetchall("""select obsid,obi,orl_filename,soe_filename,type
./mp_parse_db/mp_parse.py-                           from view_soe where obsid = %d and obi = %d"""
./mp_parse_db/mp_parse.py-                        % (obs['obsid'], obs['obi']))
./mp_parse_db/mp_parse.py-    if ( len(soe) > 1):
--
./mp_parse_db/mp_parse.py-    logger.info("Retrieving all timelines")
./mp_parse_db/mp_parse.py:    #acadb_read = DBI(dbi='sybase', server='sybase', 
./mp_parse_db/mp_parse.py-    #                 user='aca_read', database='aca', numpy='True')
./mp_parse_db/mp_parse.py-    acadb = DBI(dbi=opt.dbi, server=opt.db, verbose=True)
./mp_parse_db/mp_parse.py-    all_timelines = acadb.fetchall("select * from timelines order by id")
./mp_parse_db/mp_parse.py-    
./mp_parse_db/mp_parse.py-    logger.info("Retrieving all observations from obidet_0_5...")
./mp_parse_db/mp_parse.py:    axafapstat = DBI(dbi='sybase', server = 'sqlocc',
./mp_parse_db/mp_parse.py-                     user='aca_ops', database='axafapstat', numpy='True')
./mp_parse_db/mp_parse.py-    
./mp_parse_db/mp_parse.py-    #all_ap = axafapstat.fetchall("""select obsid,obi,tstart,tstop,convert(char(26),ap_date,109) as ap_date
./mp_parse_db/mp_parse.py-    #                                from obidet_0_5 where (quality = 'D' or quality = 'Q')""")
./mp_parse_db/mp_parse.py-
--
./mp_parse_db/mp_parse.py-
./mp_parse_db/mp_parse.py:    #acadb_write = DBI(dbi='sybase', server='sybase',
./mp_parse_db/mp_parse.py-    #                  user='aca_ops', database='aca_tstdb', numpy='True')
./mp_parse_db/mp_parse.py-
./mp_parse_db/mp_parse.py-    all_loads_set = set((x['obsid'], x['obi'], str(x['last_ap_date'])) for x in all_loads)
./mp_parse_db/mp_parse.py-
./mp_parse_db/mp_parse.py-    logger.info("Determining set to be updated in mp_load_info")
--
./aca_dark_cal/warm_pix_corr.py-mp_dir = '/data/mpcrit1/mplogs/'
./aca_dark_cal/warm_pix_corr.py:db = DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./aca_dark_cal/warm_pix_corr.py-task = 'aca_dark_cal'
./aca_dark_cal/warm_pix_corr.py-TASK_SHARE = os.path.join(os.environ['SKA'], 'share', task)
./aca_dark_cal/warm_pix_corr.py-
./aca_dark_cal/warm_pix_corr.py-
./aca_dark_cal/warm_pix_corr.py-def get_options():
--
./aca_status_flags/plot_cen_simple.py-import Ska.DBI
./aca_status_flags/plot_cen_simple.py:db = Ska.DBI.DBI(dbi='sybase', server='sybase')
./aca_status_flags/plot_cen_simple.py-if 'obs' not in globals():
./aca_status_flags/plot_cen_simple.py-    obs = db.fetchall("""select obsid, kalman_datestart, kalman_datestop
./aca_status_flags/plot_cen_simple.py-    from observations where obsid = %s""" % obsid)[0]
./aca_status_flags/plot_cen_simple.py-
./aca_status_flags/plot_cen_simple.py-if 'telem' not in globals():
--
./aca_status_flags/plot_delta_radec.py-
./aca_status_flags/plot_delta_radec.py:db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./aca_status_flags/plot_delta_radec.py-if 'obs' not in globals():
./aca_status_flags/plot_delta_radec.py-    obs = db.fetchall("""select obsid, kalman_datestart, kalman_datestop
./aca_status_flags/plot_delta_radec.py-    from observations where obsid = %s""" % obsid)[0]
./aca_status_flags/plot_delta_radec.py-
./aca_status_flags/plot_delta_radec.py-if 'telem' not in globals():
--
./astromon/sybase2sqlite.py-
./astromon/sybase2sqlite.py:dbsy = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./astromon/sybase2sqlite.py-dbs3 = Ska.DBI.DBI(dbi='sqlite', server=args.sqlite_file)
./astromon/sybase2sqlite.py-
./astromon/sybase2sqlite.py-
./astromon/sybase2sqlite.py-# Get table schema
./astromon/sybase2sqlite.py-cursy = dbsy.conn.cursor()
--
./fid_drift_mon/calc_abs_cel_pointing.py-
./fid_drift_mon/calc_abs_cel_pointing.py:    db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./fid_drift_mon/calc_abs_cel_pointing.py-    vals = db.fetchall(query)
./fid_drift_mon/calc_abs_cel_pointing.py-    db.conn.close()
./fid_drift_mon/calc_abs_cel_pointing.py-
./fid_drift_mon/calc_abs_cel_pointing.py-    return vals
./fid_drift_mon/calc_abs_cel_pointing.py-
--
./fid_drift_mon/plot_drift.py-
./fid_drift_mon/plot_drift.py:    db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./fid_drift_mon/plot_drift.py-
./fid_drift_mon/plot_drift.py-    for det in dets:
./fid_drift_mon/plot_drift.py-        # Some filtering here?
./fid_drift_mon/plot_drift.py-        detstats = get_fid_stats(db, det)
./fid_drift_mon/plot_drift.py-        plotfids(detstats, det, args.data_dir)
--
./fid_drift_mon/plot_drift_model.py-
./fid_drift_mon/plot_drift_model.py:    db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read')
./fid_drift_mon/plot_drift_model.py-    detstats = get_fid_stats(db, 'ACIS')
./fid_drift_mon/plot_drift_model.py-    db.conn.close()
./fid_drift_mon/plot_drift_model.py-    fig, ax1 = plotfids(detstats, 'ACIS', tstart)
./fid_drift_mon/plot_drift_model.py-
./fid_drift_mon/plot_drift_model.py-    dat = fetch.MSID('aach1t', tstart, stat='5min')
--
./fid_drift_mon/plot_starcheck_vs_telem.py-    fids_starcheck = {}
./fid_drift_mon/plot_starcheck_vs_telem.py:    with Ska.DBI.DBI(server='sybase', dbi='sybase', user='aca_read') as db:
./fid_drift_mon/plot_starcheck_vs_telem.py-        for obsid, dwell in dwells.items():
./fid_drift_mon/plot_starcheck_vs_telem.py-            # Only accept the first dwell of science observations
./fid_drift_mon/plot_starcheck_vs_telem.py-            if obsid in fids_starcheck:
./fid_drift_mon/plot_starcheck_vs_telem.py-                logger.info('Skipping obsid {} already in fids_starcheck'.format(obsid))
./fid_drift_mon/plot_starcheck_vs_telem.py-                continue
--
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py-mp_dir = '/data/mpcrit1/mplogs/'
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py:db = DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py-task = 'aca_dark_cal'
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py-TASK_SHARE = '/proj/sot/ska/share/aca_dark_cal'
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py-#TASK_SHARE = os.path.join(os.environ['SKA'], 'share', task)
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py-
./obc_bad_stat_warm_pix/make_warm_pix_estimation_table.py-
--
./obc_bad_stat_warm_pix/warm_pix.py-        if 'trak_stats' not in globals():
./obc_bad_stat_warm_pix/warm_pix.py:            aca_db = Ska.DBI.DBI(dbi='sybase', server='sybase',
./obc_bad_stat_warm_pix/warm_pix.py-                                 user='aca_read')
./obc_bad_stat_warm_pix/warm_pix.py-            trak_stats = aca_db.fetchall(
./obc_bad_stat_warm_pix/warm_pix.py-                """select * from trak_stats_data where type != 'FID' and
./obc_bad_stat_warm_pix/warm_pix.py-                   kalman_datestart > '2007:000' order by kalman_tstart""")
./obc_bad_stat_warm_pix/warm_pix.py-        more_stats = []
--
./obsvis_fov/fov.py-
./obsvis_fov/fov.py:    with Ska.DBI.DBI(dbi='sybase', server='sqlsao',
./obsvis_fov/fov.py-                     user=username, passwd=passwd,
./obsvis_fov/fov.py-                     database='axafocat') as dbh:
./obsvis_fov/fov.py-        obsids = []
./obsvis_fov/fov.py-        for reqid in reqids:
./obsvis_fov/fov.py-            len_reqid = len(str(reqid))
--
./aca_required_temp/make_reports.py-
./aca_required_temp/make_reports.py:db = DBI(dbi='sybase', server='sqlsao', database='axafocat', user='aca_ops')
./aca_required_temp/make_reports.py-query = """SELECT t.obsid, t.ra, t.dec,
./aca_required_temp/make_reports.py-t.y_det_offset as y_offset, t.z_det_offset as z_offset,
./aca_required_temp/make_reports.py-t.approved_exposure_time, t.instrument, t.grating, t.obs_ao_str
./aca_required_temp/make_reports.py-FROM target t
./aca_required_temp/make_reports.py-WHERE
--
./aca_lts_eval/make_reports.py-
./aca_lts_eval/make_reports.py:db = DBI(dbi='sybase', server='sqlsao', database='axafocat', user='aca_ops')
./aca_lts_eval/make_reports.py-query = """SELECT t.obsid, t.ra, t.dec,
./aca_lts_eval/make_reports.py-t.type, t.y_det_offset as y_offset, t.z_det_offset as z_offset, 
./aca_lts_eval/make_reports.py-t.approved_exposure_time, t.instrument, t.grating, t.obs_ao_str, p.ao_str
./aca_lts_eval/make_reports.py-FROM target t
./aca_lts_eval/make_reports.py-right join prop_info p on t.ocat_propid = p.ocat_propid
--
./aca_lts_eval/make_obsid_files.py-
./aca_lts_eval/make_obsid_files.py:db = DBI(dbi='sybase', server='sqlsao', database='axafocat', user='aca_ops')
./aca_lts_eval/make_obsid_files.py-query = """SELECT t.obsid, t.ra, t.dec,
./aca_lts_eval/make_obsid_files.py-t.type, t.y_det_offset as y_offset, t.z_det_offset as z_offset, 
./aca_lts_eval/make_obsid_files.py-t.approved_exposure_time, t.instrument, t.grating, t.obs_ao_str, p.ao_str
./aca_lts_eval/make_obsid_files.py-FROM target t
./aca_lts_eval/make_obsid_files.py-right join prop_info p on t.ocat_propid = p.ocat_propid
--
./acis_thermal_check/acis_thermal_check/main.py-        self.logger.info('Connecting to database to get cmd_states')
./acis_thermal_check/acis_thermal_check/main.py:        db = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read',
./acis_thermal_check/acis_thermal_check/main.py-                         database='aca')
./acis_thermal_check/acis_thermal_check/main.py-
./acis_thermal_check/acis_thermal_check/main.py-        tnow = DateTime(opt.run_start).secs
./acis_thermal_check/acis_thermal_check/main.py-        if opt.oflsdir is not None:
./acis_thermal_check/acis_thermal_check/main.py-            # Get tstart, tstop, commands from backstop file in opt.oflsdir
--
./star_stat_db/update_star_stats.py-
./star_stat_db/update_star_stats.py:sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read',
./star_stat_db/update_star_stats.py-                     numpy=True, database='aca')
./star_stat_db/update_star_stats.py:sqlocc = Ska.DBI.DBI(dbi='sybase', server='sqlocc', user='aca_ops',
./star_stat_db/update_star_stats.py-                     numpy=True, database='axafocat')
./star_stat_db/update_star_stats.py-
./star_stat_db/update_star_stats.py-
./star_stat_db/update_star_stats.py-logger = logging.getLogger()
./star_stat_db/update_star_stats.py-logger.setLevel(logging.DEBUG)
--
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py-mp_dir = '/data/mpcrit1/mplogs/'
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py:db = DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py-task = 'aca_dark_cal'
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py-TASK_SHARE = '/proj/sot/ska/share/aca_dark_cal'
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py-#TASK_SHARE = os.path.join(os.environ['SKA'], 'share', task)
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py-
./state_of_aca/dark_cals_with_temp/dark_cals_with_temp.py-
--
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py-
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py:sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca', numpy=True)
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py-min_acq_time = DateTime('2000:001:00:00:00.000')
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py-
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py-cols_except_date = ('obsid', 'obi', 'tstart', 'tstop', 'slot', 'idx', 'cat_pos',
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py-                    'type', 'agasc_id', 'obc_id', 'yang', 'zang', 'mag', 'color',
./state_of_aca/guide_acq_warm_pix/make_acq_table_with_warm_pix.py-                    'halfw', 'mag_obs', 'yang_obs', 'zang_obs', 'y_offset', 'z_offset',
--
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py-    
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py:    sqlaca = Ska.DBI.DBI(dbi='sybase', server='sybase', user='aca_read', database='aca', numpy=True)
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py-    min_time = DateTime('2003:001:00:00:00.000')
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py-
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py-    data_table = 'trak_stats_data'
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py-
./state_of_aca/guide_acq_warm_pix/make_gui_table_with_warm_pix.py-    cols_except_date = ('obsid',
--
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py-mp_dir = '/data/mpcrit1/mplogs/'
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py:db = DBI(dbi='sybase', server='sybase', user='aca_read', database='aca')
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py-task = 'aca_dark_cal'
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py-TASK_SHARE = '/proj/sot/ska/share/aca_dark_cal'
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py-
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py-
./state_of_aca/guide_acq_warm_pix/make_warm_pix_estimation_table.py-def get_options():
--
./state_of_aca/responsivity/responsivity.py-
./state_of_aca/responsivity/responsivity.py:    aca_db = DBI(server='sybase', dbi='sybase', user='aca_read')
./state_of_aca/responsivity/responsivity.py-    for agasc_id in repeats['id']:
./state_of_aca/responsivity/responsivity.py-        print agasc_id
./state_of_aca/responsivity/responsivity.py-        if agasc_id in cache:
./state_of_aca/responsivity/responsivity.py-            obsdata = cache[agasc_id]
./state_of_aca/responsivity/responsivity.py-        else:
--
./state_of_aca/responsivity/responsivity.py-def get_repeats(n_repeats=300):  # default gives us 4 stars
./state_of_aca/responsivity/responsivity.py:    aca_db = DBI(server='sybase', dbi='sybase', user='aca_read')
./state_of_aca/responsivity/responsivity.py-    repeats = aca_db.fetchall("select id, count(id) as num_obs from trak_stats_data "
./state_of_aca/responsivity/responsivity.py-                              "group by id having (count(id) > {})".format(n_repeats))
./state_of_aca/responsivity/responsivity.py-    repeats = repeats[repeats['id'] >= 20]
./state_of_aca/responsivity/responsivity.py-    aca_db.conn.close()
./state_of_aca/responsivity/responsivity.py-
--
./validate_states/validate_states.py-    logger.info('Connecting to database to get cmd_states')
./validate_states/validate_states.py:    db = Ska.DBI.DBI(dbi='sybase', server='sybase',
./validate_states/validate_states.py-                     user='aca_read', database='aca')
./validate_states/validate_states.py-
./validate_states/validate_states.py-    datestart = DateTime(datestart).date
./validate_states/validate_states.py-    datestop = DateTime(datestop).date
./validate_states/validate_states.py-    logger.info('Getting commanded states between %s - %s' %
--
./telem_archive/make_obs_aiprops.py-    # Connect to the sqlocc apstat database
./telem_archive/make_obs_aiprops.py:    db_apstat = Ska.DBI.DBI(dbi='sybase', server='sqlocc', database='axafapstat',
./telem_archive/make_obs_aiprops.py-                            user='aca_ops',
./telem_archive/make_obs_aiprops.py-                            verbose=(opt.loglevel<20), numpy=False)
./telem_archive/make_obs_aiprops.py-
./telem_archive/make_obs_aiprops.py-    # Drop table first (mostly for development work)
./telem_archive/make_obs_aiprops.py-    if opt.drop:
--
./periscope_tilt/tilt/find_sources.py-MTIME = 1457707041.222744
./periscope_tilt/tilt/find_sources.py:sqlaca = DBI(dbi='sybase', user='aca_read')
./periscope_tilt/tilt/find_sources.py-
./periscope_tilt/tilt/find_sources.py-
./periscope_tilt/tilt/find_sources.py-
./periscope_tilt/tilt/find_sources.py-#XRAY_DATA = '/data/aca/archive/xray_for_periscope'
./periscope_tilt/tilt/find_sources.py-projdir = '/proj/sot/ska/analysis/periscope_tilt_2016'
--
./periscope_tilt/tilt/review_images.py-
./periscope_tilt/tilt/review_images.py:acadb = DBI(server='sybase', dbi='sybase', user='aca_read')
./periscope_tilt/tilt/review_images.py-DATADIR = 'auto'
./periscope_tilt/tilt/review_images.py-
./periscope_tilt/tilt/review_images.py-obs_srcs = glob(os.path.join(DATADIR, "obs*/picked_src.dat"))
./periscope_tilt/tilt/review_images.py-srcs = []
./periscope_tilt/tilt/review_images.py-for src_file in obs_srcs:
--
./gyro_bias/kalman_threshold/search_scripts/from_derived.py-def get_load_segments():
./gyro_bias/kalman_threshold/search_scripts/from_derived.py:    with Ska.DBI.DBI(dbi='sybase', server='sybase', database='aca', user='aca_read') as db:
./gyro_bias/kalman_threshold/search_scripts/from_derived.py-        load_segments = db.fetchall("select * from load_segments")
./gyro_bias/kalman_threshold/search_scripts/from_derived.py-    load_segments = Table(load_segments)
./gyro_bias/kalman_threshold/search_scripts/from_derived.py-    return load_segments
./gyro_bias/kalman_threshold/search_scripts/from_derived.py-
./gyro_bias/kalman_threshold/search_scripts/from_derived.py-