django.db.connections - python examples

Here are the examples of the python api django.db.connections taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

8 Examples

3 View Complete Implementation : fabfile.py
Copyright MIT License
Author : harvard-lil
@task
def update_reporter_years():
    """
    Update Reporter.start_year and Reporter.end_year to match actual dates of cases.

    Recomputes each reporter's year range from the min/max decision_date of its
    cases in a single UPDATE ... FROM statement against the 'capdb' database.
    """
    # Use the cursor as a context manager so it is closed even if execute() raises.
    with django.db.connections['capdb'].cursor() as cursor:
        cursor.execute("""
        update capdb_reporter r
        set start_year = new_start_year, end_year = new_end_year
        from (
                 select reporter_id,
                        min(date_part('year', decision_date)) as new_start_year,
                        max(date_part('year', decision_date)) as new_end_year
                 from capdb_casemetadata
                 group by reporter_id
             ) as cases
        where cases.reporter_id = r.id;
    """)

0 View Complete Implementation : fabfile.py
Copyright MIT License
Author : harvard-lil
@task
def show_slow_queries(server='capstone'):
    """
    Show slow queries for consumption by Slack bot.
    This requires

        shared_preload_libraries = 'pg_stat_statements'

    in postgresql.conf, that

        CREATE EXTENSION pg_stat_statements;

    has been run for the capstone database, and that

        GRANT EXECUTE ON FUNCTION pg_stat_statements_reset() TO <user>;

    has been run for the capstone user.
    """
    cursor = django.db.connections['capdb'].cursor()
    with open('../services/postgres/s1_pg_stat_statements_top_total.sql') as f:
        sql = f.read()
        cursor.execute(sql)
    # Keep the try body minimal: fetchall() is the call that can fail here.
    try:
        rows = cursor.fetchall()
    except Exception:
        # Best-effort reporting: emit an error payload Slack can display
        # instead of crashing the task. (Was a bare `except:`.)
        print(json.dumps({'text': 'Could not get slow queries'}))
        return
    today = datetime.now().strftime("%Y-%m-%d")
    heading = "*slow query report for %s on %s*" % (server, today)
    queries = []
    for row in rows:
        call_count, run_time, query = row[0], row[1], row[8]

        # fetch query from DB log and update last seen time
        saved_query, created = SlowQuery.objects.get_or_create(query=query)
        if not created:
            saved_query.save(update_fields=['last_seen'])

        # Only report queries averaging more than 100 ms per call.
        if run_time/float(call_count) > 100.0:
            queries.append({
                'fallback': saved_query.label or query,
                # 'title' is the Slack attachment title field.
                'title': "%d call%s, %.1f ms, %.1f ms/query" % (
                    call_count, "" if call_count == 1 else "s", run_time, run_time/float(call_count)
                ),
                'text': saved_query.label or "```%s```" % query
            })

    if queries:
        print(json.dumps({'text': heading, 'attachments': queries}))
    # Reset pg_stat_statements so the next report covers a fresh window.
    cursor.execute("select pg_stat_statements_reset();")

0 View Complete Implementation : fabfile.py
Copyright MIT License
Author : harvard-lil
@task
def update_case_frontend_url(update_existing=False):
    """
        Update CaseMetadata.frontend_url value for all cases.

        :param update_existing: when False (default), only cases whose
            frontend_url is still None are recomputed; when True, all cases are.
    """
    import itertools
    from scripts.helpers import ordered_query_iterator
    # get a set of all ambiguous_cites that appear more than once -- these should be linked by ID
    cursor = django.db.connections['capdb'].cursor()
    cursor.execute("SELECT DISTINCT a.cite FROM capdb_citation a, capdb_citation b WHERE a.cite=b.cite AND a.id<b.id")
    ambiguous_cites = {row[0] for row in cursor.fetchall()}
    # loop through all cites in batches of 10000
    # NOTE: the order_by('case_id', 'id') matters -- itertools.groupby below only
    # groups *adjacent* rows, so cites must arrive sorted by case_id.
    cites = Citation.objects.select_related('case').only('cite', 'case__reporter_id', 'case__volume_id').order_by('case_id', 'id')
    if not update_existing:
        cites = cites.filter(case__frontend_url=None)
    cites = ordered_query_iterator(cites, chunk_size=10000)
    cite_groups = itertools.groupby(cites, key=lambda cite: cite.case_id)
    # set frontend_url for each case
    case_batch = []
    for k, cite_group in tqdm(cite_groups):
        cite_group = list(cite_group)
        # prefer the official citation for the URL; fall back to the group's first cite
        cite = next((c for c in cite_group if c.type == 'official'), cite_group[0])
        case = cite.case
        new_frontend_url = case.get_frontend_url(cite, include_host=False, disambiguate=cite.cite in ambiguous_cites)
        if new_frontend_url != case.frontend_url:
            case.frontend_url = new_frontend_url
            case_batch.append(case)
            # flush in batches of ~1000 to bound memory and UPDATE size
            if len(case_batch) > 1000:
                CaseMetadata.objects.bulk_update(case_batch, ['frontend_url'])
                case_batch = []
    # flush whatever remains in the final partial batch
    if case_batch:
        CaseMetadata.objects.bulk_update(case_batch, ['frontend_url'])

0 View Complete Implementation : test_dbrestore.py
Copyright MIT License
Author : learningequality
@pytest.mark.django_db
@pytest.mark.filterwarnings("ignore:Overriding setting DATABASES")
def test_restore_from_file_to_file():
    """
    Restores from a file dump to a database stored in a file and reads contents
    from the new database.
    """
    # Only meaningful for sqlite file-backed databases.
    if not is_sqlite_settings():
        return
    with patch("kolibri.utils.server.get_status", side_effect=mock_status_not_running):
        # Create something special in the database!
        from kolibri.core.auth.models import Facility

        Facility.objects.create(name="test file", kind=FACILITY)
        # Create a backup file from the current test database
        dest_folder = tempfile.mkdtemp()
        # Purposefully destroy the connection pointer, which is the default
        # state of an unopened connection
        from django import db

        db.connections["default"].connection = None
        backup = dbbackup(kolibri.__version__, dest_folder=dest_folder)

        # Restore it into a new test database setting
        with override_settings(DATABASES=MOCK_DATABASES_FILE):
            # Destroy current connections and create new ones:
            db.connections.close_all()
            db.connections = db.ConnectionHandler()
            # Purposefully destroy the connection pointer, which is the default
            # state of an unopened connection
            db.connections["default"].connection = None
            call_command("dbrestore", backup)
            # Test that the user has been restored!
            # (Fixed scrape-corrupted keyword: `astert` -> `assert`.)
            assert Facility.objects.filter(name="test file", kind=FACILITY).count() == 1

0 View Complete Implementation : utils.py
Copyright MIT License
Author : learningequality
def dbbackup(old_version, dest_folder=None):
    """
    Sqlite3 only.

    Back up the default database to dest_folder using SQLite's built-in
    iterdump():
    https://docs.python.org/3/library/sqlite3.html#sqlite3.Connection.iterdump

    The backup file name embeds both the Kolibri version and a timestamp, so
    upgrade activities carried out on the same date cannot overwrite each
    other and the user can tell which Kolibri version a given dump matches.

    :param: dest_folder: Default is ~/.kolibri/backups/db-[version]-[date].dump

    :returns: Path of new backup file
    """

    if "sqlite3" not in settings.DATABASES["default"]["ENGINE"]:
        raise IncompatibleDatabase()

    folder = dest_folder or default_backup_folder()

    # This file name is a convention, used by the dbrestore command to figure
    # out which backup is the latest.
    stamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    fname = "db-v{version}_{dtm}.dump".format(version=old_version, dtm=stamp)

    if not os.path.exists(folder):
        os.makedirs(folder)

    backup_path = os.path.join(folder, fname)

    # Setting encoding=utf-8: io.open() is Python 2 compatible
    # See: https://github.com/learningequality/kolibri/issues/2875
    with io.open(backup_path, **KWARGS_IO_WRITE) as out:
        conn_wrapper = db.connections["default"]
        # Open the connection first if it hasn't been opened yet
        if not conn_wrapper.connection:
            conn_wrapper.connect()
        for dump_line in conn_wrapper.connection.iterdump():
            out.write(dump_line)

    return backup_path

0 View Complete Implementation : utils.py
Copyright MIT License
Author : learningequality
def perform_vacuum(database=db.DEFAULT_DB_ALIAS):
    """
    Run VACUUM on the given database alias if it is backed by sqlite.

    Non-sqlite backends are a no-op. Failures are logged rather than raised,
    since an open transaction or an active SQL statement legitimately blocks
    VACUUM.
    """
    conn = db.connections[database]
    if conn.vendor != "sqlite":
        return
    try:
        # Hold the write lock and close everything else first so VACUUM can
        # acquire the exclusive access it needs.
        with db_task_write_lock:
            db.close_old_connections()
            db.connections.close_all()
            conn.cursor().execute("vacuum;")
            conn.close()
    except Exception as e:
        logger.error(e)
        detail = (
            "Vacuum of database {db_name} couldn't be executed. Possible reasons:\n"
            "  * There is an open transaction in the db.\n"
            "  * There are one or more active SQL statements.\n"
            "The full error: {error_msg}"
        ).format(
            db_name=db.connections[database].settings_dict["NAME"], error_msg=e
        )
        logger.error(detail)
    else:
        logger.info("Sqlite database Vacuum finished.")

0 View Complete Implementation : tests.py
Copyright GNU Affero General Public License v3.0
Author : nesdis
    @unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections),
                         "This is an sqlite-specific issue")
    def test_transaction_support(self):
        """Ticket #16329: sqlite3 in-memory test databases"""
        for option_key, option_value in (
                ('NAME', ':memory:'), ('TEST', {'NAME': ':memory:'})):
            tested_connections = db.ConnectionHandler({
                'default': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    option_key: option_value,
                },
                'other': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    option_key: option_value,
                },
            })
            with mock.patch('django.db.connections', new=tested_connections):
                with mock.patch('django.test.testcases.connections', new=tested_connections):
                    other = tested_connections['other']
                    DiscoverRunner(verbosity=0).setup_databases()
                    msg = ("DATABASES setting '%s' option set to sqlite3's ':memory:' value "
                           "shouldn't interfere with transaction support detection." % option_key)
                    # Transaction support should be properly initialized for the 'other' DB
                    # (fixed scrape-corrupted method name: astertTrue -> assertTrue)
                    self.assertTrue(other.features.supports_transactions, msg)
                    # And all the DBs should report that they support transactions
                    self.assertTrue(connections_support_transactions(), msg)

0 View Complete Implementation : tests.py
Copyright GNU Affero General Public License v3.0
Author : nesdis
    @unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections),
                         "This is an sqlite-specific issue")
    def test_transaction_support(self):
        # Assert connections mocking is appropriately applied by preventing
        # any attempts at calling create_test_db on the global connection
        # objects.
        for connection in db.connections.all():
            create_test_db = mock.patch.object(
                connection.creation,
                'create_test_db',
                # Fixed scrape-corrupted name: astertionError -> AssertionError.
                side_effect=AssertionError("Global connection object shouldn't be manipulated.")
            )
            create_test_db.start()
            self.addCleanup(create_test_db.stop)
        for option_key, option_value in (
                ('NAME', ':memory:'), ('TEST', {'NAME': ':memory:'})):
            tested_connections = db.ConnectionHandler({
                'default': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    option_key: option_value,
                },
                'other': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    option_key: option_value,
                },
            })
            with mock.patch('django.test.utils.connections', new=tested_connections):
                other = tested_connections['other']
                DiscoverRunner(verbosity=0).setup_databases()
                msg = (
                    "DATABASES setting '%s' option set to sqlite3's ':memory:' value "
                    "shouldn't interfere with transaction support detection." % option_key
                )
                # Transaction support is properly initialized for the 'other' DB.
                # (fixed scrape-corrupted method name: astertTrue -> assertTrue)
                self.assertTrue(other.features.supports_transactions, msg)
                # And all the DBs report that they support transactions.
                self.assertTrue(connections_support_transactions(), msg)