django.db.models.QuerySet - Python examples

Here are examples of the Python API django.db.models.QuerySet taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

11 Examples

Example: types.py (3 votes)
Copyright BSD 3-Clause "New" or "Revised" License
Author: BertrandBordage
    def __init__(self, field, value):
        self.field = field
        self.level_size = self.field.level_size
        self.attname = getattr(self.field, 'attname', None)
        self.field_bound = self.attname is not None
        self.qs = (self.field.model._default_manager.all()
                   if self.field_bound else QuerySet())
        self.value = value

Example: WfModule.py (3 votes)
Copyright GNU Affero General Public License v3.0
Author: CJWorkbench
    @classmethod
    def live_in_workflow(cls, workflow: Union[int, Workflow]) -> models.QuerySet:
        """
        QuerySet of not-deleted WfModules in `workflow`.

        You may specify `workflow` by its `pk` or as an object.

        Deleted WfModules and WfModules in deleted Tabs will be omitted.
        """
        if isinstance(workflow, int):
            workflow_id = workflow
        else:
            workflow_id = workflow.pk

        return cls.objects.filter(
            tab__workflow_id=workflow_id, tab__is_deleted=False, is_deleted=False
        )
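
A usage sketch of the classmethod above (the Workflow lookup is hypothetical, for illustration only): both forms of the argument resolve to the same filter.

workflow = Workflow.objects.get(pk=1)           # assumed to exist
by_object = WfModule.live_in_workflow(workflow)
by_pk = WfModule.live_in_workflow(workflow.pk)
# both calls apply the same tab__workflow_id filter, so the results match
assert list(by_object) == list(by_pk)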

Example: test_receipts_command.py (3 votes)
Copyright MIT License
Author: okfn-brasil
    @patch.object(QuerySet, '__getitem__')
    @patch.object(QuerySet, 'filter', return_value=QuerySet())
    def test_get_queryset(self, filter_, getitem):
        command = Command()
        command.batch = 42
        command.get_queryset()
        filter_.assert_called_once_with(receipt_fetched=False)
        getitem.assert_called_once_with(slice(None, 42))
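
The two mocks pin down the shape of the method under test: a filter on receipt_fetched=False followed by a slice of size batch. A sketch of what such a get_queryset might look like follows; the Reimbursement model name is an assumption, not taken from the project.

def get_queryset(self):
    # Assumed implementation matching the assertions above: filter the
    # unfetched receipts, then slice to the batch size (slicing calls
    # QuerySet.__getitem__ with slice(None, 42)).
    return Reimbursement.objects.filter(receipt_fetched=False)[:self.batch]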

Example: manager.py (3 votes)
Copyright Apache License 2.0
Author: raphaelm
    def none(self):
        c = models.QuerySet(model=self.model, query=self.query.chain(), using=self._db, hints=self._hints)
        c._sticky_filter = self._sticky_filter
        c._for_write = self._for_write
        c._prefetch_related_lookups = self._prefetch_related_lookups[:]
        c._known_related_objects = self._known_related_objects
        c._iterable_class = self._iterable_class
        c._fields = self._fields
        return c.none()

Example: utils.py (0 votes)
Copyright GNU General Public License v3.0
Author: banxi1988
def cascade_archive(inst_or_qs, using, keep_parents=False):
    """
    Return collector instance that has marked ArchiveMixin instances for
    archive (i.e. update) instead of actual delete.

    Arguments:
        inst_or_qs (models.Model or models.QuerySet): the instance(s) that
            are to be deleted.
        using (db connection/router): the db to delete from.
        keep_parents (bool): defaults to False; passed through to the
            collector so that multi-table-inheritance parents are kept
            when True.

    Returns:
        models.deletion.Collector: this is a standard Collector instance but
            the ArchiveMixin instances are in the fields for update list.
    """
    from .mixins import SoftDeleteMixin

    if not isinstance(inst_or_qs, models.QuerySet):
        instances = [inst_or_qs]
    else:
        instances = inst_or_qs

    deleted_ts = timezone.now()

    # The collector will iteratively crawl the relationships and
    # create a list of models and instances that are connected to
    # this instance.
    collector = models.deletion.Collector(using=using)
    collector.collect(instances, keep_parents=keep_parents)
    collector.sort()

    # iterate over a snapshot, since entries may be removed from
    # collector.data inside the loop
    for model, instances in list(collector.data.items()):
        # remove archive mixin models from the delete list and put
        # them in the update list.  If we do this, we can just call
        # the collector.delete method.
        inst_list = list(instances)

        if issubclass(model, SoftDeleteMixin):
            _deleted_at_field = get_field_by_name(model, '_deleted_at')
            collector.add_field_update(
                _deleted_at_field, deleted_ts, inst_list)

            del collector.data[model]

    for i, qs in enumerate(collector.fast_deletes):
        # make sure that we do archive on fast deletable models as
        # well.
        model = qs.model

        if issubclass(model, SoftDeleteMixin):
            _deleted_at_field = get_field_by_name(model, '_deleted_at')
            collector.add_field_update(_deleted_at_field, deleted_ts, qs)

            collector.fast_deletes[i] = qs.none()

    return collector
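
A hedged usage sketch: the returned collector can then be asked to delete, which executes the scheduled field updates for the soft-deletable models and real deletes for everything else. MyModel and the lookup are placeholders.

instance = MyModel.objects.get(pk=1)             # placeholder model/lookup
collector = cascade_archive(instance, using='default')
# the soft-delete timestamp updates and the remaining hard deletes are
# executed together by the standard Collector.delete() machinery
collector.delete()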

Example: utils.py (0 votes)
Copyright GNU General Public License v2.0
Author: fresearchgroup
def badge_count(user_or_qs=None):
    """
    Given a user or queryset of users, this returns the badge
    count at each badge level that the user(s) have earned.

    Example:

     >>> badge_count(User.objects.filter(username='admin'))
     [{'count': 0, 'badge__level': '1'}, {'count': 0, 'badge__level': '2'}, {'count': 0, 'badge__level': '3'}, {'count': 0, 'badge__level': '4'}]

    Uses a single database query.
    """

    badge_counts = BadgeToUser.objects.all()
    if isinstance(user_or_qs, User):
        badge_counts = badge_counts.filter(user=user_or_qs)
    elif isinstance(user_or_qs, models.QuerySet):
        badge_counts = badge_counts.filter(user__in=user_or_qs)

    badge_counts = badge_counts.values('badge__level')
    badge_counts = badge_counts.annotate(count=models.Count('badge__level'))

    def get_badge_count(level):
        bc = [bc for bc in badge_counts if bc['badge__level'] == level]
        if bc:
            return bc[0]
        else:
            # if the user has no badges at this level, return the appropriate response
            return {'count': 0, 'badge__level': level}

    return [get_badge_count(level_choice[0]) for level_choice in LEVEL_CHOICES]
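
Besides the queryset form shown in the docstring, a single User instance or no argument at all can be passed. A short sketch (the username is hypothetical):

user = User.objects.get(username='alice')        # hypothetical user
badge_count(user)                # counts for one user (filter user=...)
badge_count(User.objects.all())  # counts across a queryset (user__in=...)
badge_count()                    # counts across all BadgeToUser rows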

Example: models.py (0 votes)
Copyright MIT License
Author: netzkolchose
    @classmethod
    def _querysets_for_update(mcs, model, instance, update_fields=None, pk_list=False):
        """
        Returns a mapping of all dependent models, dependent fields and a
        queryset containing all dependent objects.
        """
        final = OrderedDict()
        modeldata = mcs._map.get(model)
        if not modeldata:
            return final
        if not update_fields:
            updates = set(modeldata.keys())
        else:
            updates = set()
            for fieldname in update_fields:
                if fieldname in modeldata:
                    updates.add(fieldname)
        subquery = '__in' if isinstance(instance, models.QuerySet) else ''
        model_updates = OrderedDict()
        for update in updates:
            # first aggregate fields and paths to cover
            # multiple comp field dependencies
            for model, resolver in modeldata[update].items():
                fields, paths = resolver
                m_fields, m_paths = model_updates.setdefault(model, [set(), set()])
                m_fields.update(fields)
                m_paths.update(paths)
        for model, data in model_updates.items():
            fields, paths = data
            qs = model.objects.none()
            for path in paths:
                qs |= model.objects.filter(**{path+subquery: instance})
            if pk_list:
                # need pks for post_delete since the real queryset will be empty
                # after deleting the instance in question
                # since we need to interact with the db anyways
                # we can already drop empty results here
                qs = set(qs.distinct().values_list('pk', flat=True))
                if not qs:
                    continue
            final[model] = [qs, fields]
        return final
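
The path + subquery trick is the core of the method: with a single instance the filter is an exact match, with a queryset it becomes an __in lookup. In isolation the pattern looks roughly like this; Book, Author and the 'author' path are illustrative names, not part of the project.

from django.db import models

def dependent_books(instance):
    # illustrative only: Book and its 'author' path stand in for the
    # dependency paths collected above
    subquery = '__in' if isinstance(instance, models.QuerySet) else ''
    return Book.objects.filter(**{'author' + subquery: instance})

# dependent_books(author_obj)            -> filter(author=author_obj)
# dependent_books(Author.objects.all())  -> filter(author__in=<queryset>)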

Example: models.py (0 votes)
Copyright MIT License
Author: netzkolchose
    @classmethod
    def update_dependent(mcs, instance, model=None, update_fields=None):
        """
        Updates all dependent computed fields model objects.

        This is needed if you have computed fields that depend on a model
        changed by bulk actions. Simply call this function after the update
        with the queryset containing the changed objects.
        The queryset must not be finalized by ``distinct`` or any other means.

            >>> Entry.objects.filter(pub_date__year=2010).update(comments_on=False)
            >>> update_dependent(Entry.objects.filter(pub_date__year=2010))

        This can also be used with ``bulk_create``. Since ``bulk_create``
        returns the objects in a python container, you have to create the queryset
        yourself, e.g. with pks:

            >>> objs = Entry.objects.bulk_create([
            ...     Entry(headline='This is a test'),
            ...     Entry(headline='This is only a test'),
            ... ])
            >>> pks = set(obj.pk for obj in objs)
            >>> update_dependent(Entry.objects.filter(pk__in=pks))

        .. NOTE::

            This function cannot be used to update computed fields on a
            computed fields model itself. For computed fields models always
            use ``save`` on the model objects. You still can use
            ``update`` or ``bulk_create`` but have to call
            ``save`` afterwards:

                >>> objs = SomeComputedFieldsModel.objects.bulk_create([
                ...     SomeComputedFieldsModel(headline='This is a test'),
                ...     SomeComputedFieldsModel(headline='This is only a test'),
                ... ])
                >>> for obj in objs:
                ...     obj.save()

            (This behavior might change with future versions.)

        For completeness - ``instance`` can also be a single model instance.
        Since calling ``save`` on a model instance will trigger this function via
        the ``post_save`` signal, it should not be invoked for single model
        instances if they get saved anyway.
        """
        if not model:
            if isinstance(instance, models.QuerySet):
                model = instance.model
            else:
                model = type(instance)
        updates = mcs._querysets_for_update(model, instance, update_fields).values()
        if not updates:
            return
        with transaction.atomic():
            for qs, fields in updates:
                for el in qs.distinct():
                    el.save(update_fields=fields)

Example: models.py (0 votes)
Copyright MIT License
Author: netzkolchose
    @classmethod
    def update_dependent_multi(mcs, instances):
        """
        Updates all dependent computed fields model objects for multiple instances.

        This function avoids redundant updates if consecutive ``update_dependent``
        have intersections, example:

            >>> update_dependent(Foo.objects.filter(i='x'))  # updates A, B, C
            >>> update_dependent(Bar.objects.filter(j='y'))  # updates B, C, D
            >>> update_dependent(Baz.objects.filter(k='z'))  # updates C, D, E

        In the example the models ``B`` and ``D`` would be queried twice,
        ``C`` even three times. It gets even worse if the queries contain record
        intersections: those items would be queried and saved several times.

        The updates above can be rewritten as:

            >>> update_dependent_multi([
            ...     Foo.objects.filter(i='x'),
            ...     Bar.objects.filter(j='y'),
            ...     Baz.objects.filter(k='z')])

        where all dependent model objects get queried and saved only once.
        The underlying querysets are expanded accordingly.

        .. NOTE::

            ``instances`` can also contain model instances. Don't use
            this function for model instances of the same type; instead,
            aggregate those into querysets and use ``update_dependent``
            (as shown for ``bulk_create`` above), or
            ``update_dependent_multi`` if you have multiple such
            aggregated querysets.
        """
        final = {}
        for instance in instances:
            model = instance.model if isinstance(instance, models.QuerySet) else type(instance)
            updates = mcs._querysets_for_update(model, instance, None)
            for model, data in updates.items():
                m = final.setdefault(model, [model.objects.none(), set()])
                m[0] |= data[0]       # or'ed querysets
                m[1].update(data[1])  # add fields
        with transaction.atomic():
            for qs, fields in final.values():
                if qs.exists():
                    for el in qs.distinct():
                        el.save(update_fields=fields)

Example: models.py (0 votes)
Copyright GNU General Public License v3.0
Author: openwisp
    @classmethod
    def clean_templates_org(cls, action, instance, pk_set, **kwargs):
        templates = cls.get_templates_from_pk_set(action, pk_set)
        if not templates:
            return templates
        # when using the admin, templates will be a list
        # we need to get the queryset from this list in order to proceed
        if not isinstance(templates, models.QuerySet):
            template_model = cls.templates.rel.model
            pk_list = [template.pk for template in templates]
            templates = template_model.objects.filter(pk__in=pk_list)
        # look for invalid templates
        invalids = templates.exclude(organization=instance.device.organization) \
                            .exclude(organization=None) \
                            .values('name')

        if templates and invalids:
            names = ''
            for invalid in invalids:
                names = '{0}, {1}'.format(names, invalid['name'])
            names = names[2:]
            message = _('The following templates are owned by organizations '
                        'which do not match the organization of this '
                        'configuration: {0}').format(names)
            raise ValidationError(message)
        # return valid templates in order to save computation
        # in the following operations
        return templates
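
The (action, instance, pk_set) signature matches the arguments of Django's m2m_changed signal, so a handler like this is typically wired up roughly as follows. This is a hypothetical sketch with Config as a placeholder for the class above; the project's actual signal registration may differ.

from django.db.models.signals import m2m_changed

# hypothetical registration: validate templates whenever the
# Config.templates m2m relation changes
m2m_changed.connect(Config.clean_templates_org,
                    sender=Config.templates.through)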