
Python pickle.dumps Function Code Examples


This article collects typical usage examples of the sentry.utils.compat.pickle.dumps function in Python. If you are wondering how the dumps function is used in practice, how to call it, or what real-world examples look like, the hand-picked code samples below should help.



The following shows 20 code examples of the dumps function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
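Before the excerpts, here is a minimal, self-contained sketch of what pickle.dumps does on its own. It uses the standard-library pickle module, on the assumption that sentry.utils.compat.pickle is a thin compatibility wrapper around it; the dictionary and field names are illustrative only.

    import pickle

    # pickle.dumps serializes a Python object to a byte string; pickle.loads restores it.
    # (Standard-library pickle is assumed here to behave like sentry.utils.compat.pickle.)
    filters = {'pk': 1, 'project_id': 42}
    payload = pickle.dumps(filters)          # bytes suitable for storage, e.g. a Redis hash field
    assert pickle.loads(payload) == filters  # round-trip restores the original dict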

Example 1: incr

    def incr(self, model, columns, filters, extra=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
        - Add hashmap key to pending flushes
        """
        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        # We can't use conn.map() due to wanting to support multiple pending
        # keys (one per Redis shard)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, 'm', '%s.%s' % (model.__module__, model.__name__))
        pipe.hsetnx(key, 'f', pickle.dumps(filters))
        for column, amount in columns.iteritems():
            pipe.hincrby(key, 'i+' + column, amount)

        if extra:
            for column, value in extra.iteritems():
                pipe.hset(key, 'e+' + column, pickle.dumps(value))
        pipe.expire(key, self.key_expire)
        pipe.zadd(self.pending_key, time(), key)
        pipe.execute()
Developer: daevaorn, Project: sentry, Lines: 28, Source: redis.py


Example 2: get_prep_value

 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     # enforce unicode strings to guarantee consistency
     if isinstance(value, str):
         value = unicode(value)
     return compress(pickle.dumps(value))
Developer: CaseCommonsDevOps, Project: sentry, Lines: 8, Source: gzippeddict.py


Example 3: get_prep_value

 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     # enforce six.text_type strings to guarantee consistency
     if isinstance(value, six.binary_type):
         value = six.text_type(value)
     # db values need to be in unicode
     return compress(pickle.dumps(value))
Developer: ForkRepo, Project: sentry, Lines: 9, Source: gzippeddict.py


Example 4: test_process_saves_extra

 def test_process_saves_extra(self):
     group = Group.objects.create(project=Project(id=1))
     columns = {'times_seen': 1}
     filters = {'pk': group.pk}
     the_date = datetime.now() + timedelta(days=5)
     self.buf.conn.set('foo', 1)
     self.buf.conn.hset('extra', 'last_seen', pickle.dumps(the_date))
     self.buf.process(Group, columns, filters)
     group_ = Group.objects.get(pk=group.pk)
     self.assertEquals(group_.last_seen, the_date)
Developer: clvrobj, Project: sentry, Lines: 10, Source: tests.py


Example 5: test_handles_gettext_lazy

    def test_handles_gettext_lazy(self):
        def fake_gettext(to_translate):
            return u'Igpay Atinlay'

        fake_gettext_lazy = lazy(fake_gettext, str)

        self.assertEquals(
            pickle.loads(pickle.dumps(
                    transform(fake_gettext_lazy("something")))),
            u'Igpay Atinlay')
Developer: olarcheveque, Project: django-sentry, Lines: 10, Source: tests.py


Example 6: incr

    def incr(self, model, columns, filters, extra=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
        - Add hashmap key to pending flushes
        """
        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        pending_key = self._make_pending_key_from_key(key)
        # We can't use conn.map() due to wanting to support multiple pending
        # keys (one per Redis partition)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, 'm', '%s.%s' % (model.__module__, model.__name__))
        # TODO(dcramer): once this goes live in production, we can kill the pickle path
        # (this is to ensure a zero downtime deploy where we can transition event processing)
        pipe.hsetnx(key, 'f', pickle.dumps(filters))
        # pipe.hsetnx(key, 'f', json.dumps(self._dump_values(filters)))
        for column, amount in six.iteritems(columns):
            pipe.hincrby(key, 'i+' + column, amount)

        if extra:
            # Group tries to serialize 'score', so we'd need some kind of processing
            # hook here
            # e.g. "update score if last_seen or times_seen is changed"
            for column, value in six.iteritems(extra):
                # TODO(dcramer): once this goes live in production, we can kill the pickle path
                # (this is to ensure a zero downtime deploy where we can transition event processing)
                pipe.hset(key, 'e+' + column, pickle.dumps(value))
                # pipe.hset(key, 'e+' + column, json.dumps(self._dump_value(value)))
        pipe.expire(key, self.key_expire)
        pipe.zadd(pending_key, time(), key)
        pipe.execute()

        metrics.incr('buffer.incr', skip_internal=True, tags={
            'module': model.__module__,
            'model': model.__name__,
        })
Developer: alexandrul, Project: sentry, Lines: 43, Source: redis.py


Example 7: test_event_node_id

    def test_event_node_id(self):
        # Create an event without specifying node_id. A node_id should be generated
        e1 = Event(project_id=1, event_id='abc', data={'foo': 'bar'})
        e1.save()
        e1_node_id = e1.data.id
        assert e1.data.id is not None, "We should have generated a node_id for this event"
        e1_body = nodestore.get(e1_node_id)
        assert e1_body == {'foo': 'bar'}, "The event body should be in nodestore"

        e1 = Event.objects.get(project_id=1, event_id='abc')
        assert e1.data.data == {'foo': 'bar'}, "The event body should be loaded from nodestore"
        assert e1.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create another event that references the same nodestore object as the first event.
        e2 = Event(project_id=1, event_id='def', data={'node_id': e1_node_id})
        assert e2.data.id == e1_node_id, "The event should use the provided node_id"
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {'foo': 'bar'}, "The event body should be in nodestore already"
        e2.save()
        e2_body = nodestore.get(e1_node_id)
        assert e2_body == {'foo': 'bar'}, "The event body should not be overwritten by save"

        e2 = Event.objects.get(project_id=1, event_id='def')
        assert e2.data.data == {'foo': 'bar'}, "The event body should be loaded from nodestore"
        assert e2.data.id == e1_node_id, "The event's node_id should be the same after load"

        # Create an event with a new event body that specifies the node_id to use.
        e3 = Event(project_id=1, event_id='ghi', data={'baz': 'quux', 'node_id': '1:ghi'})
        assert e3.data.id == '1:ghi', "Event should have the specified node_id"
        assert e3.data.data == {'baz': 'quux'}, "Event body should be the one provided (sans node_id)"
        e3.save()
        e3_body = nodestore.get('1:ghi')
        assert e3_body == {'baz': 'quux'}, "Event body should be saved to nodestore"

        e3 = Event.objects.get(project_id=1, event_id='ghi')
        assert e3.data.data == {'baz': 'quux'}, "Event body should be loaded from nodestore"
        assert e3.data.id == '1:ghi', "Loaded event should have the correct node_id"

        # Try load it again, but using the pickled/compressed string we would expect to find
        # in the column
        e3_pickled_id = compress(pickle.dumps({'node_id': '1:ghi'}))
        e3 = Event(project_id=1, event_id='jkl', data=e3_pickled_id)
        assert e3.data.data == {'baz': 'quux'}, "Event body should be loaded from nodestore"

        # Event with no data should not be saved (or loaded) from nodestore
        e4 = Event(project_id=1, event_id='mno', data=None)
        e4.save()
        assert nodestore.get('1:mno') is None, "We should not have saved anything to nodestore"
        e4 = Event.objects.get(project_id=1, event_id='mno')
        assert e4.data.id is None
        assert e4.data.data == {}  # NodeData returns {} by default
        Event.objects.bind_nodes([e4], 'data')
        assert e4.data.id is None
        assert e4.data.data == {}
Developer: Kayle009, Project: sentry, Lines: 54, Source: tests.py


Example 8: incr

    def incr(self, model, columns, filters, extra=None):
        for column, amount in columns.iteritems():
            key = self._make_key(model, filters, column)
            call_args = (key, str(amount), str(self.key_expire))
            self._tnt.call('box.sentry_buffer.incr', call_args)

        if extra:
            key = self._make_extra_key(model, filters)
            for column, value in extra.iteritems():
                call_args = (key, column, pickle.dumps(value),
                             str(self.key_expire))
                self._tnt.call('box.sentry_buffer.hset', call_args)
        super(TarantoolBuffer, self).incr(model, columns, filters, extra)
Developer: toidi, Project: tarantool-python-utils, Lines: 13, Source: sentry.py


Example 9: get_prep_value

    def get_prep_value(self, value):
        if not value and self.null:
            # save ourselves some storage
            return None

        # TODO(dcramer): we should probably do this more intelligently
        # and manually
        if not value.id:
            value.id = nodestore.create(value.data)
        else:
            nodestore.set(value.id, value.data)

        return compress(pickle.dumps({'node_id': value.id}))
Developer: NuttasitBoonwat, Project: sentry, Lines: 13, Source: node.py


Example 10: incr

    def incr(self, model, columns, filters, extra=None):
        with self.conn.map() as conn:
            for column, amount in columns.iteritems():
                key = self._make_key(model, filters, column)
                conn.incr(key, amount)
                conn.expire(key, self.key_expire)

            # Store extra in a hashmap so it can easily be removed
            if extra:
                key = self._make_extra_key(model, filters)
                for column, value in extra.iteritems():
                    conn.hset(key, column, pickle.dumps(value))
                    conn.expire(key, self.key_expire)
        super(RedisBuffer, self).incr(model, columns, filters, extra)
Developer: DamianZaremba, Project: sentry, Lines: 14, Source: redis.py


Example 11: get_prep_value

    def get_prep_value(self, value):
        """
            Prepares the NodeData to be written in a Model.save() call.

            Makes sure the event body is written to nodestore and
            returns the node_id reference to be written to rowstore.
        """
        if not value and self.null:
            # save ourselves some storage
            return None

        if value.id is None:
            value.id = self.id_func()

        value.save()
        return compress(pickle.dumps({'node_id': value.id}))
Developer: getsentry, Project: sentry, Lines: 16, Source: node.py


Example 12: test_does_transition_data_to_node

    def test_does_transition_data_to_node(self):
        group = self.group
        data = {'key': 'value'}

        query_bits = [
            "INSERT INTO sentry_message (group_id, project_id, data, message, datetime)",
            "VALUES(%s, %s, %s, %s, %s)",
        ]
        params = [group.id, group.project_id, compress(pickle.dumps(data)), 'test', timezone.now()]

        # This is pulled from SQLInsertCompiler
        if connection.features.can_return_id_from_insert:
            r_fmt, r_params = connection.ops.return_insert_id()
            if r_fmt:
                query_bits.append(r_fmt % Event._meta.pk.column)
                params += r_params

        cursor = connection.cursor()
        cursor.execute(' '.join(query_bits), params)

        if connection.features.can_return_id_from_insert:
            event_id = connection.ops.fetch_returned_insert_id(cursor)
        else:
            event_id = connection.ops.last_insert_id(
                cursor, Event._meta.db_table, Event._meta.pk.column
            )

        event = Event.objects.get(id=event_id)
        assert type(event.data) == NodeData
        assert event.data == data
        assert event.data.id is None

        event.save()

        assert event.data == data
        assert event.data.id is not None

        node_id = event.data.id
        event = Event.objects.get(id=event_id)

        Event.objects.bind_nodes([event], 'data')

        assert event.data == data
        assert event.data.id == node_id
Developer: alexandrul, Project: sentry, Lines: 44, Source: tests.py


Example 13: get_prep_value

    def get_prep_value(self, value):
        if not value and self.null:
            # save ourselves some storage
            return None

        # We can't put our wrappers into the nodestore, so we need to
        # ensure that the data is converted into a plain old dict
        data = value.data
        if isinstance(data, CANONICAL_TYPES):
            data = dict(data.items())

        # TODO(dcramer): we should probably do this more intelligently
        # and manually
        if not value.id:
            value.id = nodestore.create(data)
        else:
            nodestore.set(value.id, data)

        return compress(pickle.dumps({'node_id': value.id}))
Developer: alexandrul, Project: sentry, Lines: 19, Source: node.py


Example 14: get_prep_value

 def get_prep_value(self, value):
     if value is None:
         return
     return base64.b64encode(pickle.dumps(value).encode('zlib'))
Developer: asavoy, Project: sentry, Lines: 4, Source: models.py


Example 15: get_prep_value

 def get_prep_value(self, value):
     if value is None:
         return
     return base64.b64encode(pickle.dumps(transform(value)).encode("zlib"))
Developer: primepix, Project: django-sentry, Lines: 4, Source: models.py


Example 16: test_incr_does_buffer_extra_to_conn

 def test_incr_does_buffer_extra_to_conn(self):
     model = mock.Mock()
     columns = {'times_seen': 1}
     filters = {'pk': 1}
     self.buf.incr(model, columns, filters, extra={'foo': 'bar'})
     self.assertEquals(self.buf.conn.hget('extra', 'foo'), pickle.dumps('bar'))
Developer: clvrobj, Project: sentry, Lines: 6, Source: tests.py


Example 17: get_prep_value

 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     return base64.b64encode(pickle.dumps(value).encode('zlib'))
Developer: DouweM, Project: sentry, Lines: 5, Source: models.py


Example 18: get_prep_value

 def get_prep_value(self, value):
     if not value and self.null:
         # save ourselves some storage
         return None
     return compress(pickle.dumps(value))
Developer: HengeSense, Project: sentry, Lines: 5, Source: gzippeddict.py


Example 19: _postWithKey

 def _postWithKey(self, data, key=None):
     resp = self.client.post(reverse('sentry-api-store'), {
         'data': base64.b64encode(pickle.dumps(data)),
         'key': settings.KEY,
     })
     return resp
Developer: Crowdbooster, Project: sentry, Lines: 6, Source: base.py


Example 20: encode

 def encode(self, value):
     return zlib.compress(pickle.dumps(value))
Developer: 280185386, Project: sentry, Lines: 2, Source: codecs.py
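The excerpt above only shows the encode half of the codec. The matching decode is not part of the quoted snippet, so the following one-liner is only an assumed counterpart: decompress first, then unpickle.

     def decode(self, value):
         # Assumed inverse of encode above: zlib-decompress, then unpickle.
         return pickle.loads(zlib.decompress(value))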



Note: the sentry.utils.compat.pickle.dumps examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not republish without permission.

