本文整理汇总了Python中r2.lib.normalized_hot.normalized_hot函数的典型用法代码示例。如果您正苦于以下问题:Python normalized_hot函数的具体用法?Python normalized_hot怎么用?Python normalized_hot使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了normalized_hot函数的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: query
def query(self):
    """Build the query backing the 'hot' listing for the current site.

    Front page -> normalized hot across the user's subreddits; multis ->
    normalized hot across kept srs; otherwise the sub's own hot links,
    with any visible sticky post pinned to the front.
    """
    site = c.site
    if isinstance(site, DefaultSR):
        # Front page: scope and adult filtering depend on login state.
        if c.user_is_loggedin:
            limit = Subreddit.DEFAULT_LIMIT
            adult = c.user.has_subscribed and c.over18
        else:
            limit = g.num_default_reddits
            adult = False
        return normalized_hot(
            Subreddit.user_subreddits(c.user, limit=limit, over18=adult))
    if isinstance(site, MultiReddit):
        return normalized_hot(site.kept_sr_ids, obey_age_limit=False)
    sticky = site.sticky_fullname
    if sticky:
        names = [sticky]
        shown = wrap_links(names,
                           wrapper=self.builder_wrapper,
                           keep_fn=self.keep_fn(),
                           skip=True)
        # add all other items and decrement count if sticky is visible
        if shown.things:
            names += [fn for fn in site.get_links('hot', 'all')
                      if fn != sticky]
            if not self.after:
                self.count -= 1
                self.num += 1
            return names
    # no sticky or sticky hidden
    return site.get_links('hot', 'all')
开发者ID:aburan28,项目名称:reddit,代码行数:34,代码来源:listingcontroller.py
示例2: query
def query(self):
    """Return the query/list backing the hot listing for c.site."""
    # no need to worry when working from the cache
    if g.use_query_cache or isinstance(c.site, DefaultSR):
        self.fix_listing = False
    if isinstance(c.site, DefaultSR):
        # Front page: scope and adult filtering depend on login state.
        if c.user_is_loggedin:
            limit = Subreddit.DEFAULT_LIMIT
            adult = c.user.has_subscribed and c.over18
        else:
            limit = g.num_default_reddits
            adult = False
        ids = Subreddit.user_subreddits(c.user, limit=limit, over18=adult)
        return normalized_hot(ids)
    if isinstance(c.site, MultiReddit):
        return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
    # if not using the query_cache we still want cached front pages
    if (not g.use_query_cache
            and not isinstance(c.site, FakeSubreddit)
            and self.after is None
            and self.count == 0):
        return get_hot([c.site])
    return c.site.get_links('hot', 'all')
开发者ID:LDot,项目名称:reddit,代码行数:29,代码来源:listingcontroller.py
示例3: find_preview_links
def find_preview_links(sr):
    """Return up to 25 hot Link objects for previewing sr's stylesheet.

    Falls back to the default subreddits when sr has no hot links; returns
    the empty result unchanged when neither source yields anything.
    """
    from r2.lib.normalized_hot import normalized_hot
    # try to find a link to use, otherwise give up and return
    fullnames = normalized_hot([sr._id])
    if not fullnames:
        fullnames = normalized_hot(Subreddit.default_subreddits())
    if not fullnames:
        return fullnames
    return Link._by_fullname(fullnames[:25], data=True, return_dict=False)
开发者ID:tolgaek,项目名称:reddit,代码行数:13,代码来源:cssfilter.py
示例4: query
def query(self):
    """Hot links across the viewer's subreddits (anonymous gets defaults)."""
    viewer = c.user if c.user_is_loggedin else None
    subs = Subreddit._byID(Subreddit.user_subreddits(viewer),
                           data=True,
                           return_dict=False)
    return normalized_hot(subs)
开发者ID:cmak,项目名称:reddit,代码行数:7,代码来源:listingcontroller.py
示例5: get_hot_items
def get_hot_items(srs, item_type, src):
    """Get hot links from specified srs, wrapped as ExploreItems."""
    by_id = {sr._id: sr for sr in srs}  # for looking up sr by id
    fullnames = normalized_hot(sr._id for sr in srs)
    return [ExploreItem(item_type, src, by_id[link.sr_id], link)
            for link in Link._by_fullname(fullnames, return_dict=False)]
开发者ID:0xcd03,项目名称:reddit,代码行数:9,代码来源:recommender.py
示例6: query
def query(self):
    """Build the hot-listing query for the current site."""
    # no need to worry when working from the cache
    # TODO: just remove this then since we're always using the query cache
    self.fix_listing = False
    site = c.site
    if isinstance(site, MultiReddit):
        return normalized_hot(site.kept_sr_ids, obey_age_limit=False)
    if not isinstance(site, DefaultSR):
        return site.get_links("hot", "all")
    # Front page: scope and adult filtering depend on login state.
    if c.user_is_loggedin:
        limit = Subreddit.DEFAULT_LIMIT
        adult = c.user.has_subscribed and c.over18
    else:
        limit = g.num_default_reddits
        adult = False
    return normalized_hot(
        Subreddit.user_subreddits(c.user, limit=limit, over18=adult))
开发者ID:qq40660,项目名称:MyReddit,代码行数:20,代码来源:listingcontroller.py
示例7: query
def query(self):
    """Hot listing query; handles front page, multis, and sticky posts."""
    if isinstance(c.site, DefaultSR):
        return normalized_hot(Subreddit.user_subreddits(c.user))
    if isinstance(c.site, MultiReddit):
        return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
    sticky = c.site.sticky_fullname
    if sticky:
        names = [sticky]
        shown = wrap_links(names, wrapper=self.builder_wrapper,
                           keep_fn=self.keep_fn(), skip=True)
        # add all other items and decrement count if sticky is visible
        if shown.things:
            names += [fn for fn in c.site.get_links("hot", "all")
                      if fn != sticky]
            if not self.after:
                self.count -= 1
                self.num += 1
            return names
    # no sticky or sticky hidden
    return c.site.get_links("hot", "all")
开发者ID:kcchristianson,项目名称:reddit,代码行数:21,代码来源:listingcontroller.py
示例8: query
def query(self):
    """Hot listing query for the Subdigg variant."""
    on_front = c.site == Default
    # no need to worry when working from the cache
    if g.use_query_cache or on_front:
        self.fix_listing = False
    if on_front:
        return normalized_hot(Subdigg.user_subreddits(c.user))
    # if not using the query_cache we still want cached front pages
    if (not g.use_query_cache
            and not isinstance(c.site, FakeSubdigg)
            and self.after is None
            and self.count == 0):
        return [link._fullname for link in get_hot(c.site)]
    return c.site.get_links("hot", "all")
开发者ID:kevinrose,项目名称:diggit,代码行数:13,代码来源:listingcontroller.py
示例9: get_comment_items
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(fullnames[:count], return_dict=False)
    comments = []
    for link in hot_links:
        # one highest-confidence comment per link
        builder = CommentBuilder(link, operators.desc("_confidence"),
                                 comment=None, context=None, num=1,
                                 load_more=False)
        nested = NestedListing(builder, parent_name=link._fullname).listing()
        comments.extend(nested.things)
    # batch-fetch the subreddit and link objects each comment belongs to
    sr_by_id = Subreddit._byID([cm.sr_id for cm in comments])
    link_by_id = Link._byID([cm.link_id for cm in comments])
    return [ExploreItem(TYPE_COMMENT, src, sr_by_id[cm.sr_id],
                        link_by_id[cm.link_id], cm)
            for cm in comments]
开发者ID:Shilohtd,项目名称:reddit,代码行数:15,代码来源:recommender.py
示例10: query
def query(self):
    """Hot listing query (legacy Default front page)."""
    on_front = c.site == Default
    # no need to worry when working from the cache
    if g.use_query_cache or on_front:
        self.fix_listing = False
    if on_front:
        return normalized_hot(Subreddit.user_subreddits(c.user))
    # if not using the query_cache we still want cached front pages
    if (not g.use_query_cache
            and not isinstance(c.site, FakeSubreddit)
            and self.after is None
            and self.count == 0):
        return get_hot([c.site], only_fullnames=True)[0]
    return c.site.get_links('hot', 'all')
开发者ID:szimpatikus,项目名称:szimpatikus.hu,代码行数:16,代码来源:listingcontroller.py
示例11: query
def query(self):
    """Hot listing query; front-page subreddit count depends on login."""
    on_front = c.site == Default
    # no need to worry when working from the cache
    if g.use_query_cache or on_front:
        self.fix_listing = False
    if on_front:
        limit = (Subreddit.sr_limit if c.user_is_loggedin
                 else g.num_default_reddits)
        return normalized_hot(Subreddit.user_subreddits(c.user, limit=limit))
    # if not using the query_cache we still want cached front pages
    if (not g.use_query_cache
            and not isinstance(c.site, FakeSubreddit)
            and self.after is None
            and self.count == 0):
        return get_hot([c.site])
    return c.site.get_links('hot', 'all')
开发者ID:codetripping,项目名称:reddit,代码行数:19,代码来源:listingcontroller.py
示例12: cached_organic_links
def cached_organic_links(*sr_ids):
    """Return fullnames of candidate organic links for the given sr ids."""
    counts = count.get_link_counts()
    # only use links from reddits that you're subscribed to
    names = [fn for fn in counts.keys() if counts[fn][1] in sr_ids]
    names.sort(key=lambda fn: counts[fn][0])
    if not names and g.debug:
        # debug fallback: recent unpromoted links from everywhere
        q = All.get_links('new', 'all')
        q._limit = 100  # this decomposes to a _query
        names = [link._fullname for link in q if link.promoted is None]
        g.log.debug('Used inorganic links')
    # potentially add an up and coming link
    # (coin flip is drawn first to keep the random-call order unchanged)
    if random.choice((True, False)) and sr_ids:
        fnames = normalized_hot([random.choice(sr_ids)])
        if fnames:
            if len(fnames) == 1:
                pick = fnames[0]
            else:
                pick = random.choice(fnames[1:4])
            names.insert(0, pick)
    return names
开发者ID:AD42,项目名称:reddit,代码行数:24,代码来源:organic.py
注:本文中的r2.lib.normalized_hot.normalized_hot函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论