本文整理汇总了Python中r2.lib.zookeeper.connect_to_zookeeper函数的典型用法代码示例。如果您正苦于以下问题:Python connect_to_zookeeper函数的具体用法?Python connect_to_zookeeper怎么用?Python connect_to_zookeeper使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了connect_to_zookeeper函数的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: main
def main():
    """Entry point: connect to ZooKeeper and acquire a server name.

    The path of an ini file is expected as the first command line
    argument.  Credentials and paths are read from its [DEFAULT]
    section, a ZooKeeper session is established, the required znodes
    are created, and acquire_name() is entered.  On the way out the
    hostname file is removed (best effort) no matter what happened.
    """
    config = ConfigParser.RawConfigParser()
    with open(sys.argv[1]) as ini:
        config.readfp(ini)

    def opt(name):
        # all settings live in the [DEFAULT] section of the ini file
        return config.get("DEFAULT", name)

    name_file = opt("gold_hostname_file")

    client = connect_to_zookeeper(
        opt("zookeeper_connection_string"),
        (opt("zookeeper_username"), opt("zookeeper_password")),
    )
    client.add_listener(state_listener)

    # full-access ACL for the znodes this script owns
    full_access = [client.make_acl(read=True, write=True,
                                   create=True, delete=True)]
    for znode in (ROOT, LOCK):
        client.ensure_path(znode, acl=full_access)

    try:
        acquire_name(client, name_file)
    finally:
        # best effort cleanup: the file may never have been created
        try:
            os.unlink(name_file)
        except OSError:
            pass
开发者ID:GodOfConquest,项目名称:reddit-plugin-gold,代码行数:25,代码来源:get-server-name.py
示例2: setup
def setup(self):
    """Initialize app globals: caches, ZooKeeper/live-config, and Cassandra.

    Mutates ``self`` (the application globals object) in place; returns
    nothing.  Cache chains built here are registered in
    ``self.cache_chains`` so request teardown can reset them.

    Raises:
        ValueError: if ``cassandra_seeds`` is missing from the ini file.
    """
    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    if hasattr(signal, 'SIGUSR1'):
        # not all platforms have user signals
        signal.signal(signal.SIGUSR1, thread_dump)

    # initialize caches. Any cache-chains built here must be added
    # to cache_chains (closed around by reset_caches) so that they
    # can properly reset their local components
    localcache_cls = (SelfEmptyingCache if self.running_as_script
                      else LocalCache)
    num_mc_clients = self.num_mc_clients
    self.cache_chains = {}

    # for now, zookeeper will be an optional part of the stack.
    # if it's not configured, we will grab the expected config from the
    # [live_config] section of the ini file
    zk_hosts = self.config.get("zookeeper_connection_string")
    if zk_hosts:
        # imported lazily so the zookeeper client library is only
        # required when zookeeper is actually configured
        from r2.lib.zookeeper import (connect_to_zookeeper,
                                      LiveConfig, LiveList)
        zk_username = self.config["zookeeper_username"]
        zk_password = self.config["zookeeper_password"]
        self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                         zk_password))
        self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
        self.throttles = LiveList(self.zookeeper, "/throttles",
                                  map_fn=ipaddress.ip_network,
                                  reduce_fn=ipaddress.collapse_addresses)
    else:
        self.zookeeper = None
        # no zookeeper: read the live config straight from the ini file
        parser = ConfigParser.RawConfigParser()
        parser.read([self.config["__file__"]])
        self.live_config = extract_live_config(parser, self.plugins)
        self.throttles = tuple()  # immutable since it's not real

    self.memcache = CMemcache(self.memcaches, num_clients = num_mc_clients)
    # a smaller, separate pool used only for distributed locks
    self.lock_cache = CMemcache(self.lockcaches, num_clients=num_mc_clients)

    self.stats = Stats(self.config.get('statsd_addr'),
                       self.config.get('statsd_sample_rate'))

    # time every postgres query via sqlalchemy's cursor-execute hooks
    event.listens_for(engine.Engine, 'before_cursor_execute')(
        self.stats.pg_before_cursor_execute)
    event.listens_for(engine.Engine, 'after_cursor_execute')(
        self.stats.pg_after_cursor_execute)

    self.make_lock = make_lock_factory(self.lock_cache, self.stats)

    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    keyspace = "reddit"
    self.cassandra_pools = {
        "main":
            StatsCollectingConnectionPool(
                keyspace,
                stats=self.stats,
                logging_name="main",
                server_list=self.cassandra_seeds,
                pool_size=self.cassandra_pool_size,
                timeout=2,
                max_retries=3,
                prefill=False
            ),
    }

    # optional memcache tier sitting in front of the permacache CF
    perma_memcache = (CMemcache(self.permacache_memcaches, num_clients = num_mc_clients)
                      if self.permacache_memcaches
                      else None)
    self.permacache = CassandraCacheChain(localcache_cls(),
                                          CassandraCache('permacache',
                                              self.cassandra_pools[self.cassandra_default_pool],
                                              read_consistency_level = self.cassandra_rcl,
                                              write_consistency_level = self.cassandra_wcl),
                                          memcache = perma_memcache,
                                          lock_factory = self.make_lock)
    self.cache_chains.update(permacache=self.permacache)

    # hardcache is done after the db info is loaded, and then the
    # chains are reset to use the appropriate initial entries

    # with stalecaches configured, reads may return slightly stale data
    # in exchange for lower latency
    if self.stalecaches:
        self.cache = StaleCacheChain(localcache_cls(),
                                     CMemcache(self.stalecaches, num_clients=num_mc_clients),
                                     self.memcache)
    else:
        self.cache = MemcacheChain((localcache_cls(), self.memcache))
    self.cache_chains.update(cache=self.cache)

    self.rendercache = MemcacheChain((localcache_cls(),
                                      CMemcache(self.rendercaches,
                                                noreply=True, no_block=True,
    # NOTE(review): statement cut off here -- the remainder of this
    # example was omitted by the source aggregator.
开发者ID:Anenome,项目名称:reddit,代码行数:101,代码来源:app_globals.py
示例3: setup
# ... (beginning of this example was omitted by the source aggregator) ...
# NOTE(review): fragment from the middle of an app_globals setup() method;
# every `self.` below refers to the application globals object.

# allow "key=value" command line arguments to override g.* attributes
for arg in sys.argv:
    tokens = arg.split("=")
    if len(tokens) == 2:
        k, v = tokens
        self.log.debug("Overriding g.%s to %s" % (k, v))
        setattr(self, k, v)

self.reddit_host = socket.gethostname()
self.reddit_pid = os.getpid()

if hasattr(signal, 'SIGUSR1'):
    # not all platforms have user signals
    signal.signal(signal.SIGUSR1, thread_dump)

locale.setlocale(locale.LC_ALL, self.locale)

# Pre-calculate ratelimit values
self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                       self.RL_RESET_SECONDS)

self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                             self.RL_OAUTH_RESET_SECONDS)

self.startup_timer.intermediate("configuration")

################# ZOOKEEPER
# for now, zookeeper will be an optional part of the stack.
# if it's not configured, we will grab the expected config from the
# [live_config] section of the ini file
zk_hosts = self.config.get("zookeeper_connection_string")
if zk_hosts:
    from r2.lib.zookeeper import (connect_to_zookeeper,
                                  LiveConfig, LiveList)
    zk_username = self.config["zookeeper_username"]
    zk_password = self.config["zookeeper_password"]
    self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                     zk_password))
    self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
    # secrets come from zookeeper when available...
    self.secrets = fetch_secrets(self.zookeeper)
    self.throttles = LiveList(self.zookeeper, "/throttles",
                              map_fn=ipaddress.ip_network,
                              reduce_fn=ipaddress.collapse_addresses)
else:
    self.zookeeper = None
    parser = ConfigParser.RawConfigParser()
    # keep option names case-sensitive (default lowercases them)
    parser.optionxform = str
    parser.read([self.config["__file__"]])
    self.live_config = extract_live_config(parser, self.plugins)
    # ...otherwise they are extracted straight from the ini file
    self.secrets = extract_secrets(parser)
    self.throttles = tuple()  # immutable since it's not real
self.startup_timer.intermediate("zookeeper")

################# MEMCACHE
num_mc_clients = self.num_mc_clients

# the main memcache pool. used for most everything.
self.memcache = CMemcache(
    self.memcaches,
    min_compress_len=50 * 1024,
    num_clients=num_mc_clients,
)

# a pool just used for @memoize results
# ... (remainder of this example was omitted by the source aggregator) ...
开发者ID:Agrajagd,项目名称:reddit,代码行数:67,代码来源:app_globals.py
示例4: setup
# ... (beginning of this example was omitted by the source aggregator) ...
# NOTE(review): fragment from the middle of an app_globals setup() method;
# every `self.` below refers to the application globals object.

locale.setlocale(locale.LC_ALL, self.locale)

# Pre-calculate ratelimit values
self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                       self.RL_RESET_SECONDS)

self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                             self.RL_OAUTH_RESET_SECONDS)

# login and share limits reuse the base reset window
self.RL_LOGIN_MAX_REQS = int(self.config["RL_LOGIN_AVG_PER_SEC"] *
                             self.RL_RESET_SECONDS)
self.RL_LOGIN_IP_MAX_REQS = int(self.config["RL_LOGIN_IP_AVG_PER_SEC"] *
                                self.RL_RESET_SECONDS)
self.RL_SHARE_MAX_REQS = int(self.config["RL_SHARE_AVG_PER_SEC"] *
                             self.RL_RESET_SECONDS)

# Compile ratelimit regexs
# (rebuild the mapping so keys become compiled patterns, then swap it in)
user_agent_ratelimit_regexes = {}
for agent_re, limit in self.user_agent_ratelimit_regexes.iteritems():
    user_agent_ratelimit_regexes[re.compile(agent_re)] = limit
self.user_agent_ratelimit_regexes = user_agent_ratelimit_regexes

self.startup_timer.intermediate("configuration")

################# ZOOKEEPER
# for now, zookeeper will be an optional part of the stack.
# if it's not configured, we will grab the expected config from the
# [live_config] section of the ini file
zk_hosts = self.config.get("zookeeper_connection_string")
if zk_hosts:
    from r2.lib.zookeeper import (connect_to_zookeeper,
                                  LiveConfig, LiveList)
    zk_username = self.config["zookeeper_username"]
    zk_password = self.config["zookeeper_password"]
    self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                     zk_password))
    self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
    self.secrets = fetch_secrets(self.zookeeper)
    self.throttles = LiveList(self.zookeeper, "/throttles",
                              map_fn=ipaddress.ip_network,
                              reduce_fn=ipaddress.collapse_addresses)
    # close our zk connection when the app shuts down
    SHUTDOWN_CALLBACKS.append(self.zookeeper.stop)
else:
    self.zookeeper = None
    parser = ConfigParser.RawConfigParser()
    # keep option names case-sensitive (default lowercases them)
    parser.optionxform = str
    parser.read([self.config["__file__"]])
    self.live_config = extract_live_config(parser, self.plugins)
    self.secrets = extract_secrets(parser)
    self.throttles = tuple()  # immutable since it's not real

################# PRIVILEGED USERS
self.admins = PermissionFilteredEmployeeList(
    self.live_config, type="admin")
self.sponsors = PermissionFilteredEmployeeList(
    self.live_config, type="sponsor")
self.employees = PermissionFilteredEmployeeList(
    self.live_config, type="employee")

# Store which OAuth clients employees may use, the keys are just for
# readability.
# ... (remainder of this example was omitted by the source aggregator) ...
开发者ID:Arinzeokeke,项目名称:reddit,代码行数:67,代码来源:app_globals.py
示例5: setup
def setup(self):
    """Initialize app globals: queues, domains, logging, and ZooKeeper.

    Mutates ``self`` (the application globals object) in place; returns
    nothing.  This example is truncated by the source aggregator after
    the zookeeper section.

    Raises:
        ValueError: if ``amqp_host`` or ``cassandra_seeds`` is missing
            from the ini file.
        Exception: if ``write_query_queue`` is enabled without
            ``use_query_cache``.
    """
    self.queues = queues.declare_queues(self)

    ################# CONFIGURATION
    # AMQP is required
    if not self.amqp_host:
        raise ValueError("amqp_host not set in the .ini")

    # This requirement doesn't *have* to be a requirement, but there are
    # bugs at the moment that will pop up if you violate it
    # XXX: get rid of these options. new query cache is always on.
    if self.write_query_queue and not self.use_query_cache:
        raise Exception("write_query_queue requires use_query_cache")

    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
    self.origin = "http://" + origin_prefix + self.domain

    # domains we'll serve over https / trust for redirects and cookies
    self.secure_domains = set([urlparse(self.payment_domain).netloc])
    self.trusted_domains = set([self.domain])
    self.trusted_domains.update(self.authorized_cnames)
    if self.https_endpoint:
        https_url = urlparse(self.https_endpoint)
        self.secure_domains.add(https_url.netloc)
        self.trusted_domains.add(https_url.hostname)
    if getattr(self, 'oauth_domain', None):
        self.secure_domains.add(self.oauth_domain)

    # load the unique hashed names of files under static
    static_files = os.path.join(self.paths.get('static_files'), 'static')
    names_file_path = os.path.join(static_files, 'names.json')
    if os.path.exists(names_file_path):
        with open(names_file_path) as handle:
            self.static_names = json.load(handle)
    else:
        self.static_names = {}

    #setup the logger
    self.log = logging.getLogger('reddit')
    self.log.addHandler(logging.StreamHandler())
    if self.debug:
        self.log.setLevel(logging.DEBUG)
    else:
        self.log.setLevel(logging.INFO)

    # set log level for pycountry which is chatty
    logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

    if not self.media_domain:
        self.media_domain = self.domain
    if self.media_domain == self.domain:
        print ("Warning: g.media_domain == g.domain. " +
               "This may give untrusted content access to user cookies")

    # allow "key=value" command line arguments to override g.* attributes
    for arg in sys.argv:
        tokens = arg.split("=")
        if len(tokens) == 2:
            k, v = tokens
            self.log.debug("Overriding g.%s to %s" % (k, v))
            setattr(self, k, v)

    self.reddit_host = socket.gethostname()
    self.reddit_pid = os.getpid()

    if hasattr(signal, 'SIGUSR1'):
        # not all platforms have user signals
        signal.signal(signal.SIGUSR1, thread_dump)

    self.startup_timer.intermediate("configuration")

    ################# ZOOKEEPER
    # for now, zookeeper will be an optional part of the stack.
    # if it's not configured, we will grab the expected config from the
    # [live_config] section of the ini file
    zk_hosts = self.config.get("zookeeper_connection_string")
    if zk_hosts:
        from r2.lib.zookeeper import (connect_to_zookeeper,
                                      LiveConfig, LiveList)
        zk_username = self.config["zookeeper_username"]
        zk_password = self.config["zookeeper_password"]
        self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                         zk_password))
        self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
        self.throttles = LiveList(self.zookeeper, "/throttles",
                                  map_fn=ipaddress.ip_network,
                                  reduce_fn=ipaddress.collapse_addresses)
    else:
        self.zookeeper = None
        parser = ConfigParser.RawConfigParser()
        parser.read([self.config["__file__"]])
        self.live_config = extract_live_config(parser, self.plugins)
        self.throttles = tuple()  # immutable since it's not real
    self.startup_timer.intermediate("zookeeper")
    # ... (remainder of this example was omitted by the source aggregator) ...
开发者ID:debanshuk,项目名称:reddit,代码行数:101,代码来源:app_globals.py
示例6: setup
# ... (beginning of this example was omitted by the source aggregator) ...
# NOTE(review): fragment from the middle of an app_globals setup() method;
# the first two statements below are the tail of a `try:`/`with open(...)`
# block whose opening lines were cut off by the aggregator.
        self.countries = json.load(handle)
        self.log.debug("Using countries.json.")
except IOError:
    # fall back to pycountry data when the static file is missing
    self.log.warning("Couldn't find countries.json. Using pycountry.")
    self.countries = get_countries_and_codes()

if not self.media_domain:
    self.media_domain = self.domain
if self.media_domain == self.domain:
    print("Warning: g.media_domain == g.domain. " + "This may give untrusted content access to user cookies")

# allow "key=value" command line arguments to override g.* attributes
for arg in sys.argv:
    tokens = arg.split("=")
    if len(tokens) == 2:
        k, v = tokens
        self.log.debug("Overriding g.%s to %s" % (k, v))
        setattr(self, k, v)

self.reddit_host = socket.gethostname()
self.reddit_pid = os.getpid()

if hasattr(signal, "SIGUSR1"):
    # not all platforms have user signals
    signal.signal(signal.SIGUSR1, thread_dump)

self.startup_timer.intermediate("configuration")

################# ZOOKEEPER
# for now, zookeeper will be an optional part of the stack.
# if it's not configured, we will grab the expected config from the
# [live_config] section of the ini file
zk_hosts = self.config.get("zookeeper_connection_string")
if zk_hosts:
    from r2.lib.zookeeper import connect_to_zookeeper, LiveConfig, LiveList

    zk_username = self.config["zookeeper_username"]
    zk_password = self.config["zookeeper_password"]
    self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username, zk_password))
    self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
    self.throttles = LiveList(
        self.zookeeper, "/throttles", map_fn=ipaddress.ip_network, reduce_fn=ipaddress.collapse_addresses
    )
    # banned domains are kept live-updated from zookeeper when available
    self.banned_domains = LiveDict(self.zookeeper, "/banned-domains", watch=True)
else:
    self.zookeeper = None
    parser = ConfigParser.RawConfigParser()
    parser.read([self.config["__file__"]])
    self.live_config = extract_live_config(parser, self.plugins)
    self.throttles = tuple()  # immutable since it's not real
    self.banned_domains = dict()
self.startup_timer.intermediate("zookeeper")

################# MEMCACHE
num_mc_clients = self.num_mc_clients

# the main memcache pool. used for most everything.
self.memcache = CMemcache(self.memcaches, num_clients=num_mc_clients)

# a smaller pool of caches used only for distributed locks.
# TODO: move this to ZooKeeper
self.lock_cache = CMemcache(self.lockcaches, num_clients=num_mc_clients)
self.make_lock = make_lock_factory(self.lock_cache, self.stats)

# memcaches used in front of the permacache CF in cassandra.
# XXX: this is a legacy thing; permacache was made when C* didn't have
# a row cache.
# ... (remainder of this example was omitted by the source aggregator) ...
开发者ID:99plus2,项目名称:reddit,代码行数:67,代码来源:app_globals.py
注:本文中的r2.lib.zookeeper.connect_to_zookeeper函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论