# Configuration file for ipcontroller.
c = get_config()
#------------------------------------------------------------------------------
# IPControllerApp configuration
#------------------------------------------------------------------------------
# IPControllerApp will inherit config from: BaseParallelApplication,
# BaseIPythonApplication, Application
# Use threads instead of processes for the schedulers
# c.IPControllerApp.use_threads = False
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.IPControllerApp.verbose_crash = False
# JSON filename where client connection info will be stored.
# c.IPControllerApp.client_json_file = 'ipcontroller-client.json'
# String id to add to runtime files, to prevent name collisions when using
# multiple clusters with a single profile simultaneously.
#
# When set, files will be named like: 'ipcontroller-<cluster_id>-engine.json'
#
# Since this is text inserted into filenames, typical recommendations apply:
# Simple character strings are ideal, and spaces are not recommended (but should
# generally work).
# c.IPControllerApp.cluster_id = ''
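# For example (assumed id), two clusters sharing the default profile could be
# kept separate with something like:
# c.IPControllerApp.cluster_id = 'cluster_a'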
# Whether to overwrite existing config files when copying
# c.IPControllerApp.overwrite = False
# Set the log level by value or name.
# c.IPControllerApp.log_level = 30
# Set the working dir for the process.
# c.IPControllerApp.work_dir = u'/Users/minrk/.ipython'
# ssh url for engines to use when connecting to the Controller processes. It
# should be of the form: [user@]server[:port]. The Controller's listening
# addresses must be accessible from the ssh server
# c.IPControllerApp.engine_ssh_server = u''
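# For example (hypothetical gateway host), engines might tunnel through an ssh
# host that can reach the Controller's listening addresses:
# c.IPControllerApp.engine_ssh_server = u'engineuser@gateway.example.com:22'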
# Whether to create profile dir if it doesn't exist.
# c.IPControllerApp.auto_create = True
# The external IP or domain name of the Controller, used for disambiguating
# engine and client connections.
# c.IPControllerApp.location = u''
# ssh url for clients to use when connecting to the Controller processes. It
# should be of the form: [user@]server[:port]. The Controller's listening
# addresses must be accessible from the ssh server
# c.IPControllerApp.ssh_server = u''
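# For example (hypothetical login host), clients outside the cluster network
# might connect via:
# c.IPControllerApp.ssh_server = u'user@login.example.com'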
# The IPython profile to use.
# c.IPControllerApp.profile = u'default'
# The ZMQ URL of the iplogger to aggregate logging.
# c.IPControllerApp.log_url = ''
# whether to log to a file
# c.IPControllerApp.log_to_file = False
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.IPControllerApp.ipython_dir = u'/Users/minrk/.ipython'
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.IPControllerApp.copy_config_files = False
# import statements to be run at startup. Necessary in some environments
# c.IPControllerApp.import_statements = []
# Whether to reuse existing json connection files. If False, connection files
# will be removed on a clean exit.
# c.IPControllerApp.reuse_files = False
# Reload engine state from JSON file
# c.IPControllerApp.restore_engines = False
# JSON filename where engine connection info will be stored.
# c.IPControllerApp.engine_json_file = 'ipcontroller-engine.json'
# whether to clean up old logfiles before starting
# c.IPControllerApp.clean_logs = False
# The Logging format template
# c.IPControllerApp.log_format = '[%(name)s] %(message)s'
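# For example, the level name can be added using standard logging format fields:
# c.IPControllerApp.log_format = '[%(name)s|%(levelname)s] %(message)s'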
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
#------------------------------------------------------------------------------
# Session configuration
#------------------------------------------------------------------------------
# Object for handling serialization and sending of messages.
#
# The Session object handles building messages and sending them with ZMQ sockets
# or ZMQStream objects. Objects can communicate with each other over the
# network via Session objects, and only need to work with the dict-based IPython
# message spec. The Session will handle serialization/deserialization, security,
# and metadata.
#
# Sessions support configurable serialization via packer/unpacker traits, and
# signing with HMAC digests via the key/keyfile traits.
#
# Parameters
# ----------
#
# debug : bool
# whether to trigger extra debugging statements
# packer/unpacker : str : 'json', 'pickle' or import_string
# importstrings for methods to serialize message parts. If just
# 'json' or 'pickle', predefined JSON and pickle packers will be used.
# Otherwise, the entire importstring must be used.
#
# The functions must accept at least valid JSON input, and output *bytes*.
#
# For example, to use msgpack:
# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
# pack/unpack : callables
# You can also set the pack/unpack callables for serialization directly.
# session : bytes
# the ID of this Session object. The default is to generate a new UUID.
# username : unicode
# username added to message headers. The default is to ask the OS.
# key : bytes
# The key used to initialize an HMAC signature. If unset, messages
# will not be signed or checked.
# keyfile : filepath
# The file containing a key. If this is set, `key` will be initialized
# to the contents of the file.
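# For example (hypothetical path), messages could be signed with an HMAC key
# read from a file:
# c.Session.keyfile = u'/path/to/exec_key'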
# Username for the Session. Default is your system username.
# c.Session.username = 'minrk'
# Threshold (in bytes) beyond which a buffer should be sent without copying.
# c.Session.copy_threshold = 65536
# The name of the packer for serializing messages. Should be one of 'json',
# 'pickle', or an import name for a custom callable serializer.
# c.Session.packer = 'json'
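# Prefer msgpack serialization when it is importable; if the import fails, the
# default packer above is left untouched.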
try:
    import msgpack
except ImportError:
    pass
else:
    c.Session.packer = 'msgpack.packb'
    c.Session.unpacker = 'msgpack.unpackb'
# The maximum number of items for a container to be introspected for custom
# serialization. Containers larger than this are pickled outright.
# c.Session.item_threshold = 64
# The UUID identifying this session.
# c.Session.session = u''
# execution key, for extra authentication.
# c.Session.key = ''
# Debug output in the Session
# c.Session.debug = False
# The name of the unpacker for unserializing messages. Only used with custom
# functions for `packer`.
# c.Session.unpacker = 'json'
# path to file containing execution key.
# c.Session.keyfile = ''
# Threshold (in bytes) beyond which an object's buffer should be extracted to
# avoid pickling.
# c.Session.buffer_threshold = 1024
# Metadata dictionary, which serves as the default top-level metadata dict for
# each message.
# c.Session.metadata = {}
#------------------------------------------------------------------------------
# HubFactory configuration
#------------------------------------------------------------------------------
# The Configurable for setting up a Hub.
# HubFactory will inherit config from: RegistrationFactory
# Client/Engine Port pair for Control queue
# c.HubFactory.control = None
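# For example (assumed ports, in the Client/Engine order described above), the
# control queue could be pinned to fixed ports for firewall rules:
# c.HubFactory.control = (10100, 10101)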
# 0MQ transport for monitor messages. [default: tcp]
# c.HubFactory.monitor_transport = 'tcp'
# IP on which to listen for client connections. [default: loopback]
# c.HubFactory.client_ip = '127.0.0.1'
# Client/Engine Port pair for Task queue
# c.HubFactory.task = None
# 0MQ transport for engine connections. [default: tcp]
# c.HubFactory.engine_transport = 'tcp'
# 0MQ transport for client connections. [default: tcp]
# c.HubFactory.client_transport = 'tcp'
# Monitor (SUB) port for queue traffic
# c.HubFactory.mon_port = 0
# The IP address for registration. This is generally either '127.0.0.1' for
# loopback only or '*' for all interfaces. [default: '127.0.0.1']
# c.HubFactory.ip = '127.0.0.1'
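# For example, to accept connections on all interfaces (only advisable on a
# trusted network or with message signing enabled):
# c.HubFactory.ip = '*'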
# Client/Engine Port pair for MUX queue
# c.HubFactory.mux = None
# PUB port for sending engine status notifications
# c.HubFactory.notifier_port = 0
# The port on which the Hub listens for registration.
# c.HubFactory.regport = 0
# The 0MQ url used for registration. This sets transport, ip, and port in one
# variable. For example: url='tcp://127.0.0.1:12345' or url='epgm://*:90210'
# c.HubFactory.url = ''
# IP on which to listen for engine connections. [default: loopback]
# c.HubFactory.engine_ip = '127.0.0.1'
# Client/Engine Port pair for IOPub relay
# c.HubFactory.iopub = None
# PUB/ROUTER Port pair for Engine heartbeats
# c.HubFactory.hb = None
# The class to use for the DB backend
#
# Options include:
#
#  SQLiteDB: SQLite
#  MongoDB : use MongoDB
#  DictDB  : in-memory storage (fastest, but be mindful of memory growth
#            of the Hub)
#  NoDB    : disable database altogether (default)
c.HubFactory.db_class = 'NoDB'
# c.HubFactory.db_class = 'IPython.parallel.controller.dictdb.DictDB'
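# Alternatively (a sketch), task records could be persisted so results stay
# queryable across client sessions, at the cost of Hub storage:
# c.HubFactory.db_class = 'SQLiteDB'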
# IP on which to listen for monitor messages. [default: loopback]
# c.HubFactory.monitor_ip = '127.0.0.1'
# The 0MQ transport for communications. This will likely be the default of
# 'tcp', but other values include 'ipc', 'epgm', 'inproc'.
# c.HubFactory.transport = 'tcp'
#------------------------------------------------------------------------------
# TaskScheduler configuration
#------------------------------------------------------------------------------
# Python TaskScheduler object.
#
# This is the simplest object that supports msg_id based DAG dependencies.
# *Only* task msg_ids are checked, not msg_ids of jobs submitted via the MUX
# queue.
# select the task scheduler scheme [default: Python LRU] Options are: 'pure',
# 'lru', 'plainrandom', 'weighted', 'twobin', 'leastload'
c.TaskScheduler.scheme_name = 'lru'
# specify the High Water Mark (HWM) for the downstream socket in the Task
# scheduler. This is the maximum number of allowed outstanding tasks on each
# engine.
#
# The default (1) means that only one task can be outstanding on each engine.
# Setting TaskScheduler.hwm=0 means there is no limit, and the engines continue
# to be assigned tasks while they are working, effectively hiding network
# latency behind computation, but can result in an imbalance of work when
# submitting many heterogeneous tasks all at once. Any positive value greater
# than one is a compromise between the two.
c.TaskScheduler.hwm = 1
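# Alternatively (a sketch), removing the limit keeps engines' pipelines full,
# hiding network latency behind computation at the risk of load imbalance:
# c.TaskScheduler.hwm = 0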
#------------------------------------------------------------------------------
# HeartMonitor configuration
#------------------------------------------------------------------------------
# A basic HeartMonitor class.
#
#  pingstream: a PUB stream
#  pongstream: a ROUTER stream
#  period: the period of the heartbeat in milliseconds
# The frequency at which the Hub pings the engines for heartbeats (in ms)
# c.HeartMonitor.period = 3000
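# A shorter period below makes unresponsive engines noticed sooner, at the cost
# of more frequent heartbeat traffic.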
c.HeartMonitor.period = 250
#------------------------------------------------------------------------------
# SQLiteDB configuration
#------------------------------------------------------------------------------
# SQLite3 TaskRecord backend.
# The SQLite Table to use for storing tasks for this session. If unspecified, a
# new table will be created with the Hub's IDENT. Specifying the table will
# result in tasks from previous sessions being available via Clients' db_query
# and get_result methods.
# c.SQLiteDB.table = ''
# The directory containing the sqlite task database. The default is to use the
# cluster_dir location.
# c.SQLiteDB.location = ''
# The filename of the sqlite task database. [default: 'tasks.db']
# c.SQLiteDB.filename = 'tasks.db'
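# For example (hypothetical table name), pinning the table keeps tasks from
# previous sessions reachable via db_query and get_result:
# c.SQLiteDB.table = 'ipython_tasks'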
#------------------------------------------------------------------------------
# MongoDB configuration
#------------------------------------------------------------------------------
# MongoDB TaskRecord backend.
# Positional arguments to be passed to pymongo.Connection. Only necessary if
# the default mongodb configuration does not point to your mongod instance.
# c.MongoDB.connection_args = []
# Keyword arguments to be passed to pymongo.Connection. Only necessary if the
# default mongodb configuration does not point to your mongod instance.
# c.MongoDB.connection_kwargs = {}
# The MongoDB database name to use for storing tasks for this session. If
# unspecified, a new database will be created with the Hub's IDENT. Specifying
# the database will result in tasks from previous sessions being available via
# Clients' db_query and get_result methods.
# c.MongoDB.database = u''
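# For example (hypothetical host and names), pointing the backend at a remote
# mongod:
# c.MongoDB.connection_args = ['mongo.example.com', 27017]
# c.MongoDB.database = u'ipython_tasks'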