This is really a question about sampling. I am following your examples, but using a three-planet system.
I ran the tuning step with
np.random.seed(42)
sampler = xo.PyMC3Sampler(window=100, start=500, finish=200)
with model:
    burnin = sampler.tune(tune=5000, start=map_soln, step_kwargs=dict(target_accept=0.9), cores=6)
/home/tom/anaconda3/lib/python3.6/site-packages/theano/tensor/subtensor.py:2197: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.
rval = inputs[0].__getitem__(inputs[1:])
Sampling 6 chains: 100%|██████████| 3012/3012 [1:50:47<00:00, 6.27s/draws]
Sampling 6 chains: 100%|██████████| 612/612 [07:34<00:00, 2.57s/draws]
Sampling 6 chains: 100%|██████████| 1212/1212 [15:29<00:00, 2.65s/draws]
Sampling 6 chains: 100%|██████████| 2412/2412 [1:18:08<00:00, 8.68s/draws]
Sampling 6 chains: 100%|██████████| 4812/4812 [2:54:50<00:00, 10.13s/draws]
Sampling 6 chains: 100%|██████████| 18012/18012 [12:41:31<00:00, 10.79s/draws]
with model:
    trace = sampler.sample(draws=2000, cores=6)
Multiprocess sampling (6 chains in 6 jobs)
NUTS: [logw0, logS0, logs2, omega, ecc, rb, t0, logP, rho_star, r_star, u_star, mean]
/home/tom/anaconda3/lib/python3.6/site-packages/theano/tensor/subtensor.py:2197: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.
rval = inputs[0].__getitem__(inputs[1:])
---------------------------------------------------------------------------
OSError Traceback (most recent call last)
<ipython-input-29-82e710a8cf2e> in <module>()
1 with model:
----> 2 trace = sampler.sample(draws=1000, cores=6)
~/anaconda3/lib/python3.6/site-packages/exoplanet/sampling.py in sample(self, trace, step, start, step_kwargs, **kwargs)
191 start=start, step_kwargs=step_kwargs, trace=trace, step=step)
192 kwargs["tune"] = self.finish
--> 193 return pm.sample(start=start, step=step, **kwargs)
~/anaconda3/lib/python3.6/site-packages/pymc3/sampling.py in sample(draws, step, init, n_init, start, trace, chain_idx, chains, cores, tune, nuts_kwargs, step_kwargs, progressbar, model, random_seed, live_plot, discard_tuned_samples, live_plot_kwargs, compute_convergence_checks, use_mmap, **kwargs)
447 _print_step_hierarchy(step)
448 try:
--> 449 trace = _mp_sample(**sample_args)
450 except pickle.PickleError:
451 _log.warning("Could not pickle model, sampling singlethreaded.")
~/anaconda3/lib/python3.6/site-packages/pymc3/sampling.py in _mp_sample(draws, tune, step, chains, cores, chain, random_seed, start, progressbar, trace, model, use_mmap, **kwargs)
994 sampler = ps.ParallelSampler(
995 draws, tune, chains, cores, random_seed, start, step,
--> 996 chain, progressbar)
997 try:
998 with sampler:
~/anaconda3/lib/python3.6/site-packages/pymc3/parallel_sampling.py in __init__(self, draws, tune, chains, cores, seeds, start_points, step_method, start_chain_num, progressbar)
273 ProcessAdapter(draws, tune, step_method,
274 chain + start_chain_num, seed, start)
--> 275 for chain, seed, start in zip(range(chains), seeds, start_points)
276 ]
277
~/anaconda3/lib/python3.6/site-packages/pymc3/parallel_sampling.py in <listcomp>(.0)
273 ProcessAdapter(draws, tune, step_method,
274 chain + start_chain_num, seed, start)
--> 275 for chain, seed, start in zip(range(chains), seeds, start_points)
276 ]
277
~/anaconda3/lib/python3.6/site-packages/pymc3/parallel_sampling.py in __init__(self, draws, tune, step_method, chain, seed, start)
180 draws, tune, seed)
181 # We fork right away, so that the main process can start tqdm threads
--> 182 self._process.start()
183
184 @property
~/anaconda3/lib/python3.6/multiprocessing/process.py in start(self)
103 'daemonic processes are not allowed to have children'
104 _cleanup()
--> 105 self._popen = self._Popen(self)
106 self._sentinel = self._popen.sentinel
107 # Avoid a refcycle if the target function holds an indirect
~/anaconda3/lib/python3.6/multiprocessing/context.py in _Popen(process_obj)
221 @staticmethod
222 def _Popen(process_obj):
--> 223 return _default_context.get_context().Process._Popen(process_obj)
224
225 class DefaultContext(BaseContext):
~/anaconda3/lib/python3.6/multiprocessing/context.py in _Popen(process_obj)
275 def _Popen(process_obj):
276 from .popen_fork import Popen
--> 277 return Popen(process_obj)
278
279 class SpawnProcess(process.BaseProcess):
~/anaconda3/lib/python3.6/multiprocessing/popen_fork.py in __init__(self, process_obj)
17 util._flush_std_streams()
18 self.returncode = None
---> 19 self._launch(process_obj)
20
21 def duplicate_for_child(self, fd):
~/anaconda3/lib/python3.6/multiprocessing/popen_fork.py in _launch(self, process_obj)
64 code = 1
65 parent_r, child_w = os.pipe()
---> 66 self.pid = os.fork()
67 if self.pid == 0:
68 try:
OSError: [Errno 12] Cannot allocate memory