Skip to content

Instantly share code, notes, and snippets.

@jhamman
Created May 3, 2017 21:07
Show Gist options
  • Select an option

  • Save jhamman/9c2b96d8bff449f97a2acded5cb9740e to your computer and use it in GitHub Desktop.

Select an option

Save jhamman/9c2b96d8bff449f97a2acded5cb9740e to your computer and use it in GitHub Desktop.
Traceback raised when computing a dask-backed quantile mapping (xarray + dask): TypeError, tuple indices must be integers or slices, not tuple
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-17-2776a0e3d860> in <module>()
----> 1 test_quantile_mapping_dask()
<ipython-input-16-09fe8c5dfb29> in test_quantile_mapping_dask()
21
22 new = quantile_mapping(input_data, data_to_match)
---> 23 new.compute()
24 new.data.visualize()
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/dataarray.py in compute(self)
588 """
589 new = self.copy(deep=False)
--> 590 return new.load()
591
592 def persist(self):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/dataarray.py in load(self)
571 working with many file objects on disk.
572 """
--> 573 ds = self._to_temp_dataset().load()
574 new = self._from_temp_dataset(ds)
575 self._variable = new._variable
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/dataset.py in load(self)
467
468 # evaluate all the dask arrays simultaneously
--> 469 evaluated_data = da.compute(*lazy_data.values())
470
471 for k, data in zip(lazy_data, evaluated_data):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
200 dsk = collections_to_dsk(variables, optimize_graph, **kwargs)
201 keys = [var._keys() for var in variables]
--> 202 results = get(dsk, keys, **kwargs)
203
204 results_iter = iter(results)
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in get_sync(dsk, keys, **kwargs)
542 kwargs.pop('num_workers', None) # if num_workers present, remove it
543 return get_async(apply_sync, 1, dsk, keys,
--> 544 raise_on_exception=True, **kwargs)
545
546
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, dumps, loads, **kwargs)
485 # Seed initial tasks into the thread pool
486 while state['ready'] and len(state['running']) < num_workers:
--> 487 fire_task()
488
489 # Main loop, wait on tasks to finish, insert new ones
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in fire_task()
481 args=(key, dumps((dsk[key], data)),
482 dumps, loads, get_id, raise_on_exception),
--> 483 callback=queue.put)
484
485 # Seed initial tasks into the thread pool
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in apply_sync(func, args, kwds, callback)
530 def apply_sync(func, args=(), kwds={}, callback=None):
531 """ A naive synchronous version of apply_async """
--> 532 res = func(*args, **kwds)
533 if callback is not None:
534 callback(res)
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in execute_task(key, task_info, dumps, loads, get_id, raise_on_exception)
264 try:
265 task, data = loads(task_info)
--> 266 result = _execute_task(task, data)
267 id = get_id()
268 result = dumps((result, None, id))
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in _execute_task(arg, cache, dsk)
244 elif istask(arg):
245 func, args = arg[0], arg[1:]
--> 246 args2 = [_execute_task(a, cache) for a in args]
247 return func(*args2)
248 elif not ishashable(arg):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in <listcomp>(.0)
244 elif istask(arg):
245 func, args = arg[0], arg[1:]
--> 246 args2 = [_execute_task(a, cache) for a in args]
247 return func(*args2)
248 elif not ishashable(arg):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in _execute_task(arg, cache, dsk)
244 elif istask(arg):
245 func, args = arg[0], arg[1:]
--> 246 args2 = [_execute_task(a, cache) for a in args]
247 return func(*args2)
248 elif not ishashable(arg):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in <listcomp>(.0)
244 elif istask(arg):
245 func, args = arg[0], arg[1:]
--> 246 args2 = [_execute_task(a, cache) for a in args]
247 return func(*args2)
248 elif not ishashable(arg):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in _execute_task(arg, cache, dsk)
245 func, args = arg[0], arg[1:]
246 args2 = [_execute_task(a, cache) for a in args]
--> 247 return func(*args2)
248 elif not ishashable(arg):
249 return arg
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/array/core.py in getarray_inline(a, b, lock)
79
80 def getarray_inline(a, b, lock=None):
---> 81 return getarray(a, b, lock=lock)
82
83
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/array/core.py in getarray(a, b, lock)
62 c = a[b]
63 if type(c) != np.ndarray:
---> 64 c = np.asarray(c)
65 finally:
66 if lock:
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/numpy/core/numeric.py in asarray(a, dtype, order)
529
530 """
--> 531 return array(a, dtype, copy=False, order=order)
532
533
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/common.py in __array__(self, dtype)
92
93 def __array__(self, dtype=None):
---> 94 return np.asarray(self.values, dtype=dtype)
95
96 def __repr__(self):
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/dataarray.py in values(self)
399 def values(self):
400 """The array's data as a numpy.ndarray"""
--> 401 return self.variable.values
402
403 @values.setter
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/variable.py in values(self)
306 def values(self):
307 """The variable's data as a numpy.ndarray"""
--> 308 return _as_array_or_item(self._data)
309
310 @values.setter
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/xarray/core/variable.py in _as_array_or_item(data)
182 TODO: remove this (replace with np.asarray) once these issues are fixed
183 """
--> 184 data = np.asarray(data)
185 if data.ndim == 0:
186 if data.dtype.kind == 'M':
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/numpy/core/numeric.py in asarray(a, dtype, order)
529
530 """
--> 531 return array(a, dtype, copy=False, order=order)
532
533
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/array/core.py in __array__(self, dtype, **kwargs)
1054
1055 def __array__(self, dtype=None, **kwargs):
---> 1056 x = self.compute()
1057 if dtype and x.dtype != dtype:
1058 x = x.astype(dtype)
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
93 Extra keywords to forward to the scheduler ``get`` function.
94 """
---> 95 (result,) = compute(self, traverse=False, **kwargs)
96 return result
97
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
200 dsk = collections_to_dsk(variables, optimize_graph, **kwargs)
201 keys = [var._keys() for var in variables]
--> 202 results = get(dsk, keys, **kwargs)
203
204 results_iter = iter(results)
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in get_sync(dsk, keys, **kwargs)
542 kwargs.pop('num_workers', None) # if num_workers present, remove it
543 return get_async(apply_sync, 1, dsk, keys,
--> 544 raise_on_exception=True, **kwargs)
545
546
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, raise_on_exception, rerun_exceptions_locally, callbacks, dumps, loads, **kwargs)
485 # Seed initial tasks into the thread pool
486 while state['ready'] and len(state['running']) < num_workers:
--> 487 fire_task()
488
489 # Main loop, wait on tasks to finish, insert new ones
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in fire_task()
481 args=(key, dumps((dsk[key], data)),
482 dumps, loads, get_id, raise_on_exception),
--> 483 callback=queue.put)
484
485 # Seed initial tasks into the thread pool
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in apply_sync(func, args, kwds, callback)
530 def apply_sync(func, args=(), kwds={}, callback=None):
531 """ A naive synchronous version of apply_async """
--> 532 res = func(*args, **kwds)
533 if callback is not None:
534 callback(res)
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in execute_task(key, task_info, dumps, loads, get_id, raise_on_exception)
264 try:
265 task, data = loads(task_info)
--> 266 result = _execute_task(task, data)
267 id = get_id()
268 result = dumps((result, None, id))
/Users/jhamman/anaconda/envs/storylines/lib/python3.6/site-packages/dask/async.py in _execute_task(arg, cache, dsk)
245 func, args = arg[0], arg[1:]
246 args2 = [_execute_task(a, cache) for a in args]
--> 247 return func(*args2)
248 elif not ishashable(arg):
249 return arg
TypeError: tuple indices must be integers or slices, not tuple
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment