Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Save manifoldhiker/667ad92b4f83bcb83cabe6c70e2be22a to your computer and use it in GitHub Desktop.
Verta client test suite error log (pytest, Windows / Python 3.7.1)
PS D:\src\modeldb-client\verta\tests> pytest
================================================= test session starts =================================================
platform win32 -- Python 3.7.1, pytest-5.0.1, py-1.8.0, pluggy-0.12.0
hypothesis profile 'default' -> database=DirectoryBasedExampleDatabase('D:\\src\\modeldb-client\\verta\\tests\\.hypothesis\\examples')
rootdir: D:\src\modeldb-client\verta
plugins: hypothesis-4.31.1
collected 86 items
test_artifacts.py ....FF....... [ 15%]
test_backend.py F [ 16%]
test_datasets.py ...................FFFF.F.F.EF [ 51%]
test_entities.py ............. [ 66%]
test_metadata.py .................... [ 89%]
test_protos.py ... [ 93%]
modelapi_hypothesis\test_modelapi.py ..F [ 96%]
modelapi_hypothesis\test_value_generator.py ... [100%]
======================================================= ERRORS ========================================================
______________ ERROR at setup of TestBigQueryDatasetVersionInfo.test_big_query_dataset_version_creation _______________
@pytest.fixture(scope="session")
def big_query_job():
# needs to be set
#_ = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", DEFAULT_GOOGLE_APPLICATION_CREDENTIALS)
query = (
"""SELECT
id,
`by`,
score,
time,
time_ts,
title,
url,
text,
deleted,
dead,
descendants,
author
FROM
`bigquery-public-data.hacker_news.stories`
LIMIT
1000"""
)
> query_job = bigquery.Client().query(
query,
# Location must match that of the dataset(s) referenced in the query.
location="US",
)
conftest.py:110:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\google\cloud\bigquery\client.py:167: in __init__
project=project, credentials=credentials, _http=_http
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\google\cloud\client.py:227: in __init__
_ClientProjectMixin.__init__(self, project=project)
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\google\cloud\client.py:179: in __init__
project = self._determine_default(project)
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\google\cloud\client.py:194: in _determine_default
return _determine_default_project(project)
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\google\cloud\_helpers.py:186: in _determine_default_project
_, project = google.auth.default()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
scopes = None, request = None
def default(scopes=None, request=None):
"""Gets the default credentials for the current environment.
`Application Default Credentials`_ provides an easy way to obtain
credentials to call Google APIs for server-to-server or local applications.
This function acquires credentials from the environment in the following
order:
1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
to the path of a valid service account JSON private key file, then it is
loaded and returned. The project ID returned is the project ID defined
in the service account file if available (some older files do not
contain project ID information).
2. If the `Google Cloud SDK`_ is installed and has application default
credentials set they are loaded and returned.
To enable application default credentials with the Cloud SDK run::
gcloud auth application-default login
If the Cloud SDK has an active project, the project ID is returned. The
active project can be set using::
gcloud config set project
3. If the application is running in the `App Engine standard environment`_
then the credentials and project ID from the `App Identity Service`_
are used.
4. If the application is running in `Compute Engine`_ or the
`App Engine flexible environment`_ then the credentials and project ID
are obtained from the `Metadata Service`_.
5. If no credentials are found,
:class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.
.. _Application Default Credentials: https://developers.google.com\
/identity/protocols/application-default-credentials
.. _Google Cloud SDK: https://cloud.google.com/sdk
.. _App Engine standard environment: https://cloud.google.com/appengine
.. _App Identity Service: https://cloud.google.com/appengine/docs/python\
/appidentity/
.. _Compute Engine: https://cloud.google.com/compute
.. _App Engine flexible environment: https://cloud.google.com\
/appengine/flexible
.. _Metadata Service: https://cloud.google.com/compute/docs\
/storing-retrieving-metadata
Example::
import google.auth
credentials, project_id = google.auth.default()
Args:
scopes (Sequence[str]): The list of scopes for the credentials. If
specified, the credentials will automatically be scoped if
necessary.
request (google.auth.transport.Request): An object used to make
HTTP requests. This is used to detect whether the application
is running on Compute Engine. If not specified, then it will
use the standard library http client to make requests.
Returns:
Tuple[~google.auth.credentials.Credentials, Optional[str]]:
the current environment's credentials and project ID. Project ID
may be None, which indicates that the Project ID could not be
ascertained from the environment.
Raises:
~google.auth.exceptions.DefaultCredentialsError:
If no credentials were found, or if the credentials found were
invalid.
"""
from google.auth.credentials import with_scopes_if_required
explicit_project_id = os.environ.get(
environment_vars.PROJECT,
os.environ.get(environment_vars.LEGACY_PROJECT))
checkers = (
_get_explicit_environ_credentials,
_get_gcloud_sdk_credentials,
_get_gae_credentials,
lambda: _get_gce_credentials(request))
for checker in checkers:
credentials, project_id = checker()
if credentials is not None:
credentials = with_scopes_if_required(credentials, scopes)
effective_project_id = explicit_project_id or project_id
if not effective_project_id:
_LOGGER.warning(
'No project ID could be determined. Consider running '
'`gcloud config set project` or setting the %s '
'environment variable',
environment_vars.PROJECT)
return credentials, effective_project_id
> raise exceptions.DefaultCredentialsError(_HELP_MESSAGE)
E google.auth.exceptions.DefaultCredentialsError: Could not automatically determine credentials. Please set GOOGLE_APPLICATION_CREDENTIALS or explicitly create credentials and re-run the application. For more information, please see https://cloud.google.com/docs/authentication/getting-started
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\google\auth\_default.py:317: DefaultCredentialsError
====================================================== FAILURES =======================================================
_______________________________________________ TestDatasets.test_path ________________________________________________
self = <test_artifacts.TestDatasets object at 0x11308590>
experiment_run = name: Run 2748415643791132954276
url: https://app.verta.ai/project/ec9459b9-af93-4005-8b77-a97b40ca0f15/exp-runs/b8fb5...725
project id: ec9459b9-af93-4005-8b77-a97b40ca0f15
hyperparameters: {}
observations: {}
metrics: {}
artifact keys: []
def test_path(self, experiment_run):
key = utils.gen_str()
path = utils.gen_str()
> experiment_run.log_dataset_path(key, path)
test_artifacts.py:78:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
..\verta\client.py:2524: in log_dataset_path
self._log_dataset_path(key, path, linked_artifact_id=linked_dataset_id)
..\verta\client.py:1972: in _log_dataset_path
response.raise_for_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <Response [400]>
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if isinstance(self.reason, bytes):
# We attempt to decode utf-8 first because some servers
# choose to localize their reason strings. If the string
# isn't utf-8, we fall back to iso-8859-1 for all other
# encodings. (See PR #3538)
try:
reason = self.reason.decode('utf-8')
except UnicodeDecodeError:
reason = self.reason.decode('iso-8859-1')
else:
reason = self.reason
if 400 <= self.status_code < 500:
http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
elif 500 <= self.status_code < 600:
http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
if http_error_msg:
> raise HTTPError(http_error_msg, response=self)
E requests.exceptions.HTTPError: 400 Client Error: Bad Request for url: https://app.verta.ai/v1/experiment-run/logDataset
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\requests\models.py:940: HTTPError
------------------------------------------------ Captured stdout setup ------------------------------------------------
connection successfully established
created new Project: Proj 27484156437911193273
created new Experiment: Expt 2748415643791125775168
created new ExperimentRun: Run 2748415643791132954276
_______________________________________________ TestDatasets.test_store _______________________________________________
self = <test_artifacts.TestDatasets object at 0x11308C50>
experiment_run = name: Run 274841564379118003854
url: https://app.verta.ai/project/c55a9eca-783f-4975-87bd-7235a2e633e6/exp-runs/c1e78f...t id: c55a9eca-783f-4975-87bd-7235a2e633e6
hyperparameters: {}
observations: {}
metrics: {}
artifact keys: ['fgyovdmz']
def test_store(self, experiment_run):
key = utils.gen_str()
dataset = np.random.random(size=(36,6))
experiment_run.log_dataset(key, dataset)
> assert np.array_equal(experiment_run.get_dataset(key), dataset)
test_artifacts.py:86:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
..\verta\client.py:2547: in get_dataset
dataset, path_only, linked_id = self._get_dataset(key)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = name: Run 274841564379118003854
url: https://app.verta.ai/project/c55a9eca-783f-4975-87bd-7235a2e633e6/exp-runs/c1e78f...t id: c55a9eca-783f-4975-87bd-7235a2e633e6
hyperparameters: {}
observations: {}
metrics: {}
artifact keys: ['fgyovdmz']
key = 'fgyovdmz'
def _get_dataset(self, key):
"""
Gets the dataset with name `key` from this Experiment Run.
If the dataset was originally logged as just a filesystem path, that path will be returned.
Otherwise, bytes representing the dataset object will be returned.
Parameters
----------
key : str
Name of the artifact.
Returns
-------
str or bytes
Filesystem path or bytes representing the artifact.
bool
True if the artifact was only logged as its filesystem path.
"""
# get key-path from ModelDB
Message = _ExperimentRunService.GetDatasets
msg = Message(id=self.id)
data = _utils.proto_to_json(msg)
response = _utils.make_request("GET",
"{}://{}/v1/experiment-run/getDatasets".format(self._conn.scheme, self._conn.socket),
self._conn, params=data)
response.raise_for_status()
response_msg = _utils.json_to_proto(response.json(), Message.Response)
dataset = {dataset.key: dataset for dataset in response_msg.datasets}.get(key)
if dataset is None:
> raise KeyError("no dataset found with key {}".format(key))
E KeyError: 'no dataset found with key fgyovdmz'
..\verta\client.py:2050: KeyError
------------------------------------------------ Captured stdout setup ------------------------------------------------
connection successfully established
created new Project: Proj 2748415643791166213417
created new Experiment: Expt 2748415643791173171349
created new ExperimentRun: Run 274841564379118003854
------------------------------------------------ Captured stdout call -------------------------------------------------
upload complete (fgyovdmz.pkl)
_________________________________________________ TestLoad.test_load __________________________________________________
self = <test_backend.TestLoad object at 0x118B70B0>, client = <verta.client.Client object at 0x118B77B0>
def test_load(self, client):
client.set_project()
client.set_experiment()
with Pool(36) as pool:
> pool.map(self.run_fake_experiment, [client]*180)
test_backend.py:41:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\multiprocessing\pool.py:290: in map
return self._map_async(func, iterable, mapstar, chunksize).get()
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\multiprocessing\pool.py:683: in get
raise self._value
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\multiprocessing\pool.py:457: in _handle_tasks
put(task)
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\multiprocessing\connection.py:206: in send
self._send_bytes(_ForkingPickler.dumps(obj))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
cls = <class 'multiprocessing.reduction.ForkingPickler'>
obj = (0, 0, <function mapstar at 0x0EF06738>, ((<function TestLoad.run_fake_experiment at 0x11188E88>, (<verta.client.Client object at 0x118B77B0>, <verta.client.Client object at 0x118B77B0>)),), {})
protocol = None
@classmethod
def dumps(cls, obj, protocol=None):
buf = io.BytesIO()
> cls(buf, protocol).dump(obj)
E TypeError: can't pickle module objects
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\multiprocessing\reduction.py:51: TypeError
------------------------------------------------ Captured stdout setup ------------------------------------------------
connection successfully established
------------------------------------------------ Captured stdout call -------------------------------------------------
created new Project: Proj 2748415643792195414715
created new Experiment: Expt 2748415643792201949391
__________________________________ TestFileSystemDatasetVersionInfo.test_single_file __________________________________
self = <test_datasets.TestFileSystemDatasetVersionInfo object at 0x00BBFE90>
def test_single_file(self):
> dir_name, file_names = self.create_dir_with_files(num_files=1)
test_datasets.py:294:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <test_datasets.TestFileSystemDatasetVersionInfo object at 0x00BBFE90>, num_files = 1
def create_dir_with_files(self, num_files=10):
dir_name = 'FSD:' + str(time.time())
file_names = []
> os.mkdir(dir_name)
E NotADirectoryError: [WinError 267] The directory name is invalid: 'FSD:1564379259.4748738'
test_datasets.py:310: NotADirectoryError
______________________________________ TestFileSystemDatasetVersionInfo.test_dir ______________________________________
self = <test_datasets.TestFileSystemDatasetVersionInfo object at 0x00BB7750>
def test_dir(self):
> dir_name, _ = self.create_dir_with_files(num_files=10)
test_datasets.py:301:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <test_datasets.TestFileSystemDatasetVersionInfo object at 0x00BB7750>, num_files = 10
def create_dir_with_files(self, num_files=10):
dir_name = 'FSD:' + str(time.time())
file_names = []
> os.mkdir(dir_name)
E NotADirectoryError: [WinError 267] The directory name is invalid: 'FSD:1564379259.4907954'
test_datasets.py:310: NotADirectoryError
_____________________________________ TestS3DatasetVersionInfo.test_single_object _____________________________________
self = <test_datasets.TestS3DatasetVersionInfo object at 0x010EB030>, s3_bucket = None, s3_object = None
def test_single_object(self, s3_bucket, s3_object):
> s3dvi = S3DatasetVersionInfo(s3_bucket, s3_object)
test_datasets.py:321:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <verta.client.S3DatasetVersionInfo object at 0x010EB210>, bucket_name = None, key = None, url_stub = None
def __init__(self, bucket_name, key=None, url_stub=None):
super(S3DatasetVersionInfo, self).__init__()
self.location_type = _DatasetVersionService.PathLocationTypeEnum.S3_FILE_SYSTEM
self.bucket_name = bucket_name
self.key = key
self.url_stub = url_stub
self.base_path = ("" if url_stub is None else url_stub) + bucket_name \
> + (("/" + key) if key is not None else "")
E TypeError: can only concatenate str (not "NoneType") to str
..\verta\client.py:762: TypeError
________________________________________ TestS3DatasetVersionInfo.test_bucket _________________________________________
self = <test_datasets.TestS3DatasetVersionInfo object at 0x118983F0>, s3_bucket = None
def test_bucket(self, s3_bucket):
> s3dvi = S3DatasetVersionInfo(s3_bucket)
test_datasets.py:326:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <verta.client.S3DatasetVersionInfo object at 0x11898950>, bucket_name = None, key = None, url_stub = None
def __init__(self, bucket_name, key=None, url_stub=None):
super(S3DatasetVersionInfo, self).__init__()
self.location_type = _DatasetVersionService.PathLocationTypeEnum.S3_FILE_SYSTEM
self.bucket_name = bucket_name
self.key = key
self.url_stub = url_stub
self.base_path = ("" if url_stub is None else url_stub) + bucket_name \
> + (("/" + key) if key is not None else "")
E TypeError: can only concatenate str (not "NoneType") to str
..\verta\client.py:762: TypeError
_______________________________ TestS3ClientFunctions.test_s3_dataset_version_creation ________________________________
self = <test_datasets.TestS3ClientFunctions object at 0x118B7EB0>, client = <verta.client.Client object at 0x118BA810>
s3_bucket = None
def test_s3_dataset_version_creation(self, client, s3_bucket):
name = utils.gen_str()
dataset = client.create_s3_dataset("s3-" + name)
dataset_version = client.create_s3_dataset_version(dataset,
> s3_bucket)
test_datasets.py:341:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
..\verta\client.py:387: in create_s3_dataset_version
url_stub=url_stub)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <verta.client.S3DatasetVersionInfo object at 0x118BA350>, bucket_name = None, key = None, url_stub = None
def __init__(self, bucket_name, key=None, url_stub=None):
super(S3DatasetVersionInfo, self).__init__()
self.location_type = _DatasetVersionService.PathLocationTypeEnum.S3_FILE_SYSTEM
self.bucket_name = bucket_name
self.key = key
self.url_stub = url_stub
self.base_path = ("" if url_stub is None else url_stub) + bucket_name \
> + (("/" + key) if key is not None else "")
E TypeError: can only concatenate str (not "NoneType") to str
..\verta\client.py:762: TypeError
------------------------------------------------ Captured stdout setup ------------------------------------------------
connection successfully established
------------------------------------------------ Captured stdout call -------------------------------------------------
created new Dataset: s3-hrpddxdn
_______________________ TestFilesystemClientFunctions.test_filesystem_dataset_version_creation ________________________
self = <test_datasets.TestFilesystemClientFunctions object at 0x00BBFED0>
client = <verta.client.Client object at 0x00BBF790>
def test_filesystem_dataset_version_creation(self, client):
> dir_name, _ = self.create_dir_with_files(num_files=3)
test_datasets.py:353:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <test_datasets.TestFilesystemClientFunctions object at 0x00BBFED0>, num_files = 3
def create_dir_with_files(self, num_files=10):
dir_name = 'FSD:' + str(time.time())
file_names = []
> os.mkdir(dir_name)
E NotADirectoryError: [WinError 267] The directory name is invalid: 'FSD:1564379263.6132054'
test_datasets.py:365: NotADirectoryError
------------------------------------------------ Captured stdout setup ------------------------------------------------
connection successfully established
___________________________________ TestLogDatasetVersion.test_log_dataset_version ____________________________________
self = <test_datasets.TestLogDatasetVersion object at 0x118BF530>, client = <verta.client.Client object at 0x118BF5B0>
experiment_run = name: Run 274841564379275720642
url: https://app.verta.ai/project/14564734-925d-43d9-a54c-6fa1e797188e/exp-runs/bbfcfe...86a
project id: 14564734-925d-43d9-a54c-6fa1e797188e
hyperparameters: {}
observations: {}
metrics: {}
artifact keys: []
s3_bucket = None
def test_log_dataset_version(self, client, experiment_run, s3_bucket):
name = utils.gen_str()
dataset = client.create_s3_dataset("s3-" + name)
assert dataset.dataset_type == _DatasetService.DatasetTypeEnum.PATH
> dataset_version = client.create_s3_dataset_version(dataset, s3_bucket)
test_datasets.py:395:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
..\verta\client.py:387: in create_s3_dataset_version
url_stub=url_stub)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <verta.client.S3DatasetVersionInfo object at 0x150C5E50>, bucket_name = None, key = None, url_stub = None
def __init__(self, bucket_name, key=None, url_stub=None):
super(S3DatasetVersionInfo, self).__init__()
self.location_type = _DatasetVersionService.PathLocationTypeEnum.S3_FILE_SYSTEM
self.bucket_name = bucket_name
self.key = key
self.url_stub = url_stub
self.base_path = ("" if url_stub is None else url_stub) + bucket_name \
> + (("/" + key) if key is not None else "")
E TypeError: can only concatenate str (not "NoneType") to str
..\verta\client.py:762: TypeError
------------------------------------------------ Captured stdout setup ------------------------------------------------
connection successfully established
created new Project: Proj 274841564379274472959
created new Experiment: Expt 27484156437927507779
created new ExperimentRun: Run 274841564379275720642
------------------------------------------------ Captured stdout call -------------------------------------------------
created new Dataset: s3-nhykajpg
_________________________________________ test_dataframe_modelapi_and_values __________________________________________
@hypothesis.given(dataframe_api_and_values)
> def test_dataframe_modelapi_and_values(dataframe_api_and_values):
modelapi_hypothesis\test_modelapi.py:28:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
dataframe_api_and_values = ({'name': '', 'type': 'VertaJson', 'value': [{'name': '', 'type': 'VertaBool'}]},
0 False)
@hypothesis.given(dataframe_api_and_values)
def test_dataframe_modelapi_and_values(dataframe_api_and_values):
api, values = dataframe_api_and_values
predicted_api = ModelAPI._data_to_api(values)
> assert json.dumps(api, sort_keys=True, indent=2) == json.dumps(predicted_api, sort_keys=True, indent=2)
E assert '{\n "name":... }\n ]\n}' == '{\n "name": ... }\n ]\n}'
E Skipping 68 identical trailing characters in diff, use -v to show
E {
E "name": "",
E - "type": "VertaJson",
E ? ^ ^^
E + "type": "VertaList",
E ? ^^ ^...
E
E ...Full output truncated (2 lines hidden), use '-vv' to show
modelapi_hypothesis\test_modelapi.py:32: AssertionError
----------------------------------------------------- Hypothesis ------------------------------------------------------
Falsifying example: test_dataframe_modelapi_and_values(dataframe_api_and_values=({'name': '',
'type': 'VertaJson',
'value': [{'name': '', 'type': 'VertaBool'}]},
0 False))
================================================== warnings summary ===================================================
tests/test_artifacts.py::TestImages::test_store_plt
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\matplotlib\cbook\__init__.py:2349: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working
if isinstance(obj, collections.Iterator):
tests/test_artifacts.py::TestImages::test_store_plt
c:\users\kilianovski\appdata\local\programs\python\python37-32\lib\site-packages\matplotlib\cbook\__init__.py:2366: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working
return list(data) if isinstance(data, collections.MappingView) else data
-- Docs: https://docs.pytest.org/en/latest/warnings.html
============================ 11 failed, 74 passed, 2 warnings, 1 error in 1480.49 seconds =============================
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment