I took all my instructions from this page. https://github.com/crr0004/deepracer
Here are the revised instructions for OSX (bold is console command)
- Change to a folder in terminal that is not case-sensitive. ~/ should be fine
- git clone --recurse-submodules https://github.com/crr0004/deepracer.git
- brew install minio/stable/minio (if you don't have Homebrew installed yet, install it first with: /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)")
- install vncviewer from here https://www.realvnc.com/download/file/viewer.files/VNC-Viewer-6.19.325-MacOSX-x86_64.dmg
- cd rl_coach
- vim env.sh
- replace the $(hostname -i) with your IP address (press i to enter insert mode; press Esc, then type :wq to save and quit) -- to find your IP address, run: ifconfig|grep -e 'inet [197][970]'
- add a "g" before readlink, so that it reads greadlink
- save and exit
- brew install coreutils
- on Linux, the "source" command runs a shell script in the current shell; on macOS you can use "." instead of "source"
- . ./env.sh
- minio server data
- Browse to http://127.0.0.1:9000 and use the credentials the minio command gave you to login
- Create a bucket called "bucket"
- Now edit the env.sh file again, this time replacing "minio" with the minio access key and "miniokey" with the access secret.
- Now you're all done setting up your fake s3 bucket/server
- Let's start the SageMaker setup. Press Command-T to open a new terminal tab
- Go back to the repo root ("deepracer") folder: cd ..
- python3 -m venv sagemaker_venv
- This assumes you already have python3 installed. You probably need both pythons installed, 2 and 3.
- . sagemaker_venv/bin/activate
- pip install PyYAML==3.11
- pip install urllib3==1.21.1
- pip install -U sagemaker-python-sdk/ awscli ipython pandas
- docker pull crr0004/sagemaker-rl-tensorflow:console
- docker tag crr0004/sagemaker-rl-tensorflow:console 520713654638.dkr.ecr.us-east-1.amazonaws.com/sagemaker-rl-tensorflow:coach0.11-cpu-py3
- I'm also assuming you already have docker installed and logged in with a docker account
- mkdir -p ~/.sagemaker && cp config.yaml ~/.sagemaker
- cd rl_coach
- export LOCAL_ENV_VAR_JSON_PATH=$(greadlink -f ./env_vars.json)
- mkdir ~/robo
- mkdir ~/robo/container
- ipython rl_deepracer_coach_robomaker.py
- NOW SAGEMAKER LOCAL should be working
- Now for Robomaker
- Press Command-T to open a new terminal window
- cd ..
- . sagemaker_venv/bin/activate
- cd rl_coach
- . ./env.sh
- docker pull crr0004/deepracer_robomaker:console
- cd ..
- edit the robomaker.env file to also reference your local ip address and your aws key and secret
- docker run --rm --name dr --env-file ./robomaker.env --network sagemaker-local -p 8080:5900 -it crr0004/deepracer_robomaker:console
- Press Command-Space, open VNC Viewer, and connect to 127.0.0.1:8080 to view Gazebo
When I try to start SageMaker, I'm getting the exception below. I'm also getting InvalidAccessKeyId, even though I have given the correct MinIO access key.
Model checkpoints and other metadata will be stored at: s3://bucket/rl-deepracer-sagemaker
Uploading to s3://bucket/rl-deepracer-sagemaker
WARNING:sagemaker:Parameter
image_name
is specified,toolkit
,toolkit_version
,framework
are going to be ignored when choosing the image.s3.ServiceResource()
Using provided s3_client
ClientError Traceback (most recent call last)
/deepracer/sagemaker_venv/lib/python3.6/site-packages/boto3/s3/transfer.py in upload_file(self, filename, bucket, key, callback, extra_args)
278 try:
--> 279 future.result()
280 # If a client error was raised, add the backwards compatibility layer
/deepracer/sagemaker_venv/lib/python3.6/site-packages/s3transfer/futures.py in result(self)
105 # out of this and propogate the exception.
--> 106 return self._coordinator.result()
107 except KeyboardInterrupt as e:
/deepracer/sagemaker_venv/lib/python3.6/site-packages/s3transfer/futures.py in result(self)
264 if self._exception:
--> 265 raise self._exception
266 return self._result
/deepracer/sagemaker_venv/lib/python3.6/site-packages/s3transfer/tasks.py in call(self)
125 if not self._transfer_coordinator.done():
--> 126 return self._execute_main(kwargs)
127 except Exception as e:
/deepracer/sagemaker_venv/lib/python3.6/site-packages/s3transfer/tasks.py in _execute_main(self, kwargs)
149
--> 150 return_value = self._main(**kwargs)
151 # If the task is the final task, then set the TransferFuture's
/deepracer/sagemaker_venv/lib/python3.6/site-packages/s3transfer/upload.py in _main(self, client, fileobj, bucket, key, extra_args)
691 with fileobj as body:
--> 692 client.put_object(Bucket=bucket, Key=key, Body=body, **extra_args)
693
/deepracer/sagemaker_venv/lib/python3.6/site-packages/botocore/client.py in _api_call(self, *args, **kwargs)
315 # The "self" in this scope is referring to the BaseClient.
--> 316 return self._make_api_call(operation_name, kwargs)
317
/deepracer/sagemaker_venv/lib/python3.6/site-packages/botocore/client.py in _make_api_call(self, operation_name, api_params)
634 error_class = self.exceptions.from_code(error_code)
--> 635 raise error_class(parsed_response, operation_name)
636 else:
ClientError: An error occurred (InvalidAccessKeyId) when calling the PutObject operation: The Access Key Id you provided does not exist in our records.
During handling of the above exception, another exception occurred:
S3UploadFailedError Traceback (most recent call last)
/deepracer/rl_coach/rl_deepracer_coach_robomaker.py in
128 )
129
--> 130 estimator.fit(job_name=job_name, wait=False)
/deepracer/sagemaker_venv/lib/python3.6/site-packages/sagemaker/estimator.py in fit(self, inputs, wait, logs, job_name)
230 based on the training image name and current timestamp.
231 """
--> 232 self._prepare_for_training(job_name=job_name)
233
234 self.latest_training_job = _TrainingJob.start_new(self, inputs)
/deepracer/sagemaker_venv/lib/python3.6/site-packages/sagemaker/estimator.py in _prepare_for_training(self, job_name)
849 script = self.entry_point
850 else:
--> 851 self.uploaded_code = self._stage_user_code_in_s3()
852 code_dir = self.uploaded_code.s3_prefix
853 script = self.uploaded_code.script_name
/deepracer/sagemaker_venv/lib/python3.6/site-packages/sagemaker/estimator.py in _stage_user_code_in_s3(self)
892 dependencies=self.dependencies,
893 kms_key=kms_key,
--> 894 s3_client=self.sagemaker_session.s3_client)
895
896 def _model_source_dir(self):
/deepracer/sagemaker_venv/lib/python3.6/site-packages/sagemaker/fw_utils.py in tar_and_upload_dir(session, bucket, s3_key_prefix, script, directory, dependencies, kms_key, s3_client)
193 else:
194 print("Using provided s3_client")
--> 195 s3_client.Object(bucket, key).upload_file(tar_file, ExtraArgs=extra_args)
196 finally:
197 shutil.rmtree(tmp)
/deepracer/sagemaker_venv/lib/python3.6/site-packages/boto3/s3/inject.py in object_upload_file(self, Filename, ExtraArgs, Callback, Config)
278 return self.meta.client.upload_file(
279 Filename=Filename, Bucket=self.bucket_name, Key=self.key,
--> 280 ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
281
282
/deepracer/sagemaker_venv/lib/python3.6/site-packages/boto3/s3/inject.py in upload_file(self, Filename, Bucket, Key, ExtraArgs, Callback, Config)
129 return transfer.upload_file(
130 filename=Filename, bucket=Bucket, key=Key,
--> 131 extra_args=ExtraArgs, callback=Callback)
132
133
/deepracer/sagemaker_venv/lib/python3.6/site-packages/boto3/s3/transfer.py in upload_file(self, filename, bucket, key, callback, extra_args)
285 raise S3UploadFailedError(
286 "Failed to upload %s to %s: %s" % (
--> 287 filename, '/'.join([bucket, key]), e))
288
289 def download_file(self, bucket, key, filename, extra_args=None,
S3UploadFailedError: Failed to upload /var/folders/kv/qfrzr8td1vsck5z77f1lv6xr0000gn/T/tmptehvrg1n/source.tar.gz to bucket/rl-deepracer-sagemaker/source/sourcedir.tar.gz: An error occurred (InvalidAccessKeyId) when calling the PutObject operation: The Access Key Id you provided does not exist in our records.