Skip to content

Instantly share code, notes, and snippets.

import os
import zipfile
# NOTE(review): there is no stdlib or common third-party module named "image";
# this line almost certainly fails with ModuleNotFoundError. The working import
# is `from keras.preprocessing import image` below, which rebinds the same
# name — TODO confirm and drop this line.
import image
import numpy as np
# Select the PlaidML Keras backend. This must be set BEFORE `import keras`
# (it is, on the next line) or the default backend is loaded instead.
os.environ["KERAS_BACKEND"] = "plaidml.keras.backend"
import keras
import keras.applications as kapp
from keras.preprocessing import image
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 08:21:18: Cached data exists getEnvironmentVariables, <No Resource>
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 08:21:18: > ~/anaconda3/bin/python ~/.vscode-server/extensions/ms-python.python-2019.9.34911/pythonFiles/normalizeForInterpreter.py print
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 08:21:18: Submitting code for 2a131d57-507f-4e95-8586-a0b41080c77a
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 08:21:18: Waiting for jupyter server and web panel ...
notificationsAlerts.ts:40 Executing code failed : Error: Jupyter server crashed. Unable to connect. Error code from jupyter: 1
onDidNotificationChange @ notificationsAlerts.ts:40
_register.model.onDidNotificationChange.e @ notificationsAlerts.ts:26
fire @ event.ts:572
addNotification @ notifications.ts:156
notify @ notificationService.ts:101
User belongs to experiment group 'ShowPlayIcon - start'
Starting Jedi Python language engine.
> conda --version
> pyenv root
> python3.7 -c "import sys;print(sys.executable)"
> python3.6 -c "import sys;print(sys.executable)"
> python3 -c "import sys;print(sys.executable)"
> python2 -c "import sys;print(sys.executable)"
> python -c "import sys;print(sys.executable)"
> ~/anaconda3/bin/python -c "import sys;print(sys.executable)"
extensionHost.ts:294 [Extension Host] debugger listening on port 60018
console.ts:137 [Extension Host] Extension 'mechatroner.rainbow-csv' uses a document selector without scheme. Learn more about this: https://go.microsoft.com/fwlink/?linkid=872305
console.ts:137 [Extension Host] Extension 'ms-python.python' uses a document selector without scheme. Learn more about this: https://go.microsoft.com/fwlink/?linkid=872305
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 21:36:12: Display locator refreshing progress, Class name = p, completed in 2ms, , Return Value: undefined
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 21:36:12: Notify locators are locating, Class name = p, completed in 5ms, , Return Value: undefined
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 21:36:12: Checking whether locactors have completed locating, Class name = p, completed in 1ms, , Return Value: false
console.ts:137 [Extension Host] Info Python Extension: 2019-09-25 21:36:12: Det
# Configuration file for jupyter-notebook.
#------------------------------------------------------------------------------
# Application(SingletonConfigurable) configuration
#------------------------------------------------------------------------------
## This is an application.
## The date format used by logging formatters for %(asctime)s
#c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "01-numpy-tutorial.ipynb",
"provenance": [ ],
"collapsed_sections": [ ],
"include_colab_link": true
},
"name": "python3"
channels:
- conda-forge
- anaconda
- defaults
dependencies:
- cryptography=2.3.1=py36hc365091_0
- curl=7.61.0=h84994c4_0
- krb5=1.14.2=hd3fe544_3
- libcurl=7.61.0=h1ad7b7a_0
class Attention(nn.Module):
    """Multi-head self-attention projections (ViT-style).

    This fragment builds the fused query/key/value projection and the
    output projection; the attention forward pass is not part of the
    visible code.

    Args:
        dim: total embedding dimension, split evenly across heads.
        num_heads: number of attention heads.
        qkv_bias: whether the fused qkv projection has a bias term.
        qk_scale: optional override for the attention-logit scale; any
            falsy value (None, 0) falls back to head_dim ** -0.5.
        attn_drop: dropout probability for the attention weights.
        proj_drop: accepted for interface compatibility; not used in the
            visible code — presumably consumed by code outside this paste.
    """

    def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # Falsy qk_scale falls back to 1/sqrt(head_dim), the standard scale.
        self.scale = qk_scale or head_dim ** -0.5
        # One fused linear producing q, k and v — hence the 3x output width.
        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)  # create qkv
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
# Demo scrap: prepend a [CLS] token to a patch sequence, add positional
# embeddings, and push the result through one transformer Block.
# NOTE(review): this fragment cannot run as pasted — it uses `Block` before
# its definition appears (later in this paste), and `num_patches` and
# `patch_output` are not defined anywhere in the visible source.
embed_dim = 768
num_heads = 8
block = Block(embed_dim, 8)
batch_size = 1
# Learnable [CLS] token, broadcast (not copied) across the batch dimension.
class_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
class_tokens = class_token.expand(batch_size, -1, -1)
# Learnable positional embedding; the +1 slot covers the class token.
pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim))
# Presumably patch_output is (batch, num_patches, embed_dim) — TODO confirm.
x = torch.cat((class_tokens, patch_output), dim=1)
x = x + pos_embed
block(x)
# NOTE(review): leading indentation was stripped from this paste — as written
# the class body below does not parse. Restore indentation before use. The
# `forward` method is also cut off at the last visible line (the MLP residual
# step and the return are missing).
class Block(nn.Module):
# Transformer encoder block: pre-norm attention plus MLP, each added residually.
def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm):
super().__init__()
# NOTE(review): norm_layer is accepted but nn.LayerNorm is hard-coded here;
# drop_path is accepted but unused in the visible code — TODO confirm intent.
self.norm1 = nn.LayerNorm(dim)
self.attn = Attention(dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
self.norm2 = nn.LayerNorm(dim)
# MLP hidden width is a multiple of the embedding dimension.
mlp_hidden_dim = int(dim * mlp_ratio)
self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
def forward(self, x):
# Residual pre-norm attention step; fragment ends here (truncated).
x = x + self.attn(self.norm1(x))