.blueColor() to UIColor.blue
.clearColor() to UIColor.clear
.darkGrayColor() to UIColor.darkGray
.lightGrayColor() to UIColor.lightGray
class ConvNet(nn.Module):
    def __init__(self, num_classes=10):
        super(ConvNet, self).__init__()
        self.layer1 = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=5, stride=1, padding=2),
            nn.BatchNorm2d(16),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        # layer2 completed symmetrically with layer1: 16 -> 32 feature maps
        self.layer2 = nn.Sequential(
            nn.Conv2d(16, 32, kernel_size=5, stride=1, padding=2),
            nn.BatchNorm2d(32),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.fc = nn.Linear(7 * 7 * 32, num_classes)  # assumes 28x28 (MNIST) inputs

    def forward(self, x):
        out = self.layer2(self.layer1(x))
        out = out.reshape(out.size(0), -1)
        return self.fc(out)
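A quick sanity check of the model above (a minimal sketch; the batch size of 64 is arbitrary, and the 28x28 single-channel shape matches what the fc layer assumes):

import torch

model = ConvNet(num_classes=10)
x = torch.randn(64, 1, 28, 28)  # dummy batch: 64 single-channel 28x28 images
logits = model(x)
print(logits.shape)  # torch.Size([64, 10])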
import os
from datetime import datetime
import argparse
import torch.multiprocessing as mp
import torchvision
import torchvision.transforms as transforms
import torch
import torch.nn as nn
import torch.distributed as dist
from apex.parallel import DistributedDataParallel as DDP
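These imports come from a multi-GPU training setup. A minimal sketch of how they fit together per process (the args fields nr, gpus, and world_size are assumptions, as are the MASTER_ADDR/MASTER_PORT environment variables the env:// init method requires):

def train(gpu, args):
    # Hypothetical rank computation: args.nr nodes with args.gpus processes each
    rank = args.nr * args.gpus + gpu
    dist.init_process_group(backend='nccl', init_method='env://',
                            world_size=args.world_size, rank=rank)
    torch.cuda.set_device(gpu)
    model = ConvNet(num_classes=10).cuda(gpu)
    model = DDP(model)  # apex wrapper: all-reduces gradients across processes

# Launch one process per local GPU; each receives its local GPU index
mp.spawn(train, nprocs=args.gpus, args=(args,))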
{
    "Iris": 100,
    "MNIST": 100000,
    "Public SVHN": 1000000,
    "Champolion": 10000000,
    "ImageNet": 100000000,
    "HN transcriptor": 1000000000,
    "Seconds from birth to college graduation": 10000000000,
    "HN detector": 100000000000
}
def start_dataflow_pipeline():
    bucket = 'fb_bucket'
    BODY = {
        "jobName": 'fb_catalog_upload',
        "gcsPath": "gs://{bucket}/catalog_up".format(bucket=bucket),
        "environment": {
            "tempLocation": "gs://{bucket}/temp".format(bucket=bucket),
            "zone": "us-central1-f"
        }
    }
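With the template already staged (see the shell script below), BODY can be handed to the Dataflow templates API. A sketch using google-api-python-client with application-default credentials, where 'project_name' is a placeholder project ID:

from googleapiclient.discovery import build

service = build('dataflow', 'v1b3')
request = service.projects().templates().create(projectId='project_name', body=BODY)
response = request.execute()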
#!/bin/bash
python -m catalog_upload --project project_name --runner DataflowRunner \
    --staging_location $BUCKET/staging --temp_location $BUCKET/temp \
    --output $BUCKET/results/output --template_location $BUCKET/catalog_up \
    --requirements_file requirements.txt
def transform_entity(pb):
    from google.cloud.datastore.helpers import entity_from_protobuf
    entity = entity_from_protobuf(pb)
    retailer_id = entity.get('id', '')
    name = entity.get('name', '')
    category = entity.get('category', '')
    description = entity.get('description', '')
    image_link = entity.get('image', '')
    price = entity.get('price', '0')
    link = entity.get('url', '')
    # Assumed output shape: one comma-separated row per product for the catalog feed
    return ','.join(str(v) for v in
                    [retailer_id, name, category, description, image_link, price, link])
import apache_beam as beam
from apache_beam.io.gcp.datastore.v1.datastoreio import ReadFromDatastore
from google.cloud.proto.datastore.v1 import query_pb2

# 1 Define the Dataflow pipeline
with beam.Pipeline() as p:
    # 2 Query all products in the queried_namespace
    query = query_pb2.Query()
    query.kind.add().name = 'Product'
    entities = (p | 'Read From Datastore' >>
                ReadFromDatastore('project_name', query,
                                  namespace='queried_namespace'))
    # 3 Format the rows
    products = entities | 'Format Rows' >> beam.Map(transform_entity)
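A sink step completes the pipeline. A minimal sketch that writes one product row per line to GCS, assuming the $BUCKET/results/output path used when staging the template:

    # 4 Write the formatted rows to GCS as text
    products | 'Write To GCS' >> beam.io.WriteToText('gs://fb_bucket/results/output')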
find . -type f -name '*.swift' -exec sed -i '' 's/\.endIndex/\.upperBound/' {} +
find . -type f -name '*.swift' -exec sed -i '' 's/\.startIndex/\.lowerBound/' {} +
find . -type f -name '*.swift' -exec sed -i '' 's/offsetInPlace/offsetBy/' {} +
# Uppercase to lowercase
find . -type f -name '*.swift' -exec sed -i '' 's/\.CGColor/\.cgColor/' {} +
Language                     files          blank        comment           code
-------------------------------------------------------------------------------
Objective C                   1089          31528          14773         132248
Swift                          864          19360           8987          75756