王翔 (Shawn Wang) shwangdev

#!/usr/bin/env python
# Multi-threaded HTTP load-test helper (gist preview; truncated).
import time
import threading
import urllib
import httplib2
import re

sum = 25000        # total number of requests (note: shadows the `sum` builtin)
concurrent = 1000  # number of worker threads
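
The preview stops at the configuration; a minimal sketch of how such a threaded load test is typically wired up, assuming a hypothetical target URL and an even split of requests across workers (neither is from the gist):

import threading
import httplib2

TARGET_URL = 'http://example.com/'  # hypothetical target, not from the gist

def worker(requests_per_thread):
    # Each worker issues its share of requests over one persistent connection.
    h = httplib2.Http()
    for _ in range(requests_per_thread):
        response, content = h.request(TARGET_URL)

threads = []
for _ in range(concurrent):
    t = threading.Thread(target=worker, args=(sum // concurrent,))
    t.start()
    threads.append(t)
for t in threads:
    t.join()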
shwangdev / connect.c
Created December 2, 2011 14:07
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <memory.h>
#include <errno.h>
#include <assert.h>
#include <sys/types.h>
#include <stdarg.h>
#include <fcntl.h>
#!/usr/bin/env python
# Time-stamp: <2011-12-25 16:34:05 Sunday by devil>
# @version 1.0
# @author ahei
import httplib
import os.path
import urllib2
#!/usr/bin/env python
# Same threaded load-test helper as above, dialed down to a smoke-test
# configuration (gist preview; truncated).
import time
import threading
import urllib
import httplib2
import re

sum = 10        # total number of requests (shadows the builtin)
concurrent = 1  # single worker thread
shwangdev / junipernc
Created March 28, 2012 15:36
#!/bin/sh
#
# Simple control script for Juniper Network Connect VPN clients
#
C='Copyright 2008-2011 Paul D. Smith <[email protected]>'
V='Version 1.16'
D='31 Oct 2011'
U='http://mad-scientist.net/juniper.html'
#
# This script is free software; you can redistribute it and/or modify it under
// Minimal Tesseract OCR setup (gist preview; truncated).
#include <baseapi.h>
#include <iostream>
#include <allheaders.h>

Pix *pix;     // Leptonica image handle
int offset;   // baseline offset reported by Tesseract
float slope;  // baseline slope reported by Tesseract

int main(int argc, char *argv[]) {
    tesseract::TessBaseAPI api;
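
The C++ preview drives Tesseract's TessBaseAPI directly; purely as an illustration of the same OCR flow, here is a sketch using the pytesseract wrapper instead (a different binding, not the gist's approach, and the image path is hypothetical):

from PIL import Image
import pytesseract

# 'page.png' is a hypothetical scanned page, not a file from the gist.
text = pytesseract.image_to_string(Image.open('page.png'))
print(text)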
#!/bin/sh
# Build Python 2.7.3 from source inside an OpenShift gear,
# installing into $OPENSHIFT_RUNTIME_DIR (gist preview; truncated).
cd $OPENSHIFT_TMP_DIR
wget http://python.org/ftp/python/2.7.3/Python-2.7.3.tar.bz2
tar jxf Python-2.7.3.tar.bz2
cd Python-2.7.3
./configure --prefix=$OPENSHIFT_RUNTIME_DIR
make install

# Fetch setuptools for the freshly built interpreter.
cd $OPENSHIFT_TMP_DIR
wget http://pypi.python.org/packages/source/s/setuptools/setuptools-0.6c11.tar.gz
'''
Created on 2012-9-19
@author: Xiang Wang
'''
import ctypes   # the ctypes/os imports hint at Windows console color support
import logging
import os

class ColorizingStreamHandler(logging.StreamHandler):
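
The preview cuts off at the class declaration; a minimal sketch of what such a colorizing handler typically does, using ANSI escapes only and ignoring the Windows console path the ctypes import suggests (the palette below is an assumption, not the gist's):

import logging

class AnsiColorizingStreamHandler(logging.StreamHandler):
    # Assumed level-to-color mapping; the gist's actual palette is not shown.
    COLORS = {
        logging.DEBUG: '\x1b[36m',     # cyan
        logging.INFO: '\x1b[32m',      # green
        logging.WARNING: '\x1b[33m',   # yellow
        logging.ERROR: '\x1b[31m',     # red
        logging.CRITICAL: '\x1b[41m',  # red background
    }
    RESET = '\x1b[0m'

    def format(self, record):
        message = logging.StreamHandler.format(self, record)
        color = self.COLORS.get(record.levelno)
        # Only colorize when writing to an interactive terminal.
        if color and self.stream.isatty():
            return color + message + self.RESET
        return message

logger = logging.getLogger('demo')
logger.addHandler(AnsiColorizingStreamHandler())
logger.warning('colorized when writing to a terminal')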
#!/usr/bin/env python
# Time-stamp: <2012-11-01 22:36:50 Thursday by devil>
# @version 1.0
# @author Xiang Wang ([email protected])
import urllib2
import sys
shwangdev / attention_lstm.py
Created June 12, 2019 15:23 (forked from mbollmann/attention_lstm.py)
My attempt at creating an LSTM with attention in Keras
class AttentionLSTM(LSTM):
"""LSTM with attention mechanism
This is an LSTM incorporating an attention mechanism into its hidden states.
Currently, the context vector calculated from the attended vector is fed
into the model's internal states, closely following the model by Xu et al.
(2016, Sec. 3.1.2), using a soft attention model following
Bahdanau et al. (2014).
The layer expects two inputs instead of the usual one:
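
The preview ends before the implementation; as a worked illustration of the Bahdanau-style soft attention the docstring cites, here is a small NumPy sketch of the score/softmax/context computation (all shapes and parameter names are assumptions, not the gist's code):

import numpy as np

def soft_attention(h_enc, s_dec, W_h, W_s, v):
    # h_enc: (T, d_h) attended vectors; s_dec: (d_s,) current decoder state.
    # Additive alignment scores: e_t = v . tanh(W_h h_t + W_s s)
    scores = np.tanh(h_enc @ W_h.T + s_dec @ W_s.T) @ v   # (T,)
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()                              # softmax over time steps
    return weights @ h_enc                                # context vector, (d_h,)

# Toy smoke test with assumed dimensions.
rng = np.random.default_rng(0)
T, d_h, d_s, d_a = 5, 8, 6, 4
context = soft_attention(rng.normal(size=(T, d_h)), rng.normal(size=d_s),
                         rng.normal(size=(d_a, d_h)), rng.normal(size=(d_a, d_s)),
                         rng.normal(size=d_a))
print(context.shape)  # (8,)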