import torch.nn as nn

class Biaffine(nn.Module):
    """Biaffine layer that scores pairs of input representations (e.g., head/dependent arcs)."""
    def __init__(self, in1_features, in2_features, out_features,
                 bias=(True, True)):
        super(Biaffine, self).__init__()
        self.in1_features = in1_features
        self.in2_features = in2_features
        self.out_features = out_features
        self.bias = bias
        # The first input is optionally augmented with a constant bias column.
        self.linear_input_size = in1_features + int(bias[0])
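The constructor is cut off above; what follows is a minimal self-contained sketch of how a biaffine scorer of this shape is commonly completed. The class name BiaffineSketch, the einsum-based forward, and the Xavier initialization are assumptions for illustration, not the original code.

import torch
import torch.nn as nn

class BiaffineSketch(nn.Module):
    """Illustrative completion: bilinear scoring of two sets of vectors with optional bias columns."""
    def __init__(self, in1_features, in2_features, out_features, bias=(True, True)):
        super(BiaffineSketch, self).__init__()
        self.bias = bias
        # One (in1 x in2) weight slice per output channel, covering the bias-augmented sizes.
        self.weight = nn.Parameter(torch.empty(out_features,
                                               in1_features + int(bias[0]),
                                               in2_features + int(bias[1])))
        nn.init.xavier_uniform_(self.weight)

    def forward(self, x1, x2):
        # x1: (batch, len1, in1_features), x2: (batch, len2, in2_features)
        if self.bias[0]:
            x1 = torch.cat([x1, x1.new_ones(x1.size(0), x1.size(1), 1)], dim=-1)
        if self.bias[1]:
            x2 = torch.cat([x2, x2.new_ones(x2.size(0), x2.size(1), 1)], dim=-1)
        # Pairwise scores of shape (batch, out_features, len1, len2).
        return torch.einsum('bxi,oij,byj->boxy', x1, self.weight, x2)

scores = BiaffineSketch(100, 100, 1)(torch.rand(2, 5, 100), torch.rand(2, 7, 100))  # (2, 1, 5, 7)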
import torch.nn as nn

class GCN(nn.Module):
    """A GCN/contextualized GCN module that operates on dependency graphs."""
    def __init__(self, in_dim, mem_dim, num_layers, in_drop=0.5, out_drop=0.5, batch=True):
        super(GCN, self).__init__()
        self.layers = num_layers
        self.mem_dim = mem_dim
        self.in_dim = in_dim
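Only the constructor survives above. As a rough sketch, a single layer of such a module typically applies a degree-normalised aggregation over the dependency adjacency matrix; the layer name and the exact normalisation below are assumptions, not the original implementation.

import torch
import torch.nn as nn
import torch.nn.functional as F

class GCNLayerSketch(nn.Module):
    """Illustrative single layer: degree-normalised neighbour aggregation over an adjacency matrix."""
    def __init__(self, in_dim, mem_dim):
        super(GCNLayerSketch, self).__init__()
        self.linear = nn.Linear(in_dim, mem_dim)

    def forward(self, adj, inputs):
        # adj: (batch, seq_len, seq_len) dependency adjacency; inputs: (batch, seq_len, in_dim)
        denom = adj.sum(dim=2, keepdim=True) + 1        # +1 accounts for the implicit self-loop
        neighbours = adj.bmm(inputs)                    # sum each token's neighbour representations
        out = self.linear(neighbours + inputs) / denom  # include the node itself, normalise by degree
        return F.relu(out)

# Hypothetical usage: stack num_layers of these with dropout between them.
layer = GCNLayerSketch(in_dim=300, mem_dim=200)
h = layer(torch.eye(5).unsqueeze(0), torch.rand(1, 5, 300))   # (1, 5, 200)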
import torch.nn as nn

class GAT(nn.Module):
    def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads):
        """Dense version of GAT with nheads independent attention heads."""
        super(GAT, self).__init__()
        self.dropout = dropout
        # GraphAttentionLayer is defined elsewhere in the repository.
        self.attentions = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True)
                           for _ in range(nheads)]
        # Register each head so its parameters are tracked by this module.
        for i, attention in enumerate(self.attentions):
            self.add_module('attention_{}'.format(i), attention)
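Since GraphAttentionLayer is not shown in this snippet, the sketch below illustrates what such a dense attention head usually computes and how the registered heads are typically concatenated. The class name, hyperparameters, and toy graph are assumptions for illustration.

import torch
import torch.nn as nn
import torch.nn.functional as F

class GraphAttentionLayerSketch(nn.Module):
    """Illustrative dense head: e_ij = LeakyReLU(a^T [W h_i || W h_j]), masked by the adjacency."""
    def __init__(self, in_features, out_features, dropout, alpha, concat=True):
        super(GraphAttentionLayerSketch, self).__init__()
        self.W = nn.Parameter(torch.empty(in_features, out_features))
        self.a = nn.Parameter(torch.empty(2 * out_features, 1))
        nn.init.xavier_uniform_(self.W)
        nn.init.xavier_uniform_(self.a)
        self.leakyrelu = nn.LeakyReLU(alpha)
        self.dropout = dropout
        self.concat = concat

    def forward(self, h, adj):
        Wh = h @ self.W                                        # (N, out_features)
        # Split the attention vector so e_ij = a1 . Wh_i + a2 . Wh_j, computed for all pairs at once.
        e = self.leakyrelu(Wh @ self.a[:Wh.size(1)] + (Wh @ self.a[Wh.size(1):]).transpose(0, 1))
        e = e.masked_fill(adj == 0, float('-inf'))             # attend only along existing edges
        attn = F.dropout(F.softmax(e, dim=1), self.dropout, training=self.training)
        out = attn @ Wh
        return F.elu(out) if self.concat else out

# Multi-head concatenation matching the loop above (toy sizes, self-loops added to avoid empty rows):
heads = [GraphAttentionLayerSketch(16, 8, dropout=0.6, alpha=0.2) for _ in range(4)]
x = torch.rand(10, 16)
adj = ((torch.rand(10, 10) > 0.5).float() + torch.eye(10)).clamp(max=1)
x = torch.cat([att(x, adj) for att in heads], dim=1)           # (10, 4 * 8)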
from ekphrasis.classes.preprocessor import TextPreProcessor
from ekphrasis.classes.tokenizer import SocialTokenizer
from ekphrasis.dicts.emoticons import emoticons
import numpy as np
import re
import io

# Mapping between integer class labels and emotion names.
label2emotion = {0: "others", 1: "happy", 2: "sad", 3: "angry"}
emotion2label = {"others": 0, "happy": 1, "sad": 2, "angry": 3}
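A sketch of how these imports are commonly wired together, in the spirit of the ekphrasis README; the specific normalization and annotation options below are assumptions, not necessarily this file's settings.

text_processor = TextPreProcessor(
    # Replace these token classes with placeholder tags such as <url>, <user>, <number>.
    normalize=['url', 'email', 'percent', 'money', 'phone', 'user',
               'time', 'date', 'number'],
    # Mark (rather than remove) these phenomena, e.g. "HAPPY" -> "happy <allcaps>".
    annotate={"hashtag", "allcaps", "elongated", "repeated", "emphasis", "censored"},
    fix_html=True,
    segmenter="twitter",          # word segmentation statistics for unpacking hashtags
    corrector="twitter",          # spell-correction statistics
    unpack_hashtags=True,
    unpack_contractions=True,
    spell_correct_elong=False,
    tokenizer=SocialTokenizer(lowercase=True).tokenize,
    dicts=[emoticons]             # map emoticons like ":)" to tags such as <happy>
)

tokens = text_processor.pre_process_doc("I feel soooo saaad today :( #badday")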
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
import torch
import torch.nn as nn
import torch.nn.init as init

def weight_init(m):
    '''
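The body of weight_init is cut off at the docstring. Below is a rough sketch of what such a per-module initializer usually looks like and how it is applied, reusing the imports above; the chosen init schemes are assumptions for illustration.

def weight_init_sketch(m):
    '''Illustrative initializer: Xavier for linear/conv weights, orthogonal for recurrent weights, zero biases.'''
    if isinstance(m, (nn.Linear, nn.Conv1d, nn.Conv2d)):
        init.xavier_uniform_(m.weight)
        if m.bias is not None:
            init.constant_(m.bias, 0.0)
    elif isinstance(m, (nn.LSTM, nn.GRU)):
        for name, param in m.named_parameters():
            if 'weight' in name:
                init.orthogonal_(param)
            elif 'bias' in name:
                init.constant_(param, 0.0)

# Applied recursively to every submodule of a model:
model = nn.Sequential(nn.Linear(8, 4), nn.ReLU(), nn.Linear(4, 2))
model.apply(weight_init_sketch)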
package com.cyhone;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.concurrent.TimeUnit;

/**
 * @author cyhone
 * @date 2017/3/28