Taken from the Gensim tutorial
- Download the attachment
- Make sure Python is installed
- Type
pip install gensim nltk
- If any library is missing, install it with
pip install <library_name>
- Run
python code_runner.py
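As a rough illustration of the kind of pipeline such a script might run (the actual contents of code_runner.py are not shown here, so the corpus and preprocessing below are assumptions), a minimal gensim + nltk sketch:

import nltk
from gensim import corpora, models

nltk.download("punkt", quiet=True)  # tokenizer data used by word_tokenize below

# Hypothetical toy corpus; the tutorial's real input is not shown here.
docs = [
    "Human machine interface for lab computer applications",
    "A survey of user opinion of computer system response time",
]

# Tokenize each document with nltk, lowercasing along the way.
texts = [nltk.word_tokenize(doc.lower()) for doc in docs]

# Build a gensim dictionary and bag-of-words corpus, then a TF-IDF model.
dictionary = corpora.Dictionary(texts)
bow = [dictionary.doc2bow(text) for text in texts]
tfidf = models.TfidfModel(bow)
for vector in tfidf[bow]:
    print(vector)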
# Dummy configuration file for request logging in Thanos.
# The options field can be added individually for http/grpc.
# If you want to add config for both grpc/http, indent the config field at the same level as http/grpc.
http:
  config:
    - path: /api/v1/query
      port: 10904
    - path: /api/v1/app_range/metrics

request_logging:
  http:
    config:
      - path: # regexp "*", e.g. /api/v1/query_range or /metrics
        port: # int
      - path: # regexp "*", e.g. /api/v1/query_range or /metrics
        port: # int
    options: # configure the options for this individual path
      level: # INFO / DEBUG / WARN / ERROR -> which messages with a certain log level will be printed
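To illustrate the "same level" note above, a sketch with both protocols configured side by side (the field names follow the dummy config above; the grpc port is an assumption for illustration, not a verified Thanos default):

request_logging:
  http:
    config:
      - path: /api/v1/query
        port: 10904
  grpc:
    config:
      - path: /metrics
        port: 10903  # assumed port, for illustration only
    options:
      level: DEBUG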
#include <bits/stdc++.h>
#define MAX_VAL 40000
using namespace std;
/*
Code for calculation of articulation points and bridges.
We use a dp array that counts the back edges passing over each u (parent) -> v (child) tree edge:
dp[v] = (# back edges going up from v) + (sum of dp[child] over the children of v) - (# back edges coming down into v).
The tree edge u -> v is a bridge iff dp[v] == 0.
*/
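The gist is truncated above. A self-contained sketch of the same back-edge counting idea for finding bridges (the names, I/O format, and simple-graph assumption are mine, not from the original):

#include <bits/stdc++.h>
using namespace std;

const int MAX_VAL = 40000;
vector<int> adj[MAX_VAL];
int depth_[MAX_VAL], dp[MAX_VAL];
bool visited[MAX_VAL];
vector<pair<int, int>> bridges;

// dp[v] = number of back edges passing over the parent(v) -> v tree edge.
void dfs(int v, int parent) {
    visited[v] = true;
    dp[v] = 0;
    for (int to : adj[v]) {
        if (to == parent) continue;          // skip the tree edge we came from (simple graph assumed)
        if (!visited[to]) {                  // tree edge v -> to
            depth_[to] = depth_[v] + 1;
            dfs(to, v);
            dp[v] += dp[to];                 // back edges from to's subtree may continue above v
            if (dp[to] == 0)                 // nothing from to's subtree climbs above this edge
                bridges.push_back({v, to});
        } else if (depth_[to] < depth_[v]) {
            dp[v]++;                         // back edge from v going up: it passes over parent(v) -> v
        } else {
            dp[v]--;                         // back edge from a descendant ends at v: it stops here
        }
    }
}

int main() {
    int n, m;
    cin >> n >> m;                           // vertex and edge counts; 0-indexed edges follow
    for (int i = 0; i < m; i++) {
        int a, b;
        cin >> a >> b;
        adj[a].push_back(b);
        adj[b].push_back(a);
    }
    for (int v = 0; v < n; v++)
        if (!visited[v]) { depth_[v] = 0; dfs(v, -1); }
    for (auto& [a, b] : bridges)
        cout << "bridge: " << a << " - " << b << '\n';
    return 0;
}

Note that the dp count alone only identifies bridges; articulation points need the low-link variant of the same DFS.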
import java.util.*;
// Detect cycle in a directed graph
public class cycle
{
    int V;                      // number of vertices
    LinkedList<Integer> adj[];  // adjacency lists
    public cycle(int v)
    {
        V = v;
        adj = new LinkedList[v];
        for (int i = 0; i < v; i++)
            adj[i] = new LinkedList<>();
    }
}
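The gist breaks off at the constructor. A complete sketch of the usual recursion-stack DFS check (class and method names here are mine, not the original gist's):

import java.util.*;

public class CycleSketch {
    private final List<List<Integer>> adj;

    CycleSketch(int v) {
        adj = new ArrayList<>();
        for (int i = 0; i < v; i++) adj.add(new ArrayList<>());
    }

    void addEdge(int from, int to) { adj.get(from).add(to); }

    // A directed graph has a cycle iff DFS finds an edge back into the
    // current recursion stack.
    boolean hasCycle() {
        boolean[] visited = new boolean[adj.size()];
        boolean[] onStack = new boolean[adj.size()];
        for (int v = 0; v < adj.size(); v++)
            if (!visited[v] && dfs(v, visited, onStack)) return true;
        return false;
    }

    private boolean dfs(int v, boolean[] visited, boolean[] onStack) {
        visited[v] = true;
        onStack[v] = true;
        for (int next : adj.get(v)) {
            if (onStack[next]) return true;   // back edge -> cycle
            if (!visited[next] && dfs(next, visited, onStack)) return true;
        }
        onStack[v] = false;                   // leave the recursion stack
        return false;
    }

    public static void main(String[] args) {
        CycleSketch g = new CycleSketch(4);
        g.addEdge(0, 1); g.addEdge(1, 2); g.addEdge(2, 0); g.addEdge(2, 3);
        System.out.println(g.hasCycle() ? "Graph contains cycle"
                                        : "Graph doesn't contain cycle");
    }
}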
def _insert_in_metadata_fits_safe(meta, key, value):
    """Helper function to insert key-value pair into metadata in a way
    that FITS can serialize.

    Parameters
    ----------
    key : str
        Key to be inserted in the dictionary.
    value : str or None
        Value to be inserted.

    Notes
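The body and the rest of the docstring are truncated above. One plausible sketch, assuming the concern is the FITS 8-character keyword limit (my assumption, not the original implementation):

def _insert_in_metadata_fits_safe_sketch(meta, key, value):
    # FITS keywords are limited to 8 characters; astropy Header objects
    # accept longer keys via the HIERARCH convention, so route long keys
    # through it.  (Sketch only; the original body is not shown above.)
    if len(key) > 8:
        meta["HIERARCH " + key] = value
    else:
        meta[key] = value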
- Install gensim
pip install gensim
- Run
python sum100.py <input folder> <output folder>
This gist lists the use cases and details of using asyncio support in Scrapy.
Requirements:
- Python 3.7
- asyncio: pip install asyncio
- aiohttp: pip install aiohttp
import scrapy
from scrapy.Fetch import Fetch  # Fetch is the API this gist proposes; it is not part of released Scrapy
import asyncio
import aiohttp

class QuotesSpider(scrapy.Spider):
    name = "quotes"

    async def start_requests(self):
        urls = [
            "http://quotes.toscrape.com/page/1/",
            "http://quotes.toscrape.com/page/2/",
        ]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.parse)
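The gist is cut off above. To show where aiohttp fits once callbacks can be coroutines, a minimal standalone sketch using only aiohttp's documented client API (the URL and the crude title extraction are mine):

import asyncio
import aiohttp

async def fetch_title(url):
    # Open a session, await the response body, and pull out the <title> tag.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            body = await response.text()
    return body.split("<title>")[1].split("</title>")[0]

if __name__ == "__main__":
    # asyncio.run is available from Python 3.7, matching the requirement above.
    print(asyncio.run(fetch_title("http://quotes.toscrape.com")))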