You can bridge Gitter rooms into Matrix. To do that you need to convert the Gitter URL into a valid Matrix address.
Bridge template: #gitter_<server>=2F<channel>:matrix.org (the `=2F` encodes the `/` between server and channel)
e.g.
https://gitter.im/xonsh/xonsh
is server/channel, so the Matrix address becomes #gitter_xonsh=2Fxonsh:matrix.org
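A quick sketch of that conversion in Python; `gitter_to_matrix` is just an illustration of the template above, not an official tool:

```python
from urllib.parse import urlparse

def gitter_to_matrix(url):
    """Turn a Gitter room URL into a matrix.org bridge alias (illustrative helper)."""
    server, channel = urlparse(url).path.strip('/').split('/')
    return f'#gitter_{server}=2F{channel}:matrix.org'

print(gitter_to_matrix('https://gitter.im/xonsh/xonsh'))
# -> #gitter_xonsh=2Fxonsh:matrix.org
```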
| """ | |
| Delete xonsh shell history by matching patterns | |
| i.e. `python delete-history.py "^ping"` will delete every history command that starts with "ping" | |
| """ | |
| import json | |
| from json import JSONDecodeError | |
| from pathlib import Path | |
| import click | |
| import os |
| # LICENSE GPLv3 | |
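The rest of the script isn't shown here; the sketch below only shows the matching logic the docstring describes, with a made-up `filter_commands` helper (the real script works on xonsh's history files):

```python
import re

def filter_commands(commands, pattern):
    """Keep only the history entries that do NOT match `pattern`."""
    regex = re.compile(pattern)
    return [cmd for cmd in commands if not regex.search(cmd)]

filter_commands(['ping 8.8.8.8', 'ls -la', 'ping example.com'], '^ping')
# -> ['ls -la']
```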
```python
def lazy_dict_merge(root, update):
    """
    Merge two dictionaries lazily and recursively (supports nested dicts).
    Lazy means only missing keys are updated,
    i.e. update['foo'] is only copied into root if root doesn't have 'foo' already.
    >>> lazy_dict_merge({'foo': '1'}, {'bar': '2'})
    {'foo': '1', 'bar': '2'}
    """
```
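The function body is cut off above; a minimal sketch of the lazy, recursive merge the docstring describes could look like:

```python
def lazy_dict_merge(root, update):
    # Sketch only: copy keys from `update` that `root` is missing,
    # recursing when both sides hold nested dicts.
    result = dict(root)
    for key, value in update.items():
        if key not in result:
            result[key] = value
        elif isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = lazy_dict_merge(result[key], value)
    return result
```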
```python
import requests
from minds import Minds

api = Minds()
print(api)
```
```python
# instead of
with open('./data/avail_urls.txt', 'w') as f:
    for item in items:
        if 'archived_snapshots' in item:
            if 'closest' in item['archived_snapshots']:
                f.write(item['archived_snapshots']['closest']['url'] + '\n')

# write
with open('./data/avail_urls.txt', 'w') as f:
    for item in items:
        if 'closest' not in item.get('archived_snapshots', {}):
            continue
        f.write(item['archived_snapshots']['closest']['url'] + '\n')
```
```python
import os
from requests.exceptions import ProxyError, ReadTimeout
from requests_futures.sessions import FuturesSession

def check_proxies(proxies, max_workers=5, timeout=5):
    """
    Check whether proxies are functional and whether authentication matches.
    This function will filter out any proxies that:
    * return 407 credential mismatch
```
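The rest of the snippet is missing; here is a hedged sketch of how such a check could be wired up with requests_futures. The `test_url` parameter and any filtering rule beyond the 407 case are assumptions, not the original code:

```python
from requests.exceptions import ProxyError, ReadTimeout
from requests_futures.sessions import FuturesSession

def check_proxies(proxies, max_workers=5, timeout=5, test_url='http://httpbin.org/ip'):
    """Return the subset of `proxies` that answer a test request without errors."""
    session = FuturesSession(max_workers=max_workers)
    # Fire off all requests concurrently, one future per proxy.
    futures = {
        proxy: session.get(test_url, proxies={'http': proxy, 'https': proxy}, timeout=timeout)
        for proxy in proxies
    }
    working = []
    for proxy, future in futures.items():
        try:
            response = future.result()
        except (ProxyError, ReadTimeout):
            continue  # dead proxy or no answer within the timeout
        if response.status_code == 407:
            continue  # proxy demands (different) credentials
        working.append(proxy)
    return working
```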
```python
#!/usr/bin/env python3
# requires python3.6
# requires click, parsel, requests_futures from pip
from urllib.parse import quote, unquote

import click
from parsel import Selector
from requests_futures.sessions import FuturesSession
```
```python
import json
import re

import requests
from parsel import Selector

def scrape():
    data = requests.get('https://www.bundlestars.com/api/promotions/mega-pick-mix-bundle-2')
    products = json.loads(data.text)[0]['products']
```
```python
# -*- coding: utf-8 -*-
import scrapy

class MyipSpider(scrapy.Spider):
    name = "myip"
    # allowed_domains takes bare domains, not full URLs
    allowed_domains = ["httpbin.org"]
    start_urls = (
        'http://httpbin.org/ip',
    )
```
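The parse callback is missing from the fragment; a minimal standalone version could look like this (it assumes the `{"origin": "<ip>"}` JSON body that httpbin.org/ip returns):

```python
import json

import scrapy

class MyipSpider(scrapy.Spider):
    name = "myip"
    allowed_domains = ["httpbin.org"]
    start_urls = ['http://httpbin.org/ip']

    def parse(self, response):
        # httpbin.org/ip answers with JSON such as {"origin": "1.2.3.4"}
        yield {'origin': json.loads(response.text)['origin']}
```

You can run it standalone with `scrapy runspider <file>.py -o ip.json`, no full project needed.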
```python
import requests
from parsel import Selector

def scrape():
    data = """
    7 Grand Steps: What Ancients Begat (DRM Free + Steam)
    2064: Read Only Memories (DRM Free + Steam)
    A Virus Named TOM (DRM Free + Steam)
    AI War: Fleet Command (DRM Free + Steam)
```
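The snippet stops mid-string; if the goal is just the bare titles, one possible way to strip the "(DRM Free + Steam)" suffix from lines like these (an assumption about what `scrape()` goes on to do):

```python
titles = [
    line.strip().rsplit(' (', 1)[0]   # drop the trailing "(...)" note
    for line in data.splitlines()
    if line.strip()                   # skip blank lines
]
# -> ['7 Grand Steps: What Ancients Begat', '2064: Read Only Memories', ...]
```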