- Please include a brief summary of the change and which issue is fixed.
- Please also include relevant motivation and context.
- List any dependencies that are required for this change.
Fixes # (issue)
{"lastUpload":"2021-11-15T07:51:35.825Z","extensionVersion":"v3.4.3"} |
# Place your secret text file in the same directory as this script
# Update INPUT_FILE & OUTPUT_FILE to match your file names
import re

INPUT_FILE = "bashrc"
OUTPUT_FILE = "masked_bashrc"

with open(INPUT_FILE) as f:
    exports = f.readlines()
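# The masking step itself is not shown above; a minimal sketch, assuming each
# line is an `export NAME=VALUE` entry whose value should be hidden:
masked = [re.sub(r"=.*", "=********", line) for line in exports]

with open(OUTPUT_FILE, "w") as f:
    f.writelines(masked)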
const sortObjectByKeys = object => {
  let sortedObj = {},
    keys = Object.keys(object);
  // Case-insensitive alphabetical sort of the keys
  keys.sort(function(key1, key2) {
    (key1 = key1.toLowerCase()), (key2 = key2.toLowerCase());
    if (key1 < key2) return -1;
    if (key1 > key2) return 1;
    return 0;
  });
  // Rebuild the object in sorted key order and return it
  keys.forEach(key => {
    sortedObj[key] = object[key];
  });
  return sortedObj;
};
import scrapy
import sys
from bs4 import BeautifulSoup
from PyQt5.QtWebEngineWidgets import QWebEnginePage
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import QUrl


# Headless Qt web page used to render JavaScript-heavy content before parsing
class Client(QWebEnginePage):
    def __init__(self, url):
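        # The constructor body is truncated above. A common pattern for this
        # kind of helper (an assumption, not the original code) is to start a
        # QApplication, load the URL, and capture the rendered HTML once the
        # page has finished loading:
        self.app = QApplication(sys.argv)
        super().__init__()
        self.html = ""
        self.loadFinished.connect(self._on_load_finished)
        self.load(QUrl(url))
        self.app.exec_()

    def _on_load_finished(self):
        # toHtml() is asynchronous and delivers the page source via a callback
        self.toHtml(self._store_html)

    def _store_html(self, html):
        self.html = html
        self.app.quit()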
from django_filters import FilterSet, OrderingFilter

from .models import Person


class PersonFilter(FilterSet):
    class Meta:
        model = Person
        fields = ['first_name', 'last_name']
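OrderingFilter is imported above but not used in the visible part of the snippet. A minimal sketch of how it is typically declared on a FilterSet and applied to a queryset; the class name and field values below are illustrative, not taken from the original project:

class OrderedPersonFilter(FilterSet):
    # Exposes ?order_by=first_name / ?order_by=-last_name style parameters
    order_by = OrderingFilter(fields=('first_name', 'last_name'))

    class Meta:
        model = Person
        fields = ['first_name', 'last_name']

# Passing query data and a queryset yields a filtered, ordered queryset via .qs
filtered_people = OrderedPersonFilter(
    {'first_name': 'Ada', 'order_by': '-last_name'},
    queryset=Person.objects.all(),
).qs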
[chromedriver]
path = driver\chromedriver.exe

[website]
url = http://quotes.toscrape.com/

[delay]
seconds = 1
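A short sketch of how a config file like this is typically read with Python's standard configparser module; the file name config.ini is an assumption:

import configparser

config = configparser.ConfigParser()
config.read('config.ini')  # assumed file name for the INI file above

driver_path = config['chromedriver']['path']
start_url = config['website']['url']
delay_seconds = config.getint('delay', 'seconds')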
class ArticleNode(DjangoObjectType):
    dataloader_reporter = Field(
        'starter.types.ReporterNode',
        description='Get a single Reporter detail using dataloader.',
    )

    class Meta:
        model = Article
        interfaces = (Node,)
        filterset_class = ArticleFilter
        connection_class = CountableConnectionBase

    @staticmethod
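    # The decorated method is cut off above. A plausible continuation (an
    # assumption based on the field name, not the original code) is a resolver
    # that batches the Reporter lookup through a dataloader attached to the
    # request context:
    def resolve_dataloader_reporter(root, info, **kwargs):
        # `reporter_loader` is a hypothetical DataLoader stored on the context
        return info.context.reporter_loader.load(root.reporter_id)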
{
  "_type": "export",
  "__export_format": 4,
  "__export_date": "2021-03-13T05:05:13.722Z",
  "__export_source": "insomnia.desktop.app:v2021.1.1",
  "resources": [
    {
      "_id": "req_0c9319edd46b4a5b92e2bc2fd65bf2ec",
      "parentId": "wrk_0a110636708841cc8f82e73c43c2585b",
      "modified": 1615391416735,
import logging

from scrapy import Spider
from sqlalchemy.orm import sessionmaker

from example.items import ProductItem
from example.models import Price, Product, create_table, db_connect

logger = logging.getLogger(__name__)
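# The rest of the module is truncated above. A minimal sketch of how these
# imports are commonly wired together (class, method, and URL names below are
# illustrative, not taken from the original project): connect to the database
# once, make sure the tables exist, and keep a session factory for later use.
class PriceSpider(Spider):
    name = 'price_spider'  # hypothetical spider name
    start_urls = ['https://example.com/products']  # placeholder URL

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        engine = db_connect()          # connection helper from example.models
        create_table(engine)           # ensure the Product / Price tables exist
        self.Session = sessionmaker(bind=engine)

    def parse(self, response):
        logger.info('Parsed %s', response.url)
        # extraction and ProductItem construction would follow here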