file
main
lst
tmp
temp
duple
first
second
third
sudo
apt
install
download
pip
python
SQL
exit
import
math
csv
os
sys
tkinter
print()
input()
if
else
elif
for
while
which
in
def
with
range()
random()
randint()
int
str
bool
True
False
float
list
tuple
dict
set
class
self
__init__
super()
*args
**kwargs
SELECT
DISTINCT
FROM
JOIN
WHERE
LIKE
ORDER BY
GROUP BY
INSERT INTO
DESC
LIMIT
HAVING
AND
OR
NOT
BETWEEN
IN
IF
CREATE
TABLE
ROUND
SUM
COUNT
MIN
MAX
AVG
SET
END
UPDATE
1 == 1
x != 0
file main lst tmp temp duple first second third
sudo apt install download pip python SQL exit
import math csv os sys tkinter
print() input() if else elif for while which in def None with range() random() randint()
int str bool True False float list tuple dict set
class self __init__ super() *args **kwargs
SELECT DISTINCT FROM JOIN WHERE LIKE ORDER BY GROUP BY INSERT INTO DESC LIMIT HAVING AND OR NOT BETWEEN IN IF CREATE TABLE ROUND SUM COUNT MIN MAX AVG SET END UPDATE NULL
1 == 1 x != 0 // ++
---
CONST_NAME = 'Name'
lst = [1, 2, 3]
var = {1: first_name, 2: last_name, 3: number_phone}
print(sum(map(int, input().split())))
lst.append(lst[i] + lst[i-1])
[i for i in string if i not in letters]
f'Hello {name}. Your city is {city}?'
'-'.join(['a', 'b', 'c'])
'Any text'.split('-')
for i in range(1, 20):
for i in range(len(lst)):
with open('text.html', 'w', encoding='utf-8') as file: file.write(response)
with open('text.html') as file: res = file.read()
URL = 'https://www.link.ru/'
HEADERS = {'user-agent': user, 'accept': '*/*'}
FILE = 'bd.csv'
user = fake_useragent.UserAgent().random
html = get_html(URL)
import requests
r = requests.get(url, headers=HEADERS)
test.py
file.exe
<Response [200]>
import requests
from bs4 import BeautifulSoup
URL = f'https://link.com/?page={count}'
response = requests.get(URL, headers=HEADERS)
soup = BeautifulSoup(response.text, "lxml")
data = soup.find("div", class_="class-body").text.replace("\n", "")
data = soup.find_all("div", class_="class-body")
url_img = data.find("img").get("src")
card_url = data.find("a").get("href")
cmd
powershell
file
main
lst
tmp
temp
duple
first
second
third
sudo
apt
install
download
pip
python
SQL
exit
import
math
csv
os
sys
tkinter
print()
input()
if
else
elif
for
while
which
in
def
with
range()
random()
randint()
int
str
bool
True
False
float
list
tuple
dict
set
class
self
__init__
super()
*args
**kwargs
SELECT
DISTINCT
FROM
JOIN
WHERE
LIKE
ORDER BY
GROUP BY
INSERT INTO
DESC
LIMIT
HAVING
AND
OR
NOT
BETWEEN
IN
IF
CREATE
TABLE
ROUND
SUM
COUNT
MIN
MAX
AVG
SET
END
UPDATE
1 == 1
x != 0
//
++
CONST_NAME = 'Name'
lst = [1, 2, 3]
var = {1: first_name, 2: last_name, 3: number_phone}
print(sum(map(int, input().split())))
lst.append(lst[i] + lst[i-1])
[i for i in string if i not in letters]
f'Hello {name}. Your city is {city}?'
'-'.join(['a', 'b', 'c'])
'Any text'.split('-')
for i in range(1, 20):
for i in range(len(lst)):
with open('text.html', 'w', encoding='utf-8') as file: file.write(response)
with open('text.html') as file: res = file.read()
URL = 'https://www.link.ru/'
HEADERS = {'user-agent': user, 'accept': '*/*'}
FILE = 'bd.csv'
user = fake_useragent.UserAgent().random
html = get_html(URL)
import requests
r = requests.get(url, headers=HEADERS)
test.py
file.exe
<Response [200]>
import requests
from bs4 import BeautifulSoup
URL = f'https://link.com/?page={count}'
response = requests.get(URL, headers=HEADERS)
soup = BeautifulSoup(response.text, "lxml")
data = soup.find("div", class_="class-body").text.replace("\n", "")
data = soup.find_all("div", class_="class-body")
url_img = data.find("img").get("src")
card_url = data.find("a").get("href")
cmd
powershell
cd c:
Комментарии